| code (string, length 3–1.05M) | repo_name (string, length 5–104) | path (string, length 4–251) | language (1 class) | license (15 classes) | size (int64, 3–1.05M) |
|---|---|---|---|---|---|
from kubernetes.config.config_exception import ConfigException # noqa: F401
from kubernetes.config.incluster_config import load_incluster_config # noqa: F401
from kubernetes.config.kube_config import list_kube_config_contexts, load_kube_config # noqa: F401
from .kube_config import new_client_from_config # noqa: F401
| chouseknecht/openshift-restclient-python | openshift/config/__init__.py | Python | apache-2.0 | 322 |
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import six
import pickle
import numpy as np
import paddle
from paddle import compat as cpt
from paddle.fluid import core
from paddle.fluid import framework
from paddle.fluid import backward
from paddle.fluid import unique_name
from paddle.fluid.dygraph import layers
from paddle.fluid.layers import nn
from paddle.fluid.layers.utils import _hash_with_id
from paddle.fluid.dygraph.base import switch_to_static_graph
from paddle.fluid.framework import in_dygraph_mode
__all__ = ['TranslatedLayer']
INFER_MODEL_SUFFIX = ".pdmodel"
INFER_PARAMS_SUFFIX = ".pdiparams"
INFER_PARAMS_INFO_SUFFIX = ".pdiparams.info"
LOADED_VAR_SUFFIX = "load"
PARAMETER_NAME_PREFIX = "param"
BUFFER_NAME_PREFIX = "buffer"
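# For orientation, a sketch of the on-disk layout these suffixes refer to, assuming a
# model saved with ``paddle.jit.save(layer, "linear.example.model")`` as in the
# TranslatedLayer docstring below (the exact layout depends on the save API used):
#
#   linear.example.model.pdmodel           # serialized program desc        (INFER_MODEL_SUFFIX)
#   linear.example.model.pdiparams         # packed persistable variables   (INFER_PARAMS_SUFFIX)
#   linear.example.model.pdiparams.info    # extra var info, e.g. stop_gradient (INFER_PARAMS_INFO_SUFFIX)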
def _load_program_desc(model_file_path):
# 1. parse program desc
with open(model_file_path, "rb") as f:
program_desc_str = f.read()
program_desc = core.ProgramDesc(program_desc_str)
if not core._is_program_version_supported(program_desc._version()):
raise ValueError("Unsupported program version: %d\n" %
program_desc._version())
return program_desc
def _is_persistable(var_desc):
if var_desc.type() == core.VarDesc.VarType.FEED_MINIBATCH or \
var_desc.type() == core.VarDesc.VarType.FETCH_LIST or \
var_desc.type() == core.VarDesc.VarType.READER or \
var_desc.type() == core.VarDesc.VarType.RAW:
return False
return var_desc.persistable()
def _is_parameter(persistable_var_desc, program_desc):
# 1. firstly, param should be input of op
input_ops = [] # op can be repeated
for block_idx in six.moves.range(program_desc.num_blocks()):
block = program_desc.block(block_idx)
for op_idx in six.moves.range(block.op_size()):
op = block.op(op_idx)
# NOTE: parameter is the input of a certain op
if persistable_var_desc.name() in op.input_arg_names():
input_ops.append(op)
# 2. secondly, param should not be output of op or be same op's output
for block_idx in six.moves.range(program_desc.num_blocks()):
block = program_desc.block(block_idx)
for op_idx in six.moves.range(block.op_size()):
op = block.op(op_idx)
if persistable_var_desc.name() in op.output_arg_names():
# such as batch_norm_op
if op in input_ops:
continue
else:
return False
return True
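# A rough illustration of the rule above (hypothetical var/op names):
# - "linear_0.w_0" is consumed by a matmul op and never produced by another op,
#   so it is treated as a parameter.
# - a batch_norm mean/variance var may appear as input and output of the *same*
#   batch_norm op; that op is in `input_ops` and is skipped, so the var still counts.
# - a persistable var that only ever appears as an op output (never as an input
#   of some op) is rejected, i.e. it is not a parameter.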
def _get_persistable_vars(program_desc):
persistable_vars = []
for i in six.moves.range(program_desc.num_blocks()):
block = program_desc.block(i)
persistable_vars.extend(list(filter(_is_persistable, block.all_vars())))
return persistable_vars
def _get_persistable_var_names(program_desc):
"""
Get all persistable variable names in ProgramDesc.
"""
var_names = []
persistable_vars = _get_persistable_vars(program_desc)
for var in persistable_vars:
var_names.append(var.name())
return var_names
def _get_all_var_names(program_desc):
all_var_names = set()
for i in six.moves.range(program_desc.num_blocks()):
block = program_desc.block(i)
for var in block.all_vars():
all_var_names.add(var.name())
return all_var_names
@switch_to_static_graph
def _append_loaded_suffix(name):
"""
Append loaded suffix to the given variable name
e.g. x ==> x.load_0, x.load_0 ==> x.load_0.load_0
"""
suffix = LOADED_VAR_SUFFIX
name = cpt.to_text(name)
new_name = unique_name.generate_with_ignorable_key('.'.join((name, suffix)))
return new_name
@switch_to_static_graph
def _generate_unique_var_name(prefix):
return unique_name.generate_with_ignorable_key(prefix)
def _append_loaded_suffix_to_var(program_desc):
suffix_varname_dict = dict()
persistable_vars = _get_persistable_vars(program_desc)
for var_desc in persistable_vars:
old_name = var_desc.name()
new_name = _append_loaded_suffix(var_desc.name())
suffix_varname_dict[new_name] = old_name
var_desc.set_name(new_name)
for block_idx in six.moves.range(program_desc.num_blocks()):
block = program_desc.block(block_idx)
block._rename_var(cpt.to_bytes(old_name), cpt.to_bytes(new_name))
for op_idx in six.moves.range(block.op_size()):
op = block.op(op_idx)
op._rename_input(old_name, new_name)
op._rename_output(old_name, new_name)
return suffix_varname_dict
@switch_to_static_graph
def _generate_unique_var_name_sync_with_main_program(prefix):
return unique_name.generate(prefix)
def _get_loaded_var_new_old(program_desc, all_new_old_dict_all):
new_old_dict = dict()
persistable_vars = _get_persistable_vars(program_desc)
for var_desc in persistable_vars:
name_new = var_desc.name()
new_old_dict[name_new] = all_new_old_dict_all[name_new]
return new_old_dict
def _rename_var_program_desc(program_desc, include=None, exclude=None):
"""
Change the names of the loaded variables. Use 'unique_name.generate' to avoid duplication.
It is used when loading multiple programs during inference.
e.g. linear_0.tmp_3 ==> linear_0.tmp_1, x ==> x_0. For double grad, x@GRAD ==> x_0@GRAD
If 'include' is not `None`, variables in include and the corresponding
double grad variables (if they exist) are renamed.
If 'exclude' is not `None`, variables in exclude and the
corresponding double grad variables (if they exist) are not renamed.
Args:
program_desc(ProgramDesc):the variables in it will be modified.
include(List):list of names of variables.
exclude(List):list of names of variables.
Returns:
tuple of (dict_rename_var_new_old, dict_rename_var_old_new)
dict_rename_var_new_old is a dict mapping from new name to old name
dict_rename_var_old_new is a dict mapping from old name to new name
"""
dict_rename_var_old_new = dict()
dict_rename_var_new_old = dict()
old_names = []
# Store all old names
for b_idx in six.moves.range(program_desc.num_blocks()):
cur_block = program_desc.block(b_idx)
for var in cur_block.all_vars():
old_names.append(var.name())
# Create dict_rename_var_new_old and dict_rename_var_old_new for non double
# grad variables
has_double_grad = False
for b_idx in six.moves.range(program_desc.num_blocks()):
cur_block = program_desc.block(b_idx)
for var_idx, var in enumerate(cur_block.all_vars()):
name_old = var.name()
is_double_grad_var = "@GRAD" in name_old
has_double_grad = has_double_grad or is_double_grad_var
should_rename = (include is None or name_old in include) and (
exclude is None or
name_old not in exclude) and not is_double_grad_var
if should_rename:
temp_name = name_old.split('_')
if len(temp_name) > 1 and temp_name[-1].isnumeric():
temp_name = "_".join(temp_name[:-1])
else:
temp_name = name_old
while True:
name_new = _generate_unique_var_name_sync_with_main_program(
temp_name)
if name_new not in old_names[:var_idx] + old_names[var_idx +
1:]:
break
else:
name_new = name_old
if name_old != name_new:
cur_block._rename_var(
cpt.to_bytes(name_old), cpt.to_bytes(name_new))
if not is_double_grad_var:
dict_rename_var_old_new[name_old] = name_new
dict_rename_var_new_old[name_new] = name_old
# Handle double grad names
if has_double_grad:
double_grad_rename_dict = {}
for name_old in dict_rename_var_old_new:
for b_idx in six.moves.range(program_desc.num_blocks()):
cur_block = program_desc.block(b_idx)
for var_idx, var in enumerate(cur_block.all_vars()):
var_name = var.name()
if "@GRAD" in var_name and name_old in var_name:
new_var_name = var_name.replace(
name_old, dict_rename_var_old_new[name_old])
double_grad_rename_dict[var_name] = new_var_name
for var_name in double_grad_rename_dict:
dict_rename_var_old_new[var_name] = double_grad_rename_dict[
var_name]
dict_rename_var_new_old[double_grad_rename_dict[
var_name]] = var_name
# Rename on program desc
for b_idx in six.moves.range(program_desc.num_blocks()):
cur_block = program_desc.block(b_idx)
for op_idx in six.moves.range(cur_block.op_size()):
op = cur_block.op(op_idx)
for input_arg_name in op.input_arg_names():
if input_arg_name in dict_rename_var_old_new:
if input_arg_name != dict_rename_var_old_new[
input_arg_name]:
op._rename_input(
input_arg_name,
dict_rename_var_old_new[input_arg_name])
if cur_block.has_var(cpt.to_bytes(input_arg_name)):
cur_block._rename_var(
cpt.to_bytes(input_arg_name),
cpt.to_bytes(dict_rename_var_old_new[
input_arg_name]))
for output_arg_name in op.output_arg_names():
if output_arg_name in dict_rename_var_old_new:
if output_arg_name != dict_rename_var_old_new[
output_arg_name]:
op._rename_output(
output_arg_name,
dict_rename_var_old_new[output_arg_name])
if cur_block.has_var(cpt.to_bytes(output_arg_name)):
cur_block._rename_var(
cpt.to_bytes(output_arg_name),
cpt.to_bytes(dict_rename_var_old_new[
output_arg_name]))
program_desc.flush()
return dict_rename_var_new_old, dict_rename_var_old_new
@switch_to_static_graph
def _build_program_by_desc(program_desc):
prog = framework.Program()
prog.desc = program_desc
prog.blocks = [
framework.Block(prog, i)
for i in six.moves.range(prog.desc.num_blocks())
]
prog._sync_with_cpp()
return prog
def _change_is_test_status(program_desc, is_test):
# change all `is_test` attributes
for i in six.moves.range(program_desc.num_blocks()):
block = program_desc.block(i)
for j in six.moves.range(block.op_size()):
op = block.op(j)
if op.has_attr('is_test'):
op._set_attr('is_test', is_test)
class _ProgramHolder(object):
"""
Holds the execution information of a Program.
_ProgramHolder is the execution unit of TranslatedLayer:
if a TranslatedLayer contains multiple _ProgramHolder objects,
it can execute multiple methods.
_ProgramHolder is an internal concept.
"""
def __init__(self, program_desc):
super(_ProgramHolder, self).__init__()
# input, output, persistable, double_grads var info
self._input_descs = []
self._output_descs = []
self._double_grad_descs = []
self._persistable_names = []
# execution scope
self._inner_scope = core.Scope()
# append suffix var name dict
self._suffix_varname_dict = None
# forward program
self._infer_program_desc = self._preprocess(program_desc)
# forward + backward program
self._train_program_desc = self._append_backward_desc(
self._infer_program_desc)
@property
def infer_program(self):
return self._infer_program_desc
@property
def train_program(self):
return self._train_program_desc
@property
def input_descs(self):
return self._input_descs
@property
def output_descs(self):
return self._output_descs
@property
def persistable_names(self):
return self._persistable_names
@property
def double_grad_descs(self):
return self._double_grad_descs
@property
def scope(self):
return self._inner_scope
def _preprocess(self, program_desc):
# rename persistable variables of 'program_desc'
list_persistable_var = _get_persistable_var_names(program_desc)
rename_new_old_dict, _ = _rename_var_program_desc(program_desc,
list_persistable_var)
# 1. Prune original program
# remove feed, fetch and scale-1 op, remove op_callstack attr
ops_to_remove = []
root_block = program_desc.block(0)
for i in six.moves.range(root_block.op_size()):
op = root_block.op(i)
if op.type() == 'feed':
ops_to_remove.append(i)
feed_var_name = cpt.to_bytes(op.input('X')[0])
root_block._remove_var(feed_var_name)
self._input_descs.append(
root_block.find_var(cpt.to_bytes(op.output('Out')[0])))
elif op.type() == 'scale' and op.output('Out')[0].startswith(
'save_infer_model/scale_'):
ops_to_remove.append(i)
out_var_name = cpt.to_bytes(op.output('Out')[0])
root_block._remove_var(out_var_name)
self._output_descs.append(
root_block.find_var(cpt.to_bytes(op.input('X')[0])))
elif op.type() == 'fetch':
ops_to_remove.append(i)
fetch_var_name = cpt.to_bytes(op.output('Out')[0])
root_block._remove_var(fetch_var_name)
# NOTE: some old pre-train models have no extra scale_op
if not op.input('X')[0].startswith('save_infer_model/scale_'):
self._output_descs.append(
root_block.find_var(cpt.to_bytes(op.input('X')[0])))
else:
if op.has_attr("op_callstack"):
op.remove_attr("op_callstack")
for op_idx in reversed(ops_to_remove):
root_block._remove_op(op_idx, op_idx + 1)
for i in range(program_desc.num_blocks()):
block_desc = program_desc.block(i)
for var_desc in block_desc.all_vars():
if "@GRAD" in var_desc.name():
self._double_grad_descs.append(var_desc)
# 2. Input processing, reverse feed vars
self._input_descs.reverse()
# 3. Output processing, add scale for outputs
tmp_program = _build_program_by_desc(program_desc)
# NOTE: [why need append scale for outputs]
# When dealing with some more complex pre-training models, there
# will be situations where the pre-training model has multiple
# fetch outputs. In the scenario of multiple fetch outputs,
# there is a special case where multiple outputs of the model
# may be on the same branch. According to the user's subsequent
# use, multiple outputs may be associated with multiple branches.
# TranslatedLayer is agnostic to these subsequent operations at
# initialization time, so the gradient accumulation that is
# required on an output node in the middle of a branch would
# not be performed, resulting in an error; for details see pull request:
# [https://github.com/PaddlePaddle/Paddle/pull/24627]
self._append_scale_to_output(tmp_program)
# 4. Persistable vars processing
# - append loaded suffix to persistable vars
# NOTE: [why need to append suffix to persistable vars]
# Dygraph and static graph mode use the same naming mechanism.
# If users load a saved model for fine-tuning, they may add a
# Layer of the same kind as one already in the loaded model to
# enhance the network. For example, the saved model has a linear
# layer, and after loading, a new linear layer is added. This
# would cause duplicate names, so we uniformly append the LOADED
# suffix to the parameters of the loaded model.
self._suffix_varname_dict = _get_loaded_var_new_old(program_desc,
rename_new_old_dict)
# - get persistable var
self._persistable_names = _get_persistable_var_names(program_desc)
return program_desc
@switch_to_static_graph
def _append_scale_to_output(self, program):
# 1. append scale & save var
scale_output_vars = []
with framework.program_guard(program):
for i, out in enumerate(self._output_descs):
var = program.global_block().var(out.name())
var = nn.scale(
var, 1., name="translated_layer/scale_{}".format(i))
scale_output_vars.append(var)
# 2. update output names & descs
for i, var in enumerate(scale_output_vars):
self._output_descs[i] = var.desc
@switch_to_static_graph
def _append_backward_desc(self, infer_program_desc):
program_desc_copy = core.ProgramDesc(infer_program_desc)
# 1. set all `is_test` attributes to False
_change_is_test_status(program_desc_copy, False)
# 2. prepare program and related var
# NOTE: To reuse the backward interfaces, build a Program first.
# Strictly speaking a Program is not needed here, but appending
# backward directly to the program_desc would require rewriting a
# series of methods in backward.py. Therefore, to reuse those methods, build the Program here.
program = _build_program_by_desc(program_desc_copy)
# 3. Add the outputs which are only used for training and not saved in
# the inference program.
for block_idx in six.moves.range(program.num_blocks):
block = program.block(block_idx)
for op in block.ops:
if op.type == "batch_norm":
if "ReserveSpace" not in op.output_names or len(
op.output("ReserveSpace")) == 0:
reserve_space = block.create_var(
name=unique_name.generate_with_ignorable_key(
".".join(["reserve_space", 'tmp'])),
dtype=block.var(op.input("X")[0]).dtype,
type=core.VarDesc.VarType.LOD_TENSOR,
persistable=False,
stop_gradient=True)
op.desc.set_output("ReserveSpace", [reserve_space.name])
targets = []
for out in self._output_descs:
targets.append(program.global_block().var(out.name()))
# 4. append backward
backward.gradients(targets=targets, inputs=[])
return program.desc
# [ TranslatedLayer : Run program in imperative mode ]
#
# DESIGN IDEA: use a special operator `RunProgram` to execute the program inside the operator.
#
# Op's Inputs:
# - the input variable of the user feed
# - the necessary parameters of the network
# Op's Outputs:
# - the output variable of fetch
#
# This op receives a complete program desc, internally creates scope
# and executor, executes this program. Key points:
#
# 1. Data Sharing:
# The varBase of the dynamic graph is not in the scope, so before the op
# executes the program internally, create persistent variables with the
# same name as feed, parameters, and fetch in the scope, and share the
# LoDTensor of the op input.
#
# 2. Forward and Backward Separation:
# Because the dynamic graph op performs forward and backward separately,
# the forward op RunProgram only executes the forward part of the whole program,
# and the backward op RunProgramGrad executes the backward part of the program.
# We cannot split the program into separate forward and backward programs,
# because that would break some control-flow execution logic.
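# A condensed sketch of the resulting op call (see `_run_dygraph` below; the
# input/output/attr names mirror what is actually traced there, values are placeholders):
#
#     framework._dygraph_tracer().trace_op(
#         type='run_program',
#         inputs={'X': input_vars, 'Params': persistable_vars},
#         outputs={'Out': output_vars, 'OutScope': tmp_scope_vec, 'DOut': double_grad_vars},
#         attrs={'global_block': ..., 'start_op_index': 0,
#                'end_op_index': forward_op_count, 'is_test': ..., 'program_id': ...})
#
# Only the ops in [start_op_index, end_op_index) are run in the forward pass; the
# backward part of the same program is executed later by RunProgramGrad.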
# NOTE: [compatible] deal with models saved by save_inference_model,
# which need to get var info from the program desc
def _load_persistable_vars_by_program(model_path,
program_holder,
params_filename=None):
# make sure the path has been checked
persistable_vars = _get_persistable_vars(program_holder.infer_program)
load_var_dict = {}
for each_var in persistable_vars:
orig_each_name = program_holder._suffix_varname_dict[each_var.name()]
if _is_parameter(each_var, program_holder.infer_program):
# create output varbase
new_var = framework.ParamBase(
shape=each_var.shape(),
dtype=each_var.dtype(),
name=each_var.name(),
type=each_var.type(),
persistable=True)
else:
new_var = framework._varbase_creator(
type=each_var.type(),
name=each_var.name(),
shape=each_var.shape(),
dtype=each_var.dtype(),
persistable=True)
if params_filename is None:
framework._dygraph_tracer().trace_op(
type='load',
inputs={},
outputs={'Out': new_var},
attrs={'file_path': os.path.join(model_path, orig_each_name)})
new_var.stop_gradient = False
load_var_dict[each_var.name()] = new_var
if params_filename is not None:
load_var_list = []
dict_name_old_new = {
v: k
for k, v in program_holder._suffix_varname_dict.items()
}
for name in sorted(dict_name_old_new.keys()):
load_var_list.append(load_var_dict[dict_name_old_new[name]])
framework._dygraph_tracer().trace_op(
type='load_combine',
inputs={},
outputs={'Out': load_var_list},
attrs={'file_path': os.path.join(model_path, params_filename)})
for each_var in persistable_vars:
if not _is_parameter(each_var, program_holder.infer_program):
continue
param = load_var_dict[each_var.name()]
param.stop_gradient = False
# NOTE: [Recover stop_gradient information based on the program]
# After loading the model, the stop_gradient information
# of the original variable is lost, but if a parameter does not
# have a corresponding @GRAD variable in the backward program,
# it can be treated as stop_gradient as well.
all_var_names = _get_all_var_names(program_holder.train_program)
for var_name in load_var_dict:
grad_var_name = var_name + core.grad_var_suffix()
if grad_var_name not in all_var_names:
load_var_dict[var_name].stop_gradient = True
return load_var_dict
def _load_persistable_vars(model_path, var_info_path, program_holder,
params_filename):
# 1. load extra var info
with open(var_info_path, 'rb') as f:
extra_var_info = pickle.load(f)
# 2. construct var dict
load_var_dict = dict()
load_var_list = []
inv_suffix_varname_dict = {
value: key
for key, value in program_holder._suffix_varname_dict.items()
}
# NOTE(chenweihang): we need to load persistable vars based on the program,
# because the program may have been pruned by `save_inference_model`, so some
# vars in `extra_var_info` may have been pruned
for name in sorted(inv_suffix_varname_dict):
if name not in extra_var_info:
raise RuntimeError(
"The model to be loaded is not complete. "
"The variable `%s` of the program cannot be found in the loaded model."
% name)
# get suffix var name, see [why need to append suffix to persistable vars]
new_name = inv_suffix_varname_dict[name]
# create output varbase
if extra_var_info[name].get('trainable', None) is not None:
# use default shape and dtype
new_var = framework.ParamBase(
shape=[1], # only to pass check, this shape is not meaningful
dtype=core.VarDesc.VarType.FP32,
name=new_name,
persistable=True)
else:
new_var = framework._varbase_creator(
name=new_name, persistable=True)
new_var.stop_gradient = extra_var_info[name]['stop_gradient']
load_var_dict[new_name] = new_var
load_var_list.append(new_var)
# 3. load all vars
assert params_filename is not None, "params_filename should not be None."
var_file_path = os.path.join(model_path, params_filename)
if not os.path.exists(var_file_path):
if len(extra_var_info) != 0:
raise ValueError("The model to be loaded is incomplete.")
else:
framework._dygraph_tracer().trace_op(
type='load_combine',
inputs={},
outputs={'Out': load_var_list},
attrs={'file_path': var_file_path})
return load_var_dict
# NOTE(chenweihang): to adapt paddle.load to get state_dict
def _remove_varname_suffix(var_dict, program_holder):
no_suffix_var_dict = dict()
for var_name in var_dict:
no_suffix_name = program_holder._suffix_varname_dict[var_name]
no_suffix_var_dict[no_suffix_name] = var_dict[var_name]
return no_suffix_var_dict
def _construct_program_holders(model_path, model_filename=None):
# make sure the path has been checked
program_holder_dict = dict()
if model_filename is not None:
# [compatible] if model_filename is given, only one program can be loaded, as Layer.forward
model_filename = os.path.basename(model_filename)
model_file_path = os.path.join(model_path, model_filename)
model_name = model_filename[:-len(INFER_MODEL_SUFFIX)]
#Load every file that meets the requirements in the directory model_path.
for filename in os.listdir(model_path):
if model_filename == filename:
func_name = 'forward'
model_file_path = os.path.join(model_path, model_filename)
elif filename.endswith(INFER_MODEL_SUFFIX) and filename.startswith(
model_name):
parsing_names = filename[len(model_name):-len(
INFER_MODEL_SUFFIX) + 1].split('.')
if len(parsing_names) == 3 and len(parsing_names[1]) > 0:
func_name = parsing_names[1]
model_file_path = os.path.join(model_path, filename)
else:
continue
else:
continue
program_holder_dict[func_name] = _ProgramHolder(
_load_program_desc(model_file_path))
else:
for _, _, file_names in os.walk(model_path):
for name in file_names:
if 'model' in name:
model_file_path = os.path.join(model_path, name)
method_name = name.strip('_')
if method_name == 'model':
method_name = 'forward'
else:
method_name = method_name.replace('model', '')
program_holder_dict[method_name] = _ProgramHolder(
_load_program_desc(model_file_path))
return program_holder_dict
def _construct_params_and_buffers(model_path,
programs,
params_filename=None,
append_suffix=True):
var_info_filename = str(params_filename) + ".info"
var_info_path = os.path.join(model_path, var_info_filename)
params_path = os.path.join(model_path, str(params_filename))
if os.path.exists(var_info_path):
var_dict = _load_persistable_vars(model_path, var_info_path,
programs['forward'], params_filename)
model_name = params_filename[:-len(INFER_PARAMS_SUFFIX)]
#Load every file that meets the requirements in the directory model_path.
for file_name in os.listdir(model_path):
if file_name.startswith(model_name) and file_name.endswith(
INFER_PARAMS_SUFFIX):
parsing_names = file_name[len(model_name):-len(
INFER_PARAMS_SUFFIX) + 1].split('.')
if len(parsing_names) == 3 and len(parsing_names[1]) > 0:
func_name = parsing_names[1]
else:
continue
else:
continue
var_info_path = os.path.join(model_path, var_info_filename)
var_dict.update(
_load_persistable_vars(model_path, var_info_path, programs[
func_name], file_name))
elif params_filename is not None and not os.path.exists(params_path):
# When the saved model has no parameters, there is only '*.pdmodel'
return dict()
else:
var_dict = _load_persistable_vars_by_program(
model_path, programs['forward'], params_filename)
if not append_suffix:
var_dict = _remove_varname_suffix(var_dict, programs['forward'])
return var_dict
def _run_dygraph(instance, input, program_holder):
# 1. prepare inputs, outputs, attrs
input_vars = []
for i, value in enumerate(input):
if not isinstance(value, (np.ndarray, core.VarBase)):
raise TypeError(
"The type of input in TranslatedLayer must be numpy array or Variable(VarBase), but received %s."
% type(value))
# NOTE: In order to unify the API, firstly convert the input to VarBase
if isinstance(value, np.ndarray):
var = core.VarBase(
value=value,
name=program_holder.input_descs[i].name(),
persistable=False,
place=framework._current_expected_place(),
zero_copy=True)
else:
var = value
# NOTE: we changed var name here,
# but it may be an important name set by user
var.name = program_holder.input_descs[i].name()
input_vars.append(var)
if instance._input_args_names is None:
instance._input_args_names = [
ins.name() for ins in program_holder.input_descs
]
persistable_vars = []
for var_name in program_holder.persistable_names:
dy_var_name = instance._persistable_var_name_dict[var_name]
if dy_var_name in instance._parameters:
persistable_vars.append(instance._parameters[dy_var_name])
elif dy_var_name in instance._buffers:
persistable_vars.append(instance._buffers[dy_var_name])
else:
raise ValueError(
"The persistable variable %s does not exist in current TranslatedLayer."
% var_name)
output_vars = []
for var_desc in program_holder.output_descs:
var = core.VarBase(var_desc.dtype(),
var_desc.shape(),
var_desc.name(), var_desc.type(), False)
output_vars.append(var)
# hold forward variables
tmp_scope_vec = core.VarBase(core.VarDesc.VarType.FP32, [],
"program_out_scope",
core.VarDesc.VarType.STEP_SCOPES, True)
tmp_scope_vec.value().set_scope(program_holder.scope)
double_grad_vars = []
for var_desc in program_holder.double_grad_descs:
var = core.VarBase(var_desc.dtype(),
var_desc.shape(),
var_desc.name(), var_desc.type(), False)
double_grad_vars.append(var)
if len(double_grad_vars) == 0:
double_grad_vars = [
core.VarBase(
value=[1],
name='Fake_var',
place=framework._current_expected_place())
]
# 2. run program by op
trace_program = program_holder.infer_program if instance._is_test else program_holder.train_program
end_op_index = program_holder.infer_program.block(0).op_size()
framework._dygraph_tracer().trace_op(
type='run_program',
inputs={'X': input_vars,
'Params': persistable_vars},
outputs={
'Out': output_vars,
'OutScope': tmp_scope_vec,
'DOut': double_grad_vars
},
attrs={
'global_block': trace_program.block(0),
'start_op_index': 0,
'end_op_index': end_op_index,
'is_test': instance._is_test,
'program_id': _hash_with_id(trace_program)
})
# NOTE: [ why need to set param's gradient type here ]
# If the user sets sparse gradient mode, the param's gradient
# will be SelectedRows, not LoDTensor. But the tracer just
# creates the param's grad VarBase from the forward VarBase (LoDTensor).
# If we don't change the grad_var type here, RunProgramOp would need to
# transform SelectedRows to LoDTensor forcibly, which may not
# be the result the user wants.
for persistable_var in persistable_vars:
grad_var_name = persistable_var.name + core.grad_var_suffix()
grad_var = trace_program.block(0).find_var(cpt.to_bytes(grad_var_name))
# NOTE: cannot find var desc maybe not problem,
# such as in batch_norm
if grad_var is None:
continue
persistable_var._set_grad_type(grad_var.type())
drop_scope_if_no_grad(instance, tmp_scope_vec)
# 3. prepare output, keep same form with inputs
outs = output_vars
if len(output_vars) == 1:
outs = output_vars[0]
return outs
def drop_scope_if_no_grad(instance, scope_vec):
tracer = framework._dygraph_tracer()
if (not instance._is_test) and (not tracer._has_grad):
scope_vec.value().get_scope().drop_kids()
def _run_static_graph(input, program_holder, trace_program):
main_program = framework.default_main_program()
param_var_names = _get_persistable_var_names(trace_program)
_, dict_rename_var_old_new = _rename_var_program_desc(
trace_program, exclude=param_var_names)
trace_program.flush()
output_names = [var.name() for var in program_holder.output_descs]
# append blocks from 'trace_program'
_append_block(main_program, trace_program, program_holder, input,
dict_rename_var_old_new)
main_program._sync_with_cpp()
outs = _get_output_from_program(main_program, program_holder,
dict_rename_var_old_new)
if len(outs) == 1:
outs = outs[0]
return outs
def _collect_current_and_parent_var(program, block_idx):
'''
Get variables in current block and its parent block.
Args:
program(Program): The program containing the current block.
block_idx(int): index of current block.
Returns:
List: list of variables.
'''
vars = []
if block_idx < 0:
return vars
for var in program.block(block_idx).vars:
vars.append(var)
parent_idx = program.block(block_idx).parent_idx
if parent_idx > -1:
vars += _collect_current_and_parent_var(program, parent_idx)
return vars
def _append_block(dest_program,
src_program_desc,
program_holder,
input_variables,
dict_rename_var_old_new=None):
'''
Append Variables and Operators in 'src_program_desc' to dest_program.
Args:
dest_program(Program): Variables and Operators are appended to it.
src_program_desc(ProgramDesc): Variables in it will be appended to 'dest_program'.
program_holder(_ProgramHolder): program_holder of TranslatedLayer
input_variables(list): list of input variables
dict_rename_var_old_new(None|dict): When '_rename_var_program_desc' was used,
this maps each variable's old name (before modification) to its new name.
'''
origin_block_idx = dest_program.current_block_idx
param_var_names = _collect_current_and_parent_var(dest_program,
origin_block_idx)
append_var_from_block_desc_static(
dest_program.block(origin_block_idx),
src_program_desc.block(0),
exclude=param_var_names)
name_inp_desc = [inp.name() for inp in program_holder.input_descs]
input_names = [inp.name for inp in input_variables]
if len(name_inp_desc) != len(input_names):
raise ValueError(
"The number of input is invalid, expected {}, but received {}.".
format(len(name_inp_desc), len(input_names)))
for i, out_name in enumerate(name_inp_desc):
if dict_rename_var_old_new:
out_name = dict_rename_var_old_new[out_name]
dest_program.block(origin_block_idx).append_op(
type='assign',
inputs={'X': [input_names[i]]},
outputs={'Out': [out_name]})
append_ops = append_op_from_block_desc_static(
dest_program.block(origin_block_idx), src_program_desc.block(0))
dest_program._sync_with_cpp()
offset_block_idx = dest_program.num_blocks - 1
if src_program_desc.num_blocks() > 1:
for src_block_idx in range(1, src_program_desc.num_blocks()):
src_block = src_program_desc.block(src_block_idx)
src_parent_idx = src_block.parent
if src_parent_idx > 0:
parent_idx = offset_block_idx + src_parent_idx
else:
parent_idx = origin_block_idx
dest_block = dest_program._create_block(parent_idx=parent_idx)
append_var_from_block_desc_static(
dest_block, src_block, exclude=param_var_names)
append_ops += append_op_from_block_desc_static(dest_block,
src_block)
dest_program._sync_with_cpp()
for op in append_ops:
if op.has_attr('sub_block'):
sub = op.attr('sub_block')
if isinstance(sub, framework.core.BlockDesc):
origin_id = sub.id
if isinstance(sub, framework.Block):
origin_id = sub.idx
op._set_attr('sub_block',
dest_program.block(offset_block_idx + origin_id))
dest_program._sync_with_cpp()
dest_program.current_block_idx = origin_block_idx
def _get_output_from_program(program,
program_holder,
dict_rename_var_old_new=None):
"""
Get the output variables of 'program' according to 'program_holder'.
"""
outs = list()
for var in program_holder.output_descs:
for idx in range(program.num_blocks):
vars = program.block(idx).vars
var_name = var.name()
if dict_rename_var_old_new:
var_name = dict_rename_var_old_new[var_name]
if var_name in vars:
out = vars[var_name]
if out not in outs:
outs.append(out)
return outs
def append_op_from_block_desc_static(block, src_block_desc):
"""
Append the Operators of 'src_block_desc' to the current block.
Args:
block(Block): the block to which the OPs of 'src_block_desc' are appended.
src_block_desc(BlockDesc): the block desc whose OPs are appended.
Returns:
List: list of the OPs appended to the current block.
"""
ops = []
for i in range(src_block_desc.op_size()):
ops.append(append_op_from_desc_static(block, src_block_desc.op(i)))
return ops
def append_op_from_desc_static(block, op_desc):
"""
Append an Operator to 'block' according to 'op_desc'.
Args:
block(Block): the block to which the OP is appended.
op_desc(OpDesc): create OP according to it.
Returns:
Operator: OP appended to 'block'.
"""
op_type = op_desc.type()
op_append = block.desc.append_op()
op_append.copy_from(op_desc)
op = framework.Operator(
block=block,
desc=op_append,
type=op_type,
inputs=None,
outputs=None,
attrs=None)
block.ops.append(op)
return op
def append_var_from_block_desc_static(block,
src_block_desc,
include=None,
exclude=None):
"""
Append the Variables of 'src_block_desc' to the current block.
If 'include' is not `None`, variables that are not in include are not appended.
If 'exclude' is not `None`, variables that are in exclude are not appended.
Args:
block(Block): the block to which the Variables of 'src_block_desc' are appended.
src_block_desc(BlockDesc): the block desc whose variables are appended.
include(List): list of names of variables.
exclude(List): list of names of variables.
Returns:
List: list of the variables appended to the current block.
"""
vars_append = []
for var_desc in src_block_desc.all_vars():
var_desc_name = var_desc.name()
should_append = (include is None or var_desc_name in include) and (
exclude is None or var_desc_name not in exclude)
if not block.has_var(var_desc_name) and should_append:
var_type = var_desc.type()
if var_type in [
core.VarDesc.VarType.SELECTED_ROWS,
core.VarDesc.VarType.LOD_TENSOR,
core.VarDesc.VarType.LOD_TENSOR_ARRAY
]:
data_type = var_desc.dtype()
var_shape = var_desc.shape()
else:
data_type = None
var_shape = None
if var_type in [
core.VarDesc.VarType.LOD_TENSOR,
core.VarDesc.VarType.LOD_TENSOR_ARRAY
]:
lod_level = var_desc.lod_level()
else:
lod_level = None
if var_desc.persistable():
current_block = block.program.global_block()
else:
current_block = block
vars_append.append(
current_block.create_var(
name=var_desc.name(),
dtype=data_type,
type=var_type,
shape=var_shape,
lod_level=lod_level,
persistable=var_desc.persistable(),
set_need_check_feed=var_desc.need_check_feed()))
return vars_append
class TranslatedLayer(layers.Layer):
"""
TranslatedLayer is a ``paddle.nn.Layer`` for holding the model
loaded by :ref:`api_paddle_jit_load` . It can be used like a
general Layer object in eval or train mode.
.. note::
TranslatedLayer objects should not be created via the constructor; they can only be loaded and constructed by :ref:`api_paddle_jit_load` .
Examples:
.. code-block:: python
import numpy as np
import paddle
import paddle.nn as nn
import paddle.optimizer as opt
BATCH_SIZE = 16
BATCH_NUM = 4
EPOCH_NUM = 4
IMAGE_SIZE = 784
CLASS_NUM = 10
# define a random dataset
class RandomDataset(paddle.io.Dataset):
def __init__(self, num_samples):
self.num_samples = num_samples
def __getitem__(self, idx):
image = np.random.random([IMAGE_SIZE]).astype('float32')
label = np.random.randint(0, CLASS_NUM - 1, (1, )).astype('int64')
return image, label
def __len__(self):
return self.num_samples
class LinearNet(nn.Layer):
def __init__(self):
super(LinearNet, self).__init__()
self._linear = nn.Linear(IMAGE_SIZE, CLASS_NUM)
@paddle.jit.to_static
def forward(self, x):
return self._linear(x)
def train(layer, loader, loss_fn, opt):
for epoch_id in range(EPOCH_NUM):
for batch_id, (image, label) in enumerate(loader()):
out = layer(image)
loss = loss_fn(out, label)
loss.backward()
opt.step()
opt.clear_grad()
print("Epoch {} batch {}: loss = {}".format(
epoch_id, batch_id, np.mean(loss.numpy())))
# 1. train & save model.
# create network
layer = LinearNet()
loss_fn = nn.CrossEntropyLoss()
adam = opt.Adam(learning_rate=0.001, parameters=layer.parameters())
# create data loader
dataset = RandomDataset(BATCH_NUM * BATCH_SIZE)
loader = paddle.io.DataLoader(dataset,
batch_size=BATCH_SIZE,
shuffle=True,
drop_last=True,
num_workers=2)
# train
train(layer, loader, loss_fn, adam)
# save
model_path = "linear.example.model"
paddle.jit.save(layer, model_path)
# 2. load model as TranslatedLayer
# load
translated_layer = paddle.jit.load(model_path)
# inference
translated_layer.eval()
x = paddle.randn([1, IMAGE_SIZE], 'float32')
pred = translated_layer(x)
# fine-tune
translated_layer.train()
adam = opt.Adam(learning_rate=0.001, parameters=translated_layer.parameters())
train(translated_layer, loader, loss_fn, adam)
"""
def __init__(self, programs, persistable_vars):
super(TranslatedLayer, self).__init__()
if not isinstance(programs, dict):
raise TypeError(
"TranslatedLayer need to use _ProgramHolder's dict for initialization."
)
if not isinstance(persistable_vars, dict):
raise TypeError(
"TranslatedLayer need to use persistable variable dict for initialization."
)
self._program_holder_dict = programs
# NOTE(chenweihang): [ why not use var name directly? ]
# When adding a parameter or buffer to a Layer via the following APIs,
# the variable name can't contain `.`, because that may cause an
# AttributeError when accessing the newly added parameter or buffer
# in the form of `self.**.**`, but the ParamBase or VarBase
# name originally contains `.`, such as `linear_0.w_0`, so here we
# need to generate a new var name for each var
self._persistable_var_name_dict = dict()
# the var names held by the TranslatedLayer object are numbered starting from 0
with unique_name.guard():
for name, var in persistable_vars.items():
if isinstance(var, framework.ParamBase):
dy_name = _generate_unique_var_name(PARAMETER_NAME_PREFIX)
self._persistable_var_name_dict[name] = dy_name
self.add_parameter(dy_name, var)
elif isinstance(var, core.VarBase):
dy_name = _generate_unique_var_name(BUFFER_NAME_PREFIX)
self._persistable_var_name_dict[name] = dy_name
self.register_buffer(dy_name, var)
else:
raise TypeError(
"Adding persistent variable which to layer is not supported now"
)
self._is_test = True
self._input_args_names = None
@staticmethod
@framework.dygraph_only
def _construct(model_path, configs=None):
# 0. dir and filename check
model_path = os.path.normpath(model_path)
if not os.path.isdir(model_path):
raise ValueError("There is no directory named '%s'" % model_path)
model_filename = None
params_filename = None
if configs is not None:
model_filename = configs.model_filename
params_filename = configs.params_filename
# 1. load program desc & construct _ProgramHolder
programs = _construct_program_holders(model_path, model_filename)
# 2. load layer parameters & buffers
persistable_vars = _construct_params_and_buffers(model_path, programs,
params_filename)
# 3. construct TranslatedLayer object
translated_layer = TranslatedLayer(programs, persistable_vars)
# 4. create TranslatedLayer's execution method
for method_name, program_holder in programs.items():
if translated_layer._input_args_names is None:
translated_layer._input_args_names = [
ins.name() for ins in program_holder.input_descs
]
setattr(TranslatedLayer, method_name,
TranslatedLayer._execution_method_creator(method_name,
program_holder))
# 5. set TranslatedLayer's default mode to eval
translated_layer.eval()
return translated_layer
@staticmethod
def _execution_method_creator(method_name, program_holder):
def __i_m_p_l__(self, *input):
program_holder = self._program_holder_dict[__i_m_p_l__.__name__]
# When the layer is being saved by jit.save, this runs in static graph mode;
# when the model is used for inference in dygraph mode, run it dynamically.
if in_dygraph_mode():
return _run_dygraph(self, input, program_holder)
else:
# NOTE(weixin): [ why not use 'program_holder.infer_program' directly? ]
# If '_run_static_graph(input, program_holder, program_holder.infer_program)' is used,
# '_run_static_graph' modifies the 'ProgramDesc', and 'OpDesc.op_size()' will return a very large wrong number.
# A segmentation fault may occur if 'p = ProgramDesc(program_holder.infer_program)' is used.
p = framework.Program._construct_from_desc(
core.ProgramDesc(program_holder.infer_program))
return _run_static_graph(input, program_holder, p.desc)
__i_m_p_l__.__name__ = method_name
return __i_m_p_l__
def train(self):
self._is_test = False
self.training = True
def eval(self):
self._is_test = True
self.training = False
def program(self, method_name='forward'):
"""
Gets translated program of specified method.
Args:
- method_name (string): method name corresponding to the program
to be obtained. Default: 'forward'.
Returns:
Program
Examples:
.. code-block:: python
import numpy as np
import paddle
import paddle.nn as nn
import paddle.optimizer as opt
BATCH_SIZE = 16
BATCH_NUM = 4
EPOCH_NUM = 4
IMAGE_SIZE = 784
CLASS_NUM = 10
# define a random dataset
class RandomDataset(paddle.io.Dataset):
def __init__(self, num_samples):
self.num_samples = num_samples
def __getitem__(self, idx):
image = np.random.random([IMAGE_SIZE]).astype('float32')
label = np.random.randint(0, CLASS_NUM - 1, (1, )).astype('int64')
return image, label
def __len__(self):
return self.num_samples
class LinearNet(nn.Layer):
def __init__(self):
super(LinearNet, self).__init__()
self._linear = nn.Linear(IMAGE_SIZE, CLASS_NUM)
@paddle.jit.to_static
def forward(self, x):
return self._linear(x)
def train(layer, loader, loss_fn, opt):
for epoch_id in range(EPOCH_NUM):
for batch_id, (image, label) in enumerate(loader()):
out = layer(image)
loss = loss_fn(out, label)
loss.backward()
opt.step()
opt.clear_grad()
print("Epoch {} batch {}: loss = {}".format(
epoch_id, batch_id, np.mean(loss.numpy())))
# create network
layer = LinearNet()
loss_fn = nn.CrossEntropyLoss()
adam = opt.Adam(learning_rate=0.001, parameters=layer.parameters())
# create data loader
dataset = RandomDataset(BATCH_NUM * BATCH_SIZE)
loader = paddle.io.DataLoader(dataset,
batch_size=BATCH_SIZE,
shuffle=True,
drop_last=True,
num_workers=2)
# train
train(layer, loader, loss_fn, adam)
# save
model_path = "linear.example.model"
paddle.jit.save(layer, model_path)
# load
translated_layer = paddle.jit.load(model_path)
# get program
program = translated_layer.program()
"""
# 1. get program holder
program_holder = self._get_program_holder(method_name)
# 2. get inference program desc
program_desc = program_holder.infer_program
# 3. construct program
program = _build_program_by_desc(program_desc)
return program
def _get_program_holder(self, method_name='forward'):
program_holder = self._program_holder_dict.get(method_name, None)
if program_holder is None:
raise ValueError(
"The method `%s` does not exist in loaded TranslatedLayer." %
method_name)
return program_holder
def _input_spec(self, method_name='forward'):
# 1. get program holder
program_holder = self._get_program_holder(method_name)
# 2. build input spec by input desc
input_spec = []
for var_desc in program_holder.input_descs:
spec = paddle.static.InputSpec(
shape=var_desc.shape(),
dtype=var_desc.dtype(),
name=var_desc.name())
input_spec.append(spec)
return input_spec
def _output_spec(self, method_name='forward'):
# 1. get program holder
program_holder = self._get_program_holder(method_name)
# 2. build output spec by output desc
output_spec = []
for var_desc in program_holder.output_descs:
# NOTE(chenweihang): InputSpec describes a tensor, not just input.
# Maybe the name is not good enough. Here we use InputSpec to
# construct the description of Output tensor
spec = paddle.static.InputSpec(
shape=var_desc.shape(),
dtype=var_desc.dtype(),
name=var_desc.name())
output_spec.append(spec)
return output_spec
| luotao1/Paddle | python/paddle/fluid/dygraph/io.py | Python | apache-2.0 | 56,803 |
import re
import SourceModel.SM_CaseStmt
import SourceModel.SM_Class
import SourceModel.SM_Constants as SMCONSTS
import SourceModel.SM_Define
import SourceModel.SM_Element
import SourceModel.SM_Exec
import SourceModel.SM_FileResource
import SourceModel.SM_IfStmt
import SourceModel.SM_IncludeResource
import SourceModel.SM_LCOM
import SourceModel.SM_Node
import SourceModel.SM_PackageResource
import SourceModel.SM_ServiceResource
import SourceModel.SM_User
from SmellDetector import Utilities
class SM_File:
def __init__(self, file=""):
if file != "":
curFile = open(file, 'rt', errors='ignore')
self.fileText = curFile.read()
self.resourceBodyText = self.fileText
self.fileName = file
curFile.close()
def setText(self, text):
self.fileText = text
def getNoOfClassDeclarations(self):
return self.countEntityDeclaration(SMCONSTS.CLASS_REGEX, "class")
def getNoOfDefineDeclarations(self):
return self.countEntityDeclaration(SMCONSTS.DEFINE_REGEX, "define")
def getNoOfFileDeclarations(self):
return self.countEntityDeclaration(SMCONSTS.FILE_REGEX, "file")
def getNoOfPackageDeclarations(self):
return self.countEntityDeclaration(SMCONSTS.PACKAGE_REGEX, "package")
def getNoOfServiceDeclarations(self):
return self.countEntityDeclaration(SMCONSTS.SERVICE_REGEX, "service")
def getNoOfExecDeclarations(self):
return self.countEntityDeclaration(SMCONSTS.EXEC_REGEX, "exec")
def getLinesOfCode(self):
counter = self.countEntityDeclaration(SMCONSTS.LOC_REGEX, "newLine")
if counter > 0:
return counter+1
if (len(self.fileText) > 0):
return 1
return 0
def getLinesOfCodeWithoutComments(self):
totalLines = self.getLinesOfCode()
totalCommentsLines = self.getLinesOfComments()
return totalLines - totalCommentsLines
def getLinesOfComments(self):
counter = self.countEntityDeclaration(SMCONSTS.COMMENT_REGEX, "newLine")
return counter
def countEntityDeclaration(self, regEx, entityType):
compiledRE = re.compile(regEx)
Utilities.myPrint("Identified " + entityType + " declarations: " + str(compiledRE.findall(self.fileText)) + \
" Size: " + str(len(compiledRE.findall(self.fileText))))
return len(compiledRE.findall(self.fileText))
def getFileResourceList(self):
compiledRE = re.compile(SMCONSTS.FILE_REGEX)
fileResourceList = []
for match in (compiledRE.findall(self.fileText)):
fileResourceText = self.extractResourceText(match)
Utilities.myPrint("Extracted file declaration: " + fileResourceText)
fileResourceObj = SourceModel.SM_FileResource.SM_FileResource(fileResourceText)
fileResourceList.append(fileResourceObj)
return fileResourceList
def extractResourceText(self, initialString):
index = self.fileText.find(initialString)
if index < 0:
return initialString
compiledRE1 = re.compile(r'\{')
compiledRE2 = re.compile(r'\}')
curBracketCount = len(compiledRE1.findall(initialString)) - len(compiledRE2.findall(initialString))
curIndex = index + len(initialString) + 1
if curBracketCount == 0:
# Find the first '{', since there is none yet; this can happen with a multi-line definition
found = False
while curIndex < len(self.fileText) and not found:
if self.fileText[curIndex] == '{':
found = True
curBracketCount = 1
curIndex += 1
while curBracketCount > 0 and curIndex < len(self.fileText):
if self.fileText[curIndex] == '}':
curBracketCount -= 1
if self.fileText[curIndex] == '{':
curBracketCount += 1
curIndex +=1
return self.fileText[index:curIndex]
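# A small illustration of the brace matching above (hypothetical Puppet snippet):
#
#   fileText = "file { '/tmp/a':\n  ensure => present,\n}\nservice { 'b': }"
#   extractResourceText("file {")  ->  "file { '/tmp/a':\n  ensure => present,\n}"
#
# i.e. the result runs from the regex match up to and including the '}' that
# balances the first '{' found at or after the end of the match.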
def getServiceResourceList(self):
compiledRE = re.compile(SMCONSTS.SERVICE_REGEX)
serviceResourceList = []
for match in (compiledRE.findall(self.fileText)):
serviceResourceText = self.extractResourceText(match)
Utilities.myPrint("Extracted service declaration: " + serviceResourceText)
serviceResourceObj = SourceModel.SM_ServiceResource.SM_ServiceResource(serviceResourceText)
serviceResourceList.append(serviceResourceObj)
return serviceResourceList
def getPackageResourceList(self):
compiledRE = re.compile(SMCONSTS.PACKAGE_REGEX)
packageResourceList = []
for match in (compiledRE.findall(self.fileText)):
packageResourceText = self.extractResourceText(match)
Utilities.myPrint("Extracted package declaration: " + packageResourceText)
packageResourceObj = SourceModel.SM_PackageResource.SM_PackageResource(packageResourceText)
packageResourceList.append(packageResourceObj)
return packageResourceList
def getClassDeclarationList(self):
compiledRE = re.compile(SMCONSTS.CLASS_REGEX)
compiledClassNameRE = re.compile(SMCONSTS.CLASS_NAME_REGEX)
classList = []
for match in compiledRE.findall(self.fileText):
className = compiledClassNameRE.findall(match)[0]
#print("Class name: %s" % (className))
classText = self.extractResourceText(match)
Utilities.myPrint("Extracted class declaration: " + classText)
classObj = SourceModel.SM_Class.SM_Class(classText, className)
classList.append(classObj)
return classList
def getDefineDeclarationList(self):
compiledRE = re.compile(SMCONSTS.DEFINE_REGEX)
defineList = []
for match in compiledRE.findall(self.fileText):
defineText, s, e = self.extractElementText(match)
Utilities.myPrint("Extracted define declaration: " + defineText)
defineObj = SourceModel.SM_Define.SM_Define(defineText)
defineList.append(defineObj)
return defineList
def getLCOM(self):
return SourceModel.SM_LCOM.getLCOM(self.getOuterElementList())
def getBodyTextSize(self):
loc = self.getLinesOfCode()
return loc, len(self.resourceBodyText)
def getOuterClassList(self):
outerElementList = self.getOuterElementList()
classList = []
for element in outerElementList:
if type(element) is SourceModel.SM_Class.SM_Class:
classList.append(element)
return classList
def getOuterDefineList(self):
outerElementList = self.getOuterElementList()
defineList = []
for element in outerElementList:
if type(element) is SourceModel.SM_Define.SM_Define:
defineList.append(element)
return defineList
# exElementList = []
# exElementList.extend(self.getElementList(SMCONSTS.DEFINE_REGEX))
# filteredList = self.filterOutInnerElements(exElementList)
# return filteredList
def getOuterElementList(self):
exElementList = []
exElementList.extend(self.getElementList(SMCONSTS.CLASS_REGEX))
exElementList.extend(self.getElementList(SMCONSTS.SERVICE_REGEX))
exElementList.extend(self.getElementList(SMCONSTS.CASE_REGEX))
exElementList.extend(self.getElementList(SMCONSTS.DEFINE_REGEX))
exElementList.extend(self.getElementList(SMCONSTS.EXEC_REGEX))
exElementList.extend(self.getElementList(SMCONSTS.FILE_REGEX))
exElementList.extend(self.getElementList(SMCONSTS.IF_REGEX))
exElementList.extend(self.getElementList(SMCONSTS.PACKAGE_REGEX))
exElementList.extend(self.getElementList(SMCONSTS.USER_REGEX))
filteredList = self.filterOutInnerElements(exElementList)
return filteredList
def getElementList(self, regex):
compiledRE = re.compile(regex)
exElementList = []
for str in (compiledRE.findall(self.fileText)):
elementText, startIndex, endIndex = self.extractElementText(str)
elementObj = self.getElementObject(elementText, regex)
exElementList.append(ExElement(elementObj, startIndex, endIndex))
return exElementList
# TODO: Handle variables
# Unwrap classes from list
def getIncludeClasses(self):
compiledIncludeRE = re.compile(SMCONSTS.DECLARE_INCLUDE_REGEX)
compiledResourceRE = re.compile(SMCONSTS.DECLARE_RESOURCE_REGEX)
declareClassList = []
declareClassName = ""
for match in (compiledIncludeRE.findall(self.fileText)):
#print(match)
declareClassText = match
cleanInclude = re.sub(r'^\s*include \[?(.+)\]?\s*$', r'\1', declareClassText)
#print("Clean include: %s" % cleanInclude)
class_name = r'(?:Class\[)?\'?\:{0,2}([\w\d\:\-_\$]+)\'?\]?'
classRE = re.compile(class_name)
if ',' in cleanInclude:
classes = cleanInclude.split(',')
for c in classes:
for m in classRE.findall(c):
# Find a variable value in text
if m.startswith('$'):
#print("Variable: %s" % m)
varRE = r'(?:^|\n)\s*\$[\w\d\-_]+\s?=\s?\'?\"?([\w\d\-_]+)\'?\"?\n'
compiledVarRE = re.compile(varRE)
for v in (compiledVarRE.findall(self.fileText)):
#print(v)
declareClassName = v
Utilities.myPrint("Extracted include class declaration: " + declareClassText)
declareResourceObj = SourceModel.SM_IncludeResource.SM_IncludeResource(declareClassText, declareClassName)
declareClassList.append(declareResourceObj)
break
#print("Variable %s value)
#print("Extracted class name: %s" % m)
else:
declareClassName = m
Utilities.myPrint("Extracted include class declaration: " + declareClassText)
declareResourceObj = SourceModel.SM_IncludeResource.SM_IncludeResource(declareClassText, declareClassName)
declareClassList.append(declareResourceObj)
else:
for c in classRE.findall(cleanInclude):
#print("Extracted class name: %s" % c)
declareClassName = c
#print("%s" % includeClassText)
Utilities.myPrint("Extracted include class declaration: " + declareClassText)
declareResourceObj = SourceModel.SM_IncludeResource.SM_IncludeResource(declareClassText, declareClassName)
declareClassList.append(declareResourceObj)
for match in (compiledResourceRE.findall(self.fileText)):
#print(match)
declareClassText = match
declareClassName = declareClassText
#print("%s" % includeClassText)
Utilities.myPrint("Extracted resource class declaration: " + declareClassText)
declareResourceObj = SourceModel.SM_IncludeResource.SM_IncludeResource(declareClassText, declareClassName)
declareClassList.append(declareResourceObj)
return declareClassList
def extractElementText(self, initialString):
compiledRE1 = re.compile(r'\{')
compiledRE2 = re.compile(r'\}')
curBracketCount = len(compiledRE1.findall(initialString)) - len(compiledRE2.findall(initialString))
index = self.fileText.find(initialString)
if index < 0:
return initialString, 0, len(initialString)
curIndex = index + len(initialString) + 1
if curBracketCount == 0:
# Find the corresponding ')' first, to avoid errors where curly brackets are matched
# inside the parameters themselves.
found = False
while curIndex < len(self.fileText) and not found:
if self.fileText[curIndex] == ')':
found = True
curIndex +=1
# Find the first '{', since there is none yet; this can happen with a multi-line class definition
found = False
while curIndex < len(self.fileText) and not found:
if self.fileText[curIndex] == '{':
found = True
curBracketCount = 1
curIndex += 1
while curBracketCount > 0 and curIndex < len(self.fileText):
if self.fileText[curIndex] == '}':
curBracketCount -= 1
if self.fileText[curIndex] == '{':
curBracketCount += 1
curIndex +=1
return self.fileText[index:curIndex], index, curIndex
def getElementObject(self, elementText, regex):
if regex == SMCONSTS.CLASS_REGEX:
return SourceModel.SM_Class.SM_Class(elementText)
if regex == SMCONSTS.DEFINE_REGEX:
return SourceModel.SM_Define.SM_Define(elementText)
if regex == SMCONSTS.EXEC_REGEX:
return SourceModel.SM_Exec.SM_Exec(elementText)
if regex == SMCONSTS.FILE_REGEX:
return SourceModel.SM_FileResource.SM_FileResource(elementText)
if regex == SMCONSTS.PACKAGE_REGEX:
return SourceModel.SM_PackageResource.SM_PackageResource(elementText)
if regex == SMCONSTS.SERVICE_REGEX:
return SourceModel.SM_ServiceResource.SM_ServiceResource(elementText)
if regex == SMCONSTS.DECLARE_INCLUDE_REGEX or regex == SMCONSTS.DECLARE_RESOURCE_REGEX:
return SourceModel.SM_IncludeResource.SM_IncludeResource(elementText)
if regex == SMCONSTS.IF_REGEX:
return SourceModel.SM_IfStmt.SM_IfStmt(elementText)
if regex == SMCONSTS.CASE_REGEX:
return SourceModel.SM_CaseStmt.SM_CaseStmt(elementText)
if regex == SMCONSTS.USER_REGEX:
return SourceModel.SM_User.SM_User(elementText)
def sort(self, exClassElementList):
result = []
while len(exClassElementList) > 0:
largest = self.findLargest(exClassElementList)
result.append(largest)
exClassElementList.remove(largest)
return result
def findLargest(self, exClassElementList):
if len(exClassElementList) > 0:
largest = exClassElementList[0]
for item in exClassElementList:
if (item.endIndex - item.startIndex) > (largest.endIndex - largest.startIndex):
largest = item
return largest
def filterOutInnerElements(self, exClassElementList):
filteredList = []
exClassElementList = self.sort(exClassElementList)
for element in exClassElementList:
found = False
for filteredItem in filteredList:
if element.startIndex >= filteredItem.startIndex and element.endIndex <= filteredItem.endIndex:
found = True
break
if found == False:
filteredList.append(element)
classElementList = []
for item in filteredList:
classElementList.append(item.elementObj)
return classElementList
def getMaxNestingDepth(self):
maxNestingDepth = 0
curIndex = 0
curBracketCount = 0
while curIndex < len(self.fileText):
if self.fileText[curIndex] == '}':
curBracketCount -= 1
if self.fileText[curIndex] == '{':
curBracketCount += 1
if curBracketCount > maxNestingDepth:
maxNestingDepth = curBracketCount
curIndex +=1
return maxNestingDepth
def getHardCodedStatments(self):
compiledRE = re.compile(SMCONSTS.HARDCODED_VALUE_REGEX)
hardCodedStmtList = compiledRE.findall(self.fileText)
filteredList = []
for item in hardCodedStmtList:
#print(item)
            if not ("$" in item or "Package" in item or "Service" in item
                    or "File" in item):
filteredList.append(item)
#print(filteredList)
return filteredList
def getClassHierarchyInfo(self):
classDecls = self.getClassDeclarationList()
classList = []
parentClassList = []
for aClass in classDecls:
classes, pClasses = aClass.getClassHierarchyInfo()
if len(classes) > 0:
classList.append(classes)
if len(pClasses) > 0:
parentClassList.append(pClasses)
return classList, parentClassList
def getNodeDeclarations(self):
compiledRE = re.compile(SMCONSTS.NODE_REGEX)
nodeResourceList = []
for match in (compiledRE.findall(self.fileText)):
nodeResourceText = self.extractResourceText(match)
Utilities.myPrint("Extracted node declaration: " + nodeResourceText)
nodeResourceObj = SourceModel.SM_Node.SM_Node(nodeResourceText)
nodeResourceList.append(nodeResourceObj)
return nodeResourceList
class ExElement(object):
def __init__(self, elementObj, startIndex, endIndex):
self.elementObj = elementObj
self.startIndex = startIndex
self.endIndex = endIndex
|
tushartushar/Puppeteer
|
SourceModel/SM_File.py
|
Python
|
apache-2.0
| 17,577
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from gaebusiness.gaeutil import SaveCommand, ModelSearchCommand
from gaeforms.ndb.form import ModelForm
from gaegraph.business_base import UpdateNode
from categoria.model import Categoria
class CategoriaForm(ModelForm):
_model_class = Categoria
_include = [Categoria.nome, Categoria.categoria_pai]
|
iwilliam317/tekton
|
backend/apps/categoria/validation.py
|
Python
|
mit
| 382
|
from __future__ import division
import numpy as np
from numpy.testing import run_module_suite
from scipy.sparse import csr_matrix
from sklearn.utils.testing import (assert_array_equal, assert_almost_equal,
assert_false, assert_raises, assert_equal,
assert_allclose, assert_greater)
from sklearn.feature_selection.mutual_info_ import (
mutual_info_regression, mutual_info_classif, _compute_mi)
def test_compute_mi_dd():
    # In the discrete case, computations are straightforward and can be done
    # by hand on the given vectors.
x = np.array([0, 1, 1, 0, 0])
y = np.array([1, 0, 0, 0, 1])
H_x = H_y = -(3/5) * np.log(3/5) - (2/5) * np.log(2/5)
H_xy = -1/5 * np.log(1/5) - 2/5 * np.log(2/5) - 2/5 * np.log(2/5)
I_xy = H_x + H_y - H_xy
assert_almost_equal(_compute_mi(x, y, True, True), I_xy)
def test_compute_mi_cc():
    # For two continuous variables a good approach is to test on a bivariate
    # normal distribution, where the mutual information is known.
# Mean of the distribution, irrelevant for mutual information.
mean = np.zeros(2)
# Setup covariance matrix with correlation coeff. equal 0.5.
sigma_1 = 1
sigma_2 = 10
corr = 0.5
cov = np.array([
[sigma_1**2, corr * sigma_1 * sigma_2],
[corr * sigma_1 * sigma_2, sigma_2**2]
])
# True theoretical mutual information.
I_theory = (np.log(sigma_1) + np.log(sigma_2) -
0.5 * np.log(np.linalg.det(cov)))
np.random.seed(0)
Z = np.random.multivariate_normal(mean, cov, size=1000)
x, y = Z[:, 0], Z[:, 1]
# Theory and computed values won't be very close, assert that the
# first figures after decimal point match.
for n_neighbors in [3, 5, 7]:
I_computed = _compute_mi(x, y, False, False, n_neighbors)
assert_almost_equal(I_computed, I_theory, 1)
def test_compute_mi_cd():
# To test define a joint distribution as follows:
# p(x, y) = p(x) p(y | x)
# X ~ Bernoulli(p)
# (Y | x = 0) ~ Uniform(-1, 1)
# (Y | x = 1) ~ Uniform(0, 2)
# Use the following formula for mutual information:
# I(X; Y) = H(Y) - H(Y | X)
# Two entropies can be computed by hand:
# H(Y) = -(1-p)/2 * ln((1-p)/2) - p/2*log(p/2) - 1/2*log(1/2)
# H(Y | X) = ln(2)
    # Now we need to implement sampling from our distribution, which is
    # done easily using conditional distribution logic.
n_samples = 1000
np.random.seed(0)
for p in [0.3, 0.5, 0.7]:
x = np.random.uniform(size=n_samples) > p
y = np.empty(n_samples)
mask = x == 0
y[mask] = np.random.uniform(-1, 1, size=np.sum(mask))
y[~mask] = np.random.uniform(0, 2, size=np.sum(~mask))
I_theory = -0.5 * ((1 - p) * np.log(0.5 * (1 - p)) +
p * np.log(0.5 * p) + np.log(0.5)) - np.log(2)
# Assert the same tolerance.
for n_neighbors in [3, 5, 7]:
I_computed = _compute_mi(x, y, True, False, n_neighbors)
assert_almost_equal(I_computed, I_theory, 1)
def test_compute_mi_cd_unique_label():
# Test that adding unique label doesn't change MI.
n_samples = 100
x = np.random.uniform(size=n_samples) > 0.5
y = np.empty(n_samples)
mask = x == 0
y[mask] = np.random.uniform(-1, 1, size=np.sum(mask))
y[~mask] = np.random.uniform(0, 2, size=np.sum(~mask))
mi_1 = _compute_mi(x, y, True, False)
x = np.hstack((x, 2))
y = np.hstack((y, 10))
mi_2 = _compute_mi(x, y, True, False)
assert_equal(mi_1, mi_2)
# We are going to test that feature ordering by MI matches our expectations.
def test_mutual_info_classif_discrete():
X = np.array([[0, 0, 0],
[1, 1, 0],
[2, 0, 1],
[2, 0, 1],
[2, 0, 1]])
y = np.array([0, 1, 2, 2, 1])
# Here X[:, 0] is the most informative feature, and X[:, 1] is weakly
# informative.
mi = mutual_info_classif(X, y, discrete_features=True)
assert_array_equal(np.argsort(-mi), np.array([0, 2, 1]))
def test_mutual_info_regression():
    # We generate samples from a multivariate normal distribution, using a
    # transformation of initially uncorrelated variables. The zeroth variable
    # after the transformation is selected as the target vector; it has the
    # strongest correlation with variable 2 and the weakest correlation with
    # variable 1.
T = np.array([
[1, 0.5, 2, 1],
[0, 1, 0.1, 0.0],
[0, 0.1, 1, 0.1],
[0, 0.1, 0.1, 1]
])
cov = T.dot(T.T)
mean = np.zeros(4)
np.random.seed(0)
Z = np.random.multivariate_normal(mean, cov, size=1000)
X = Z[:, 1:]
y = Z[:, 0]
mi = mutual_info_regression(X, y, random_state=0)
assert_array_equal(np.argsort(-mi), np.array([1, 2, 0]))
def test_mutual_info_classif_mixed():
# Here the target is discrete and there are two continuous and one
# discrete feature. The idea of this test is clear from the code.
np.random.seed(0)
X = np.random.rand(1000, 3)
X[:, 1] += X[:, 0]
y = ((0.5 * X[:, 0] + X[:, 2]) > 0.5).astype(int)
X[:, 2] = X[:, 2] > 0.5
mi = mutual_info_classif(X, y, discrete_features=[2], n_neighbors=3,
random_state=0)
assert_array_equal(np.argsort(-mi), [2, 0, 1])
for n_neighbors in [5, 7, 9]:
mi_nn = mutual_info_classif(X, y, discrete_features=[2],
n_neighbors=n_neighbors, random_state=0)
        # Check that the continuous values have a higher MI with greater
        # n_neighbors
assert_greater(mi_nn[0], mi[0])
assert_greater(mi_nn[1], mi[1])
# The n_neighbors should not have any effect on the discrete value
# The MI should be the same
assert_equal(mi_nn[2], mi[2])
def test_mutual_info_options():
X = np.array([[0, 0, 0],
[1, 1, 0],
[2, 0, 1],
[2, 0, 1],
[2, 0, 1]], dtype=float)
y = np.array([0, 1, 2, 2, 1], dtype=float)
X_csr = csr_matrix(X)
for mutual_info in (mutual_info_regression, mutual_info_classif):
        assert_raises(ValueError, mutual_info, X_csr, y,
                      discrete_features=False)
mi_1 = mutual_info(X, y, discrete_features='auto', random_state=0)
mi_2 = mutual_info(X, y, discrete_features=False, random_state=0)
mi_3 = mutual_info(X_csr, y, discrete_features='auto',
random_state=0)
mi_4 = mutual_info(X_csr, y, discrete_features=True,
random_state=0)
assert_array_equal(mi_1, mi_2)
assert_array_equal(mi_3, mi_4)
assert_false(np.allclose(mi_1, mi_3))
if __name__ == '__main__':
run_module_suite()
|
Titan-C/scikit-learn
|
sklearn/feature_selection/tests/test_mutual_info.py
|
Python
|
bsd-3-clause
| 6,881
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# api-feiras-livres documentation build configuration file, created by
# sphinx-quickstart on Tue Mar 28 03:04:19 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
import sphinx_rtd_theme
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'sphinx.ext.githubpages']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
source_parsers = {
'.md': 'recommonmark.parser.CommonMarkParser',
}
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
source_suffix = ['.rst', '.md']
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'api-feiras-livres'
copyright = '2017, Samuel Sampaio <samukasmk@gmail.com>'
author = 'Samuel Sampaio <samukasmk@gmail.com>'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1.0'
# The full version, including alpha/beta/rc tags.
release = '0.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'pt-br'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# html_theme = 'alabaster'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'api-feiras-livresdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'api-feiras-livres.tex', 'api-feiras-livres Documentation',
'Samuel Sampaio \\textless{}samukasmk@gmail.com\\textgreater{}', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'api-feiras-livres', 'api-feiras-livres Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'api-feiras-livres', 'api-feiras-livres Documentation',
author, 'api-feiras-livres', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
# epub_exclude_files = ['search.html']
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
|
samukasmk/api-feiras-livres
|
docs/conf.py
|
Python
|
apache-2.0
| 5,943
|
# -*- test-case-name: txweb2.dav.test.test_copy,twext.web2.dav.test.test_move -*-
##
# Copyright (c) 2005-2017 Apple Inc. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# DRI: Wilfredo Sanchez, wsanchez@apple.com
##
"""
WebDAV COPY and MOVE methods.
"""
__all__ = ["http_COPY", "http_MOVE"]
from twisted.internet.defer import waitForDeferred, deferredGenerator
from twext.python.log import Logger
from txweb2 import responsecode
from txweb2.dav.fileop import move
from txweb2.http import HTTPError, StatusResponse
from txweb2.filter.location import addLocation
from txdav.xml import element as davxml
from txweb2.dav.idav import IDAVResource
from txweb2.dav.method import put_common
from txweb2.dav.util import parentForURL
# FIXME: This is circular
import txweb2.dav.static
log = Logger()
def http_COPY(self, request):
"""
Respond to a COPY request. (RFC 2518, section 8.8)
"""
r = waitForDeferred(prepareForCopy(self, request))
yield r
r = r.getResult()
destination, destination_uri, depth = r
#
# Check authentication and access controls
#
x = waitForDeferred(self.authorize(request, (davxml.Read(),), recurse=True))
yield x
x.getResult()
if destination.exists():
x = waitForDeferred(destination.authorize(
request,
(davxml.WriteContent(), davxml.WriteProperties()),
recurse=True
))
yield x
x.getResult()
else:
destparent = waitForDeferred(request.locateResource(parentForURL(destination_uri)))
yield destparent
destparent = destparent.getResult()
x = waitForDeferred(destparent.authorize(request, (davxml.Bind(),)))
yield x
x.getResult()
# May need to add a location header
addLocation(request, destination_uri)
# x = waitForDeferred(copy(self.fp, destination.fp, destination_uri, depth))
x = waitForDeferred(put_common.storeResource(request,
source=self,
source_uri=request.uri,
destination=destination,
destination_uri=destination_uri,
deletesource=False,
depth=depth
))
yield x
yield x.getResult()
http_COPY = deferredGenerator(http_COPY)
def http_MOVE(self, request):
"""
Respond to a MOVE request. (RFC 2518, section 8.9)
"""
r = waitForDeferred(prepareForCopy(self, request))
yield r
r = r.getResult()
destination, destination_uri, depth = r
#
# Check authentication and access controls
#
parentURL = parentForURL(request.uri)
parent = waitForDeferred(request.locateResource(parentURL))
yield parent
parent = parent.getResult()
x = waitForDeferred(parent.authorize(request, (davxml.Unbind(),)))
yield x
x.getResult()
if destination.exists():
x = waitForDeferred(destination.authorize(
request,
(davxml.Bind(), davxml.Unbind()),
recurse=True
))
yield x
x.getResult()
else:
destparentURL = parentForURL(destination_uri)
destparent = waitForDeferred(request.locateResource(destparentURL))
yield destparent
destparent = destparent.getResult()
x = waitForDeferred(destparent.authorize(request, (davxml.Bind(),)))
yield x
x.getResult()
# May need to add a location header
addLocation(request, destination_uri)
#
# RFC 2518, section 8.9 says that we must act as if the Depth header is set
# to infinity, and that the client must omit the Depth header or set it to
# infinity.
#
# This seems somewhat at odds with the notion that a bad request should be
# rejected outright; if the client sends a bad depth header, the client is
# broken, and section 8 suggests that a bad request should be rejected...
#
# Let's play it safe for now and ignore broken clients.
#
if self.isCollection() and depth != "infinity":
msg = "Client sent illegal depth header value for MOVE: %s" % (depth,)
log.error(msg)
raise HTTPError(StatusResponse(responsecode.BAD_REQUEST, msg))
    # Let's optimise a move within the same directory to a new resource as a simple move
    # rather than using the full transaction-based storeResource API. This allows simple
    # "rename" operations to work quickly.
if (not destination.exists()) and destparent == parent:
x = waitForDeferred(move(self.fp, request.uri, destination.fp, destination_uri, depth))
else:
x = waitForDeferred(put_common.storeResource(request,
source=self,
source_uri=request.uri,
destination=destination,
destination_uri=destination_uri,
deletesource=True,
depth=depth))
yield x
yield x.getResult()
http_MOVE = deferredGenerator(http_MOVE)
def prepareForCopy(self, request):
#
# Get the depth
#
depth = request.headers.getHeader("depth", "infinity")
if depth not in ("0", "infinity"):
msg = ("Client sent illegal depth header value: %s" % (depth,))
log.error(msg)
raise HTTPError(StatusResponse(responsecode.BAD_REQUEST, msg))
#
# Verify this resource exists
#
if not self.exists():
log.error("File not found: %s" % (self,))
raise HTTPError(StatusResponse(
responsecode.NOT_FOUND,
"Source resource %s not found." % (request.uri,)
))
#
# Get the destination
#
destination_uri = request.headers.getHeader("destination")
if not destination_uri:
msg = "No destination header in %s request." % (request.method,)
log.error(msg)
raise HTTPError(StatusResponse(responsecode.BAD_REQUEST, msg))
d = request.locateResource(destination_uri)
d.addCallback(_prepareForCopy, destination_uri, request, depth)
return d
def _prepareForCopy(destination, destination_uri, request, depth):
#
# Destination must be a DAV resource
#
try:
destination = IDAVResource(destination)
except TypeError:
log.error("Attempt to %s to a non-DAV resource: (%s) %s"
% (request.method, destination.__class__, destination_uri))
raise HTTPError(StatusResponse(
responsecode.FORBIDDEN,
"Destination %s is not a WebDAV resource." % (destination_uri,)
))
#
# FIXME: Right now we don't know how to copy to a non-DAVFile resource.
# We may need some more API in IDAVResource.
# So far, we need: .exists(), .fp.parent()
#
if not isinstance(destination, txweb2.dav.static.DAVFile):
log.error("DAV copy between non-DAVFile DAV resources isn't implemented")
raise HTTPError(StatusResponse(
responsecode.NOT_IMPLEMENTED,
"Destination %s is not a DAVFile resource." % (destination_uri,)
))
#
# Check for existing destination resource
#
overwrite = request.headers.getHeader("overwrite", True)
if destination.exists() and not overwrite:
log.error("Attempt to %s onto existing file without overwrite flag enabled: %s"
% (request.method, destination))
raise HTTPError(StatusResponse(
responsecode.PRECONDITION_FAILED,
"Destination %s already exists." % (destination_uri,)
))
#
# Make sure destination's parent exists
#
if not destination.parent().isCollection():
log.error("Attempt to %s to a resource with no parent: %s"
% (request.method, destination.fp.path))
raise HTTPError(StatusResponse(responsecode.CONFLICT, "No parent collection."))
return destination, destination_uri, depth
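# Illustrative request handled by the methods above (values are hypothetical, but
# Depth, Destination and Overwrite are the headers actually read by prepareForCopy):
#
#   COPY /calendars/users/alice/event.ics HTTP/1.1
#   Host: calendars.example.com
#   Destination: http://calendars.example.com/calendars/users/alice/backup/event.ics
#   Depth: 0
#   Overwrite: F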
|
macosforge/ccs-calendarserver
|
txweb2/dav/method/copymove.py
|
Python
|
apache-2.0
| 9,316
|
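# Counts how many base-17 digits of abs(n) are equal to 16 (the largest base-17
# digit); note that the variable b = 17 below is assigned but never used.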
n = abs(int(input()))
k = 0
b = 17
while n != 0:
if n % 17 == 16:
k += 1
n //= 17
print(k)
|
Senbjorn/mipt_lab_2016
|
contest_222/digits.py
|
Python
|
gpl-3.0
| 95
|
import os
from .conf import init_conf
class Local(object):
def __init__(self, conf='toraconf'):
self._conf = conf
@property
def conf(self):
try:
from flask import current_app
return current_app.config
except:
if isinstance(self._conf, str):
from flask import Config
conf = Config(os.getcwd())
init_conf(conf, self._conf)
self._conf = conf
return self._conf
@property
def secret_key(self):
try:
from flask import current_app
return current_app.secret_key
except:
return self.conf['SECRET_KEY']
local = Local()
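# A minimal usage sketch (the import path below is assumed from this file's
# location): inside a Flask app context `local.conf` resolves to
# `current_app.config`; outside of one it lazily loads the module named by the
# constructor argument (default 'toraconf').
#
#     from torabot.ut.local import local
#     key = local.secret_key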
|
Answeror/torabot
|
torabot/ut/local.py
|
Python
|
mit
| 724
|
"""
Custom Sphinx documentation module to link to parts of the OAuth2 draft.
"""
from docutils import nodes
base_url = "http://tools.ietf.org/html/rfc6749"
def rfclink(name, rawtext, text, lineno, inliner, options={}, content=[]):
"""Link to the OAuth2 draft.
Returns 2 part tuple containing list of nodes to insert into the
document and a list of system messages. Both are allowed to be
empty.
:param name: The role name used in the document.
:param rawtext: The entire markup snippet, with role.
:param text: The text marked with the role.
:param lineno: The line number where rawtext appears in the input.
:param inliner: The inliner instance that called us.
:param options: Directive options for customization.
:param content: The directive content for customization.
"""
node = nodes.reference(rawtext, "Section " + text, refuri="%s#section-%s" % (base_url, text))
return [node], []
def setup(app):
"""
Install the plugin.
:param app: Sphinx application context.
"""
app.add_role('rfc', rfclink)
return
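# Hypothetical usage in a reStructuredText source, once this module is listed in a
# Sphinx project's `extensions`:
#
#   Token requests are described in :rfc:`4.1.3`.
#
# which renders as a link titled "Section 4.1.3" pointing to
# http://tools.ietf.org/html/rfc6749#section-4.1.3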
|
frasern/ADL_LRS
|
oauth2_provider/provider/sphinx.py
|
Python
|
apache-2.0
| 1,097
|
import sys
import unittest
from sure import expect
from social.utils import sanitize_redirect, user_is_authenticated, \
user_is_active, slugify, build_absolute_uri
PY3 = sys.version_info[0] == 3
class SanitizeRedirectTest(unittest.TestCase):
def test_none_redirect(self):
expect(sanitize_redirect('myapp.com', None)).to.equal(None)
def test_empty_redirect(self):
expect(sanitize_redirect('myapp.com', '')).to.equal(None)
def test_dict_redirect(self):
expect(sanitize_redirect('myapp.com', {})).to.equal(None)
def test_invalid_redirect(self):
expect(sanitize_redirect('myapp.com',
{'foo': 'bar'})).to.equal(None)
def test_wrong_path_redirect(self):
expect(sanitize_redirect(
'myapp.com',
'http://notmyapp.com/path/'
)).to.equal(None)
def test_valid_absolute_redirect(self):
expect(sanitize_redirect(
'myapp.com',
'http://myapp.com/path/'
)).to.equal('http://myapp.com/path/')
def test_valid_relative_redirect(self):
expect(sanitize_redirect('myapp.com', '/path/')).to.equal('/path/')
class UserIsAuthenticatedTest(unittest.TestCase):
def test_user_is_none(self):
expect(user_is_authenticated(None)).to.equal(False)
def test_user_is_not_none(self):
expect(user_is_authenticated(object())).to.equal(True)
def test_user_has_is_authenticated(self):
class User(object):
is_authenticated = True
expect(user_is_authenticated(User())).to.equal(True)
def test_user_has_is_authenticated_callable(self):
class User(object):
def is_authenticated(self):
return True
expect(user_is_authenticated(User())).to.equal(True)
class UserIsActiveTest(unittest.TestCase):
def test_user_is_none(self):
expect(user_is_active(None)).to.equal(False)
def test_user_is_not_none(self):
expect(user_is_active(object())).to.equal(True)
def test_user_has_is_active(self):
class User(object):
is_active = True
expect(user_is_active(User())).to.equal(True)
def test_user_has_is_active_callable(self):
class User(object):
def is_active(self):
return True
expect(user_is_active(User())).to.equal(True)
class SlugifyTest(unittest.TestCase):
def test_slugify_formats(self):
if PY3:
expect(slugify('FooBar')).to.equal('foobar')
expect(slugify('Foo Bar')).to.equal('foo-bar')
expect(slugify('Foo (Bar)')).to.equal('foo-bar')
else:
expect(slugify('FooBar'.decode('utf-8'))).to.equal('foobar')
expect(slugify('Foo Bar'.decode('utf-8'))).to.equal('foo-bar')
expect(slugify('Foo (Bar)'.decode('utf-8'))).to.equal('foo-bar')
class BuildAbsoluteURITest(unittest.TestCase):
def setUp(self):
self.host = 'http://foobar.com'
def tearDown(self):
self.host = None
def test_path_none(self):
expect(build_absolute_uri(self.host)).to.equal(self.host)
def test_path_empty(self):
expect(build_absolute_uri(self.host, '')).to.equal(self.host)
def test_path_http(self):
expect(build_absolute_uri(self.host, 'http://barfoo.com')) \
.to.equal('http://barfoo.com')
def test_path_https(self):
expect(build_absolute_uri(self.host, 'https://barfoo.com')) \
.to.equal('https://barfoo.com')
def test_host_ends_with_slash_and_path_starts_with_slash(self):
expect(build_absolute_uri(self.host + '/', '/foo/bar')) \
.to.equal('http://foobar.com/foo/bar')
def test_absolute_uri(self):
expect(build_absolute_uri(self.host, '/foo/bar')) \
.to.equal('http://foobar.com/foo/bar')
|
nvbn/python-social-auth
|
social/tests/test_utils.py
|
Python
|
bsd-3-clause
| 3,895
|
"""
Class for reading data from Neuralynx files.
This IO supports NCS, NEV and NSE file formats.
Depends on: numpy
Supported: Read
Author: Julia Sprenger, Carlos Canova
"""
from neo.io.basefromrawio import BaseFromRaw
from neo.rawio.neuralynxrawio.neuralynxrawio import NeuralynxRawIO
class NeuralynxIO(NeuralynxRawIO, BaseFromRaw):
"""
Class for reading data from Neuralynx files.
This IO supports NCS, NEV, NSE and NTT file formats.
NCS contains signals for one channel
NEV contains events
NSE contains spikes and waveforms for mono electrodes
NTT contains spikes and waveforms for tetrodes
"""
_prefered_signal_group_mode = 'group-by-same-units'
mode = 'dir'
def __init__(self, dirname='', filename='', use_cache=False, cache_path='same_as_resource',
exclude_filename=None, keep_original_times=False):
"""
Initialise IO instance
Parameters
----------
dirname : str
Directory containing data files
use_cache : bool, optional
Cache results of initial file scans for faster loading in subsequent runs.
Default: False
cache_path : str, optional
Folder path to use for cache files.
Default: 'same_as_resource'
exclude_filename: str or list
Filename or list of filenames to be excluded. Expects base filenames without
directory path.
keep_original_times : bool
Preserve original time stamps as in data files. By default datasets are
shifted to begin at t_start = 0*pq.second.
Default: False
"""
NeuralynxRawIO.__init__(self, dirname=dirname, filename=filename, use_cache=use_cache,
cache_path=cache_path, exclude_filename=exclude_filename,
keep_original_times=keep_original_times)
if self.rawmode == 'one-file':
BaseFromRaw.__init__(self, filename)
elif self.rawmode == 'one-dir':
BaseFromRaw.__init__(self, dirname)
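# A minimal usage sketch (the directory path is hypothetical; read_block() is the
# generic reading entry point provided by BaseFromRaw):
#
#     reader = NeuralynxIO(dirname='recordings/session1', keep_original_times=True)
#     block = reader.read_block()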
|
samuelgarcia/python-neo
|
neo/io/neuralynxio.py
|
Python
|
bsd-3-clause
| 2,096
|
# Each group creates its application in its own .py file (step 1 of 3)
from flask import Blueprint, render_template
# Use a Blueprint to create this module's app, with URL prefix /bg4, and set the template folder
scrum5_task40323208 = Blueprint('scrum5_task40323208', __name__, url_prefix='/bg4', template_folder='templates')
# scrum5_G below is a complete drawing program that can run on its own
@scrum5_task40323208.route('/scrum5_G')
def scrum5_G():
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>網際 2D 繪圖</title>
<!-- IE 9: display inline SVG -->
<meta http-equiv="X-UA-Compatible" content="IE=9">
<script type="text/javascript" src="http://brython.info/src/brython_dist.js"></script>
<script type="text/javascript" src="http://cptocadp-2015fallhw.rhcloud.com/static/Cango-8v03.js"></script>
<script type="text/javascript" src="http://cptocadp-2015fallhw.rhcloud.com/static/Cango2D-6v13.js"></script>
<script type="text/javascript" src="http://cptocadp-2015fallhw.rhcloud.com/static/CangoAxes-1v33.js"></script>
</head>
<body>
<script>
window.onload=function(){
brython(1);
}
</script>
<canvas id="plotarea" width="800" height="800"></canvas>
<script type="text/python">
from javascript import JSConstructor
from browser import window
import math
cango = JSConstructor(window.Cango)
cobj = JSConstructor(window.Cobj)
shapedefs = window.shapeDefs
obj2d = JSConstructor(window.Obj2D)
cgo = cango("plotarea")
cgo.setWorldCoords(-250, -250, 500, 500)
# Draw the axes
cgo.drawAxes(0, 240, 0, 240, {
"strokeColor":"#aaaaaa",
"fillColor": "#aaaaaa",
"xTickInterval": 20,
"xLabelInterval": 20,
"yTickInterval": 20,
"yLabelInterval": 20})
deg = math.pi/180
# Wrap the chain-outline drawing code in a class
class chain():
    # The outline shape is stored as a class attribute
chamber = "M -6.8397, -1.4894 \
A 7, 7, 0, 1, 0, 6.8397, -1.4894 \
A 40, 40, 0, 0, 1, 6.8397, -18.511 \
A 7, 7, 0, 1, 0, -6.8397, -18.511 \
A 40, 40, 0, 0, 1, -6.8397, -1.4894 z"
cgoChamber = window.svgToCgoSVG(chamber)
    # Draw one link given its start and end points, using the default color, border and linewidth arguments
def basic(self, x1, y1, x2, y2, color="green", border=True, linewidth=4, scale=1):
self.x1 = x1
self.y1 = y1
self.x2 = x2
self.y2 = y2
self.color = color
self.border = border
self.linewidth = linewidth
self.scale = scale
        # Note: cgoChamber is a class attribute
cmbr = cobj(self.cgoChamber, "SHAPE", {
"fillColor": color,
"border": border,
"strokeColor": "tan",
"lineWidth": linewidth })
        # hole marks the origin position
        hole = cobj(shapedefs.circle(4), "PATH")
        cmbr.appendPath(hole)
        # Duplicate cmbr and name the copy basic1
        basic1 = cmbr.dup()
        # The link initially points straight down from the origin, so rotate 90 degrees plus the atan2 angle
        basic1.rotate(math.atan2(y2-y1, x2-x1)/deg+90)
        # Scale up by a factor of scale
cgo.render(basic1, x1, y1, scale, 0)
    # Draw one link given its start point and rotation angle, using the default color, border and linewidth arguments
def basic_rot(self, x1, y1, rot, color="green", border=True, linewidth=4, scale=1):
self.x1 = x1
self.y1 = y1
self.rot = rot
self.color = color
self.border = border
self.linewidth = linewidth
self.scale = scale
        # Note: cgoChamber is a class attribute
cmbr = cobj(self.cgoChamber, "SHAPE", {
"fillColor": color,
"border": border,
"strokeColor": "tan",
"lineWidth": linewidth })
        # hole marks the origin position
        hole = cobj(shapedefs.circle(4), "PATH")
        cmbr.appendPath(hole)
        # Compute x2 and y2 from the rotation angle
x2 = x1 + 20*math.cos(rot*deg)
y2 = y1 + 20*math.sin(rot*deg)
        # Duplicate cmbr and name the copy basic1
        basic1 = cmbr.dup()
        # The link initially points straight down from the origin, so rotate 90 degrees plus the given angle
        basic1.rotate(rot+90)
        # Scale up by a factor of scale
cgo.render(basic1, x1, y1, scale, 0)
return x2, y2
# Create an instance of the chain class and bind it to mychain
mychain = chain()
# Draw the letter G
# Upper half
# Vertical start at the middle of the left side; the center lies at the segment midpoint, shifted up in y by two link spacings
x1, y1 = mychain.basic_rot(0+65*2, -75, 90, color="brown")
# Turn 80 degrees at the top
x2, y2 = mychain.basic_rot(x1, y1, 80, color="brown")
# Turn 30 degrees at the top
x3, y3 = mychain.basic_rot(x2, y2, 30, color="brown")
# Horizontal at the top
x4, y4 = mychain.basic_rot(x3, y3, 0, color="brown")
# Lower half: -80 degrees from the starting point
x5, y5 = mychain.basic_rot(0+65*2, -75, -80, color="brown")
# Slope down at -30 degrees
x6, y6 = mychain.basic_rot(x5, y5, -30, color="brown")
# Bottom horizontal unit
x7, y7 = mychain.basic_rot(x6, y6, -0, color="brown")
# Bottom vertical unit
x8, y8 = mychain.basic_rot(x7, y7, 90, color="brown")
# Bottom vertical unit
x9, y9 = mychain.basic_rot(x8, y8, 90, color="brown")
# Middle horizontal unit
x10, y10 = mychain.basic_rot(x9, y9, 180, color="brown")
</script>
<!-- Collaboratively include the task1 code written by the ag100 scrum-2 members -->
<!-- <script type="text/python" src="/bg4/scrum2_task1"></script> -->
<!-- Collaboratively include the task1 code written by the ag100 scrum-3 members -->
<!-- <script type="text/python" src="/bg4/scrum3_task1"></script> -->
</body>
</html>
'''
return outstring
|
2015fallhw/cdw2
|
users/s2b/g4/40323208/scrum5_task40323208.py
|
Python
|
agpl-3.0
| 5,630
|
import pkg_resources
import unittest
def with_requires(*requirements):
"""Run a test case only when given requirements are satisfied.
.. admonition:: Example
This test case runs only when `numpy>=1.10` is installed.
>>> from chainer import testing
        >>> class Test(unittest.TestCase):
... @testing.with_requires('numpy>=1.10')
... def test_for_numpy_1_10(self):
... pass
Args:
        requirements: A list of strings representing the requirement
            conditions that must be satisfied to run a given test case.
"""
ws = pkg_resources.WorkingSet()
try:
ws.require(*requirements)
skip = False
except pkg_resources.VersionConflict:
skip = True
msg = 'requires: {}'.format(','.join(requirements))
return unittest.skipIf(skip, msg)
|
AlpacaDB/chainer
|
chainer/testing/helper.py
|
Python
|
mit
| 829
|
import asyncio
from rx import Observable
from counter_rxt import frame, unframe, Router, CounterItem
class FramedTransport(object):
def __init__(self, transport):
self.transport = transport
def write(self, data):
self.transport.write(frame(data).encode())
class CounterServerProtocol(asyncio.Protocol):
def connection_made(self, transport):
peername = transport.get_extra_info('peername')
print('Connection from {}'.format(peername))
self.router = Router(FramedTransport(transport))
self.frame_context = ''
def connection_lost(self, exc):
print('connection lost')
return
def data_received(self, data):
message = data.decode()
self.frame_context, packets = unframe(self.frame_context, message)
for packet in packets:
self.router.on_message(packet)
def delete_counter_subscription(stream):
stream = None
def create_counter_stream(start, end, step):
source = Observable.from_(range(start,end+1, step)).map(
lambda i: CounterItem(i))
return lambda n,c,e: subscribe_counter_stream(source, n, c, e), lambda: delete_counter_subscription(source)
def subscribe_counter_stream(stream, next, completed, error):
stream.subscribe(
lambda i: next(i),
lambda e: error(e),
lambda: completed())
Router.set_Counter_factory(create_counter_stream)
loop = asyncio.get_event_loop()
# Each client connection will create a new protocol instance
coro = loop.create_server(CounterServerProtocol, '127.0.0.1', 9999)
server = loop.run_until_complete(coro)
print('Serving on {}'.format(server.sockets[0].getsockname()))
try:
loop.run_forever()
except KeyboardInterrupt:
pass
server.close()
loop.run_until_complete(server.wait_closed())
loop.close()
|
rxtender/rxt-backend-base
|
example/counter/server.py
|
Python
|
mit
| 1,808
|
from __future__ import unicode_literals
from django.test import TestCase
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import Group, AnonymousUser
from django.db import models
from guardian.compat import get_user_model
from guardian.testapp.tests.conf import skipUnlessTestApp
from guardian.testapp.tests.test_core import ObjectPermissionTestCase
from guardian.testapp.models import Project
from guardian.testapp.models import ProjectUserObjectPermission
from guardian.testapp.models import ProjectGroupObjectPermission
from guardian.models import UserObjectPermission
from guardian.models import UserObjectPermissionBase
from guardian.models import GroupObjectPermission
from guardian.utils import get_anonymous_user
from guardian.utils import get_identity
from guardian.utils import get_user_obj_perms_model
from guardian.utils import get_group_obj_perms_model
from guardian.utils import get_obj_perms_model
from guardian.exceptions import NotUserNorGroup
User = get_user_model()
class GetAnonymousUserTest(TestCase):
def test(self):
anon = get_anonymous_user()
self.assertTrue(isinstance(anon, User))
class GetIdentityTest(ObjectPermissionTestCase):
def test_user(self):
user, group = get_identity(self.user)
self.assertTrue(isinstance(user, User))
self.assertEqual(group, None)
def test_anonymous_user(self):
anon = AnonymousUser()
user, group = get_identity(anon)
self.assertTrue(isinstance(user, User))
self.assertEqual(group, None)
def test_group(self):
user, group = get_identity(self.group)
self.assertTrue(isinstance(group, Group))
self.assertEqual(user, None)
def test_not_user_nor_group(self):
self.assertRaises(NotUserNorGroup, get_identity, 1)
self.assertRaises(NotUserNorGroup, get_identity, "User")
self.assertRaises(NotUserNorGroup, get_identity, User)
@skipUnlessTestApp
class GetUserObjPermsModelTest(TestCase):
def test_for_instance(self):
project = Project(name='Foobar')
self.assertEqual(get_user_obj_perms_model(project),
ProjectUserObjectPermission)
def test_for_class(self):
self.assertEqual(get_user_obj_perms_model(Project),
ProjectUserObjectPermission)
def test_default(self):
self.assertEqual(get_user_obj_perms_model(ContentType),
UserObjectPermission)
def test_user_model(self):
# this test assumes that there were no direct obj perms model to User
# model defined (i.e. while testing guardian app in some custom
# project)
self.assertEqual(get_user_obj_perms_model(User),
UserObjectPermission)
@skipUnlessTestApp
class GetGroupObjPermsModelTest(TestCase):
def test_for_instance(self):
project = Project(name='Foobar')
self.assertEqual(get_group_obj_perms_model(project),
ProjectGroupObjectPermission)
def test_for_class(self):
self.assertEqual(get_group_obj_perms_model(Project),
ProjectGroupObjectPermission)
def test_default(self):
self.assertEqual(get_group_obj_perms_model(ContentType),
GroupObjectPermission)
def test_group_model(self):
# this test assumes that there were no direct obj perms model to Group
# model defined (i.e. while testing guardian app in some custom
# project)
self.assertEqual(get_group_obj_perms_model(Group),
GroupObjectPermission)
class GetObjPermsModelTest(TestCase):
def test_image_field(self):
class SomeModel(models.Model):
image = models.FileField(upload_to='images/')
obj = SomeModel()
perm_model = get_obj_perms_model(obj, UserObjectPermissionBase,
UserObjectPermission)
self.assertEqual(perm_model, UserObjectPermission)
def test_file_field(self):
class SomeModel2(models.Model):
file = models.FileField(upload_to='images/')
obj = SomeModel2()
perm_model = get_obj_perms_model(obj, UserObjectPermissionBase,
UserObjectPermission)
self.assertEqual(perm_model, UserObjectPermission)
|
benkonrath/django-guardian
|
guardian/testapp/tests/test_utils.py
|
Python
|
bsd-2-clause
| 4,406
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# CherryMusic - a standalone music server
# Copyright (c) 2012 - 2016 Tom Wallroth & Tilman Boerner
#
# Project page:
# http://fomori.org/cherrymusic/
# Sources on github:
# http://github.com/devsnd/cherrymusic/
#
# CherryMusic is based on
# jPlayer (GPL/MIT license) http://www.jplayer.org/
# CherryPy (BSD license) http://www.cherrypy.org/
#
# licensed under GNU GPL version 3 (or later)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
#python 2.6+ backward compability
from __future__ import unicode_literals
VERSION = "0.41.2"
__version__ = VERSION
DESCRIPTION = "an mp3 server for your browser"
LONG_DESCRIPTION = """CherryMusic is a music streaming
server written in python. It's based on cherrypy and jPlayer.
You can search your collection, create and share playlists with
other users. It's able to play music on almost all devices since
it happens in your browser and uses HTML5 for audio playback.
"""
from backport import input
import re
import os
import codecs
import sys
import threading
import signal
import logging
logger = logging.getLogger(__name__)
import gettext
from cherrymusicserver import pathprovider
if sys.version_info < (3,):
gettext.install('default', unicode=True, localedir=pathprovider.getResourcePath('res/i18n'))
else:
gettext.install('default', localedir=pathprovider.getResourcePath('res/i18n'))
# workaround for cherrypy 3.2.2:
# https://bitbucket.org/cherrypy/cherrypy/issue/1163/attributeerror-in-cherrypyprocessplugins
if sys.version_info >= (3, 3):
threading._Timer = threading.Timer
import cherrypy
def version():
return """CherryMusic Server {cm_version}
a standalone music server
Copyright (c) 2012 - 2014 Tom Wallroth & Tilman Boerner""".format(cm_version=VERSION)
def info():
import locale
import platform
from audiotranscode import AudioTranscode
audiotranscode = AudioTranscode()
encoders = ['%s (%s)' % (enc.filetype, enc.command[0])
for enc in audiotranscode.available_encoders]
decoders = ['%s (%s)' % (enc.filetype, enc.command[0])
for enc in audiotranscode.available_decoders]
return """CherryMusic Server {cm_version}
CherryPy: {cp_version}
Python: {py_version}
Platform: {platform}
configuration dir:
{confdir}
server data dir:
{datadir}
static resources dir:
{resourcedir}
server package dir:
{packdir}
process working dir:
{workdir}
locale: {locale}, default: {deflocale}
filesystem encoding: {fs_encoding}
Available Decoders:
{decoders}
Available Encoders:
{encoders}
(Do not parse this output.)""".format(
cm_version=REPO_VERSION or VERSION,
cp_version=cherrypy.__version__,
py_version=platform.python_implementation() + ' ' + platform.python_version(),
platform=platform.platform(),
workdir=os.path.abspath(os.curdir),
packdir=os.path.abspath(__path__[0]),
confdir=pathprovider.getConfigPath(),
datadir=pathprovider.getUserDataPath(),
resourcedir=pathprovider.getResourcePath(''),
locale=str(locale.getlocale()),
deflocale=str(locale.getdefaultlocale()),
fs_encoding=sys.getfilesystemencoding(),
encoders='\n '.join(encoders),
decoders='\n '.join(decoders),
)
# patch cherrypy crashing on startup because of double checking
# for loopback interface, see:
# https://bitbucket.org/cherrypy/cherrypy/issue/1100/cherrypy-322-gives-engine-error-when
def fake_wait_for_occupied_port(host, port):
return
cherrypy.process.servers.wait_for_occupied_port = fake_wait_for_occupied_port
# end of port patch
try:
cherrypy_version = tuple(int(v) for v in cherrypy.__version__.split('.'))
min_major_cherrypy_version = 3
if cherrypy_version[0] < min_major_cherrypy_version:
print(_(
'cherrypy version is too old!\n'
'Current version: %s\n'
'Required version: %s or higher\n'
) % (cherrypy.__version__, min_major_cherrypy_version))
sys.exit(1)
except Exception as exc:
logger.error(_(
'Could not determine cherrypy version. Please install cherrypy '
'using pip or your OS\'s package manager. Trying to detect version '
'automatically.'
))
cherrypy_version = 'unknown'
# trying to detect the version to determine if we need to monkeypatch cherrypy
if cherrypy_version == 'unknown':
# decorator `cherrypy._cptools.register` was added between 5.4 and 5.5
# https://github.com/cherrypy/cherrypy/pull/1428
# commit: dff09e92fb2e83fb4248826c9bc14cd3b6281706
if 'register' in dir(cherrypy._cptools.Toolbox):
needs_serve_file_utf8_fix = False
else:
needs_serve_file_utf8_fix = True
else:
needs_serve_file_utf8_fix = cherrypy_version < (5, 5)
if needs_serve_file_utf8_fix:
# workaround for cherrypy < 5.5.0 not using unicode strings for URI, see:
# https://bitbucket.org/cherrypy/cherrypy/issue/1148/wrong-encoding-for-urls-containing-utf-8
cherrypy.lib.static.__serve_file = cherrypy.lib.static.serve_file
def serve_file_utf8_fix(path, content_type=None, disposition=None,
name=None, debug=False):
if sys.version_info >= (3,):
#python3+
# see also below: mirrored mangling of basedir for '/serve' static dir
path = codecs.decode(codecs.encode(path, 'latin-1'), 'utf-8')
return cherrypy.lib.static.__serve_file(path, content_type,
disposition, name, debug)
cherrypy.lib.static.serve_file = serve_file_utf8_fix
# end of unicode workaround
from cherrymusicserver import configuration as cfg
config = None
from cherrymusicserver import cherrymodel
from cherrymusicserver import database
from cherrymusicserver import httphandler
from cherrymusicserver import log
from cherrymusicserver import migrations
from cherrymusicserver import playlistdb
from cherrymusicserver import service
from cherrymusicserver import sqlitecache
from cherrymusicserver import userdb
from cherrymusicserver import useroptiondb
from cherrymusicserver import api
import audiotranscode
MEDIA_MIMETYPES = audiotranscode.MIMETYPES.copy()
del audiotranscode
def setup_services():
""" services can be used by other parts of the program to easily access
different functions of cherrymusic by registering themselves as
service.user
See :mod:`~cherrymusicserver.services`.
"""
service.provide('filecache', sqlitecache.SQLiteCache)
service.provide('cherrymodel', cherrymodel.CherryModel)
service.provide('playlist', playlistdb.PlaylistDB)
service.provide('users', userdb.UserDB)
service.provide('useroptions', useroptiondb.UserOptionDB)
service.provide('dbconnector', database.sql.SQLiteConnector, kwargs={
'datadir': pathprovider.databaseFilePath(''),
'extension': 'db',
'connargs': {'check_same_thread': False},
})
def setup_config(override_dict=None):
""" Updates the internal configuration using the following hierarchy:
override_dict > file_config > default_config
Notifies the user if there are new or deprecated configuration keys.
See :mod:`~cherrymusicserver.configuration`.
"""
defaults = cfg.from_defaults()
filecfg = cfg.from_configparser(pathprovider.configurationFile())
custom = defaults.replace(filecfg, on_error=log.e)
if override_dict:
custom = custom.replace(override_dict, on_error=log.e)
global config
config = custom
_notify_about_config_updates(defaults, filecfg)
def run_general_migrations():
""" Runs necessary migrations for CherryMusic data that is NOT kept inside
of databases.
    This might however include relocating the database files themselves,
so general migrations should run before migrating the database content.
See :mod:`~cherrymusicserver.migrations`.
"""
migrations.check_and_migrate_all()
def migrate_databases():
""" Makes sure CherryMusic's databases are up to date, migrating them if
necessary.
This might prompt the user for consent if a migration requires it and
terminate the program if no consent is obtained.
See :mod:`~cherrymusicserver.databases`.
"""
db_is_ready = database.ensure_current_version(
consentcallback=_get_user_consent_for_db_schema_update)
if not db_is_ready:
log.i(_("database schema update aborted. quitting."))
sys.exit(1)
def start_server(cfg_override=None):
""" Initializes and starts the CherryMusic server
Args:
cfg_override: A mapping of config keys to values to override those
in the config file.
"""
CherryMusic(cfg_override)
def create_user(username, password):
""" Creates a non-admin user with given username and password """
non_alnum = re.compile('[^a-z0-9]', re.IGNORECASE)
if non_alnum.findall(username):
log.e(_('usernames may only contain letters and digits'))
return False
return service.get('users').addUser(username, password, admin=False)
def delete_user(username):
userservice = service.get('users')
userid = userservice.getIdByName(username)
if userid is None:
log.e(_('user with the name "%s" does not exist!'), username)
return False
return userservice.deleteUser(userid)
def change_password(username, password):
userservice = service.get('users')
result = userservice.changePassword(username, password)
return result == 'success'
def update_filedb(paths):
""" Updates the file database in a separate thread,
possibly limited to a sequence of paths inside media.basedir
See :cls:`~cherrymusicserver.sqlitecache.SQLiteCache` methods
:meth:`~cherrymusicserver.sqlitecache.SQLiteCache.full_update` and
    :meth:`~cherrymusicserver.sqlitecache.SQLiteCache.partial_update`.
"""
cache = sqlitecache.SQLiteCache()
target = cache.partial_update if paths else cache.full_update
updater = threading.Thread(name='Updater', target=target, args=paths)
updater.start()
def create_default_config_file(path):
""" Creates or overwrites a default configuration file at `path` """
cfg.write_to_file(cfg.from_defaults(), path)
log.i(_('Default configuration file written to %(path)r'), {'path': path})
class CherryMusic:
"""Sets up services (configuration, database, etc) and starts the server"""
def __init__(self, cfg_override=None):
self.setup_config(cfg_override)
setup_services()
if config['media.basedir'] is None:
print(_("Invalid basedir. Please provide a valid basedir path."))
sys.exit(1)
else:
log.debug("Basedir is %r", config['media.basedir'])
signal.signal(signal.SIGTERM, CherryMusic.stopAndCleanUp)
signal.signal(signal.SIGINT, CherryMusic.stopAndCleanUp)
if os.name == 'posix':
signal.signal(signal.SIGHUP, CherryMusic.stopAndCleanUp)
CherryMusic.create_pid_file()
self.start_server(httphandler.HTTPHandler(config))
CherryMusic.delete_pid_file()
@classmethod
def createUser(cls, credentials):
""" .. deprecated:: > 0.34.1
Use :func:`~cherrymusicserver.create_user` instead.
"""
username, password = credentials
return create_user(username, password)
@classmethod
def stopAndCleanUp(cls, signal=None, stackframe=None):
"""Delete the process id file and exit"""
CherryMusic.delete_pid_file()
print('Exiting...')
sys.exit(0)
@classmethod
def create_pid_file(cls):
"""create a process id file, exit if it already exists"""
if pathprovider.pidFileExists():
with open(pathprovider.pidFile(), 'r') as pidfile:
try:
if not sys.platform.startswith('win'):
# this call is only available on unix systems and throws
# an OSError if the process does not exist.
os.getpgid(int(pidfile.read()))
sys.exit(_("""============================================
Process id file %s already exists.
If you are sure that cherrymusic is not running, you can delete this file and restart cherrymusic.
============================================""") % pathprovider.pidFile())
except OSError:
print('Stale process id file, removing.')
cls.delete_pid_file()
with open(pathprovider.pidFile(), 'w') as pidfile:
pidfile.write(str(os.getpid()))
@classmethod
def delete_pid_file(cls):
"""Delete the process id file, if it exists"""
if pathprovider.pidFileExists():
os.remove(pathprovider.pidFile())
else:
print(_("Error removing pid file, doesn't exist!"))
@classmethod
def setup_services(cls):
"""setup services: they can be used by other parts of the program
to easily access different functions of cherrymusic by registering
themselves as service.user
.. deprecated:: > 0.34.1
Use :func:`~cherrymusicserver.setup_services` instead.
"""
setup_services()
def setup_config(self, cfg_override):
""".. deprecated:: > 0.34.1
Use :func:`~cherrymusicserver.setup_config` instead.
"""
setup_config(cfg_override)
def setup_databases(self):
""" check if the db schema is up to date
.. deprecated:: > 0.34.1
Use :func:`~cherrymusicserver.migrate_databases` instead.
"""
migrate_databases()
def start_server(self, httphandler):
"""use the configuration to setup and start the cherrypy server
"""
cherrypy.config.update({'log.screen': True})
ipv6_enabled = config['server.ipv6_enabled']
if config['server.localhost_only']:
socket_host = "::1" if ipv6_enabled else "127.0.0.1"
else:
socket_host = "::" if ipv6_enabled else "0.0.0.0"
resourcedir = os.path.abspath(pathprovider.getResourcePath('res'))
if config['server.ssl_enabled']:
cert = pathprovider.absOrConfigPath(config['server.ssl_certificate'])
pkey = pathprovider.absOrConfigPath(config['server.ssl_private_key'])
cherrypy.config.update({
'server.ssl_certificate': cert,
'server.ssl_private_key': pkey,
'server.socket_port': config['server.ssl_port'],
})
# Create second server for redirecting http to https:
redirecter = cherrypy._cpserver.Server()
redirecter.socket_port = config['server.port']
redirecter._socket_host = socket_host
redirecter.thread_pool = 10
redirecter.subscribe()
else:
cherrypy.config.update({
'server.socket_port': config['server.port'],
})
cherrypy.config.update({
'log.error_file': os.path.join(
pathprovider.getUserDataPath(), 'server.log'),
'environment': 'production',
'server.socket_host': socket_host,
'server.thread_pool': 30,
'tools.sessions.on': True,
'tools.sessions.timeout': int(config.get('server.session_duration', 60 * 24)),
})
if not config['server.keep_session_in_ram']:
sessiondir = os.path.join(
pathprovider.getUserDataPath(), 'sessions')
if not os.path.exists(sessiondir):
os.mkdir(sessiondir)
cherrypy.config.update({
'tools.sessions.storage_type': "file",
'tools.sessions.storage_path': sessiondir,
})
basedirpath = config['media.basedir']
if sys.version_info < (3,0):
basedirpath = codecs.encode(basedirpath, 'utf-8')
scriptname = codecs.encode(config['server.rootpath'], 'utf-8')
else:
if needs_serve_file_utf8_fix:
# fix cherrypy unicode issue (only for Python3)
# see patch to cherrypy.lib.static.serve_file way above and
# https://bitbucket.org/cherrypy/cherrypy/issue/1148/wrong-encoding-for-urls-containing-utf-8
basedirpath = codecs.decode(codecs.encode(basedirpath, 'utf-8'), 'latin-1')
scriptname = config['server.rootpath']
cherrypy.tree.mount(
httphandler, scriptname,
config={
'/res': {
'tools.staticdir.on': True,
'tools.staticdir.dir': resourcedir,
'tools.staticdir.index': 'index.html',
'tools.caching.on': False,
'tools.gzip.mime_types': ['text/html', 'text/plain', 'text/javascript', 'text/css'],
'tools.gzip.on': True,
},
'/serve': {
'tools.staticdir.on': True,
'tools.staticdir.dir': basedirpath,
# 'tools.staticdir.index': 'index.html', if ever needed: in py2 MUST utf-8 encode
'tools.staticdir.content_types': MEDIA_MIMETYPES,
'tools.encode.on': True,
'tools.encode.encoding': 'utf-8',
'tools.caching.on': False,
'tools.cm_auth.on': True,
'tools.cm_auth.httphandler': httphandler,
},
'/favicon.ico': {
'tools.staticfile.on': True,
'tools.staticfile.filename': resourcedir + '/img/favicon.ico',
}})
api.v1.mount('/api/v1')
log.i(_('Starting server on port %s ...') % config['server.port'])
cherrypy.lib.caching.expires(0) # disable expiry caching
cherrypy.engine.start()
cherrypy.engine.block()
def _cm_auth_tool(httphandler):
if not httphandler.isAuthorized():
raise cherrypy.HTTPError(403)
cherrypy.tools.cm_auth = cherrypy.Tool(
'before_handler', _cm_auth_tool, priority=70)
# priority=70 -->> make tool run after session is locked (at 50)
def _get_user_consent_for_db_schema_update(reasons):
"""Ask the user if the database schema update should happen now
"""
import textwrap
wrap = lambda r: os.linesep.join(
textwrap.wrap(r, initial_indent=' - ', subsequent_indent=" "))
msg = _("""
==========================================================================
A database schema update is needed and requires your consent.
{reasons}
To continue without changes, you need to downgrade to an earlier
version of CherryMusic.
To backup your database files first, abort for now and find them here:
{dblocation}
==========================================================================
Run schema update? [y/N]: """).format(
reasons=(2 * os.linesep).join(wrap(r) for r in reasons),
dblocation='\t' + pathprovider.databaseFilePath(''))
return input(msg).lower().strip() in ('y',)
def _notify_about_config_updates(default, known_config):
"""check if there are new or deprecated configuration keys in
the config file
"""
new = []
deprecated = []
transform = lambda s: '[{0}]: {2}'.format(*(s.partition('.')))
for property in cfg.to_list(default):
if property.key not in known_config and not property.hidden:
new.append(transform(property.key))
for property in cfg.to_list(known_config):
if property.key not in default:
deprecated.append(transform(property.key))
if new:
log.i(_('''New configuration options available:
%s
Using default values for now.'''),
'\n\t\t\t'.join(new))
if deprecated:
log.i(_('''The following configuration options are not used anymore:
%s'''),
'\n\t\t\t'.join(deprecated))
if new or deprecated:
log.i(_('Start with --newconfig to generate a new default config'
' file next to your current one.'))
def _get_version_from_git():
""" Returns more precise version string based on the current git HEAD,
or None if not possible.
"""
if not os.path.isdir('.git'):
return None
def fetch(cmdname):
import re
from subprocess import Popen, PIPE
cmd = {
'branch': ['git', 'rev-parse', '--abbrev-ref', 'HEAD'],
'version': ['git', 'describe', '--tags'],
'date': ['git', 'log', '-1', '--format=%cd'],
}
unwanted_characters = re.compile('[^\w.-]+')
try:
with open(os.devnull, 'w') as devnull:
p = Popen(cmd[cmdname], stdout=PIPE, stderr=devnull)
out, err = p.communicate() # blocks until process terminates
except:
return None
if out:
out = out.decode('ascii', 'ignore')
out = unwanted_characters.sub('', out).strip()
return out
branch = fetch('branch')
version = fetch('version')
if branch and version and '-' in version:
version, patchlevel = version.split('-', 1)
if version == VERSION: # sanity check: latest tag is for VERSION
return '{0}+{1}-{2}'.format(version, branch, patchlevel)
return None
REPO_VERSION = _get_version_from_git()
|
MartijnRas/cherrymusic
|
cherrymusicserver/__init__.py
|
Python
|
gpl-3.0
| 22,205
|
"""
sentry.web.frontend.teams
~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from django.core.context_processors import csrf
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.views.decorators.csrf import csrf_protect
from sentry.constants import MEMBER_USER, MEMBER_OWNER
from sentry.models import PendingTeamMember, TeamMember
from sentry.permissions import can_add_team_member, can_remove_team, can_create_projects, \
can_create_teams, can_edit_team_member, can_remove_team_member
from sentry.plugins import plugins
from sentry.web.decorators import login_required, has_team_access
from sentry.web.forms.teams import NewTeamForm, NewTeamAdminForm, \
EditTeamForm, EditTeamAdminForm, EditTeamMemberForm, NewTeamMemberForm, \
InviteTeamMemberForm, RemoveTeamForm
from sentry.web.helpers import render_to_response
@login_required
def team_list(request):
return render_to_response('sentry/teams/list.html', {}, request)
@login_required
@csrf_protect
def create_new_team(request):
if not can_create_teams(request.user):
return HttpResponseRedirect(reverse('sentry'))
if request.user.has_perm('sentry.can_add_team'):
form_cls = NewTeamAdminForm
initial = {
'owner': request.user.username,
}
else:
form_cls = NewTeamForm
initial = {}
form = form_cls(request.POST or None, initial=initial)
if form.is_valid():
team = form.save(commit=False)
if not team.owner_id:
team.owner = request.user
team.save()
return HttpResponseRedirect(reverse('sentry-manage-team', args=[team.slug]))
context = csrf(request)
context.update({
'form': form,
})
return render_to_response('sentry/teams/new.html', context, request)
@has_team_access(MEMBER_OWNER)
@csrf_protect
def manage_team(request, team):
result = plugins.first('has_perm', request.user, 'edit_team', team)
if result is False and not request.user.has_perm('sentry.can_change_team'):
return HttpResponseRedirect(reverse('sentry'))
if request.user.has_perm('sentry.can_add_team'):
form_cls = EditTeamAdminForm
else:
form_cls = EditTeamForm
form = form_cls(request.POST or None, initial={
'owner': team.owner,
}, instance=team)
if form.is_valid():
team = form.save()
return HttpResponseRedirect(request.path + '?success=1')
member_list = [(pm, pm.user) for pm in team.member_set.select_related('user').order_by('user__username')]
pending_member_list = [(pm, pm.email) for pm in team.pending_member_set.all().order_by('email')]
project_list = list(team.project_set.all())
context = csrf(request)
context.update({
'can_add_project': can_create_projects(request.user, team),
'can_add_member': can_add_team_member(request.user, team),
'can_remove_team': can_remove_team(request.user, team),
'page': 'details',
'form': form,
'team': team,
'member_list': member_list,
'pending_member_list': pending_member_list,
'project_list': project_list,
})
return render_to_response('sentry/teams/manage.html', context, request)
@has_team_access(MEMBER_OWNER)
@csrf_protect
def remove_team(request, team):
if not can_remove_team(request.user, team):
return HttpResponseRedirect(reverse('sentry'))
form = RemoveTeamForm(request.POST or None)
if form.is_valid():
team.delete()
return HttpResponseRedirect(reverse('sentry-team-list'))
context = csrf(request)
context.update({
'form': form,
'team': team,
})
return render_to_response('sentry/teams/remove.html', context, request)
@csrf_protect
@has_team_access(MEMBER_OWNER)
def new_team_member(request, team):
can_add_member = can_add_team_member(request.user, team)
if not can_add_member:
return HttpResponseRedirect(reverse('sentry'))
initial = {
'type': MEMBER_USER,
}
invite_form = InviteTeamMemberForm(team, request.POST or None, initial=initial, prefix='invite')
add_form = NewTeamMemberForm(team, request.POST or None, initial=initial, prefix='add')
if add_form.is_valid():
pm = add_form.save(commit=False)
pm.team = team
pm.save()
return HttpResponseRedirect(reverse('sentry-edit-team-member', args=[team.slug, pm.id]) + '?success=1')
elif invite_form.is_valid():
pm = invite_form.save(commit=False)
pm.team = team
pm.save()
pm.send_invite_email()
return HttpResponseRedirect(reverse('sentry-manage-team', args=[team.slug]) + '?success=1')
context = csrf(request)
context.update({
'team': team,
'add_form': add_form,
'invite_form': invite_form,
})
return render_to_response('sentry/teams/members/new.html', context, request)
def accept_invite(request, member_id, token):
try:
pending_member = PendingTeamMember.objects.get(pk=member_id)
except PendingTeamMember.DoesNotExist:
return HttpResponseRedirect(reverse('sentry'))
if pending_member.token != token:
return HttpResponseRedirect(reverse('sentry'))
team = pending_member.team
if not request.user.is_authenticated():
# Show login or register form
context = {
'team': team,
}
return render_to_response('sentry/teams/members/accept_invite.html', context, request)
    if not team.member_set.filter(
        user=request.user,
    ).exists():
        team.member_set.create(
            user=request.user,
            type=pending_member.type,
        )
pending_member.delete()
return HttpResponseRedirect(reverse('sentry', args=[team.slug]))
@csrf_protect
@has_team_access(MEMBER_OWNER)
def edit_team_member(request, team, member_id):
try:
member = team.member_set.get(pk=member_id)
except TeamMember.DoesNotExist:
return HttpResponseRedirect(reverse('sentry-manage-team', args=[team.slug]))
if not can_edit_team_member(request.user, member):
return HttpResponseRedirect(reverse('sentry'))
form = EditTeamMemberForm(team, request.POST or None, instance=member)
if form.is_valid():
member = form.save(commit=True)
return HttpResponseRedirect(request.path + '?success=1')
context = csrf(request)
context.update({
'member': member,
'team': team,
'form': form,
})
return render_to_response('sentry/teams/members/edit.html', context, request)
@csrf_protect
@has_team_access(MEMBER_OWNER)
def remove_team_member(request, team, member_id):
try:
member = team.member_set.get(pk=member_id)
except TeamMember.DoesNotExist:
return HttpResponseRedirect(reverse('sentry-manage-team', args=[team.slug]))
if member.user == team.owner:
return HttpResponseRedirect(reverse('sentry-manage-team', args=[team.slug]))
if not can_remove_team_member(request.user, member):
return HttpResponseRedirect(reverse('sentry'))
if request.POST:
member.delete()
return HttpResponseRedirect(reverse('sentry-manage-team', args=[team.slug]))
context = csrf(request)
context.update({
'member': member,
'team': team,
})
return render_to_response('sentry/teams/members/remove.html', context, request)
@csrf_protect
@has_team_access(MEMBER_OWNER)
def suspend_team_member(request, team, member_id):
try:
member = team.member_set.get(pk=member_id)
except TeamMember.DoesNotExist:
return HttpResponseRedirect(reverse('sentry-manage-team', args=[team.slug]))
if member.user == team.owner:
return HttpResponseRedirect(reverse('sentry-manage-team', args=[team.slug]))
result = plugins.first('has_perm', request.user, 'suspend_team_member', member)
if result is False and not request.user.has_perm('sentry.can_change_teammember'):
return HttpResponseRedirect(reverse('sentry'))
member.update(is_active=False)
return HttpResponseRedirect(reverse('sentry-manage-team', args=[team.slug]) + '?success=1')
@csrf_protect
@has_team_access(MEMBER_OWNER)
def restore_team_member(request, team, member_id):
try:
member = team.member_set.get(pk=member_id)
except TeamMember.DoesNotExist:
return HttpResponseRedirect(reverse('sentry-manage-team', args=[team.slug]))
if member.user == team.owner:
return HttpResponseRedirect(reverse('sentry-manage-team', args=[team.slug]))
result = plugins.first('has_perm', request.user, 'restore_team_member', member)
if result is False and not request.user.has_perm('sentry.can_change_teammember'):
return HttpResponseRedirect(reverse('sentry'))
member.update(is_active=True)
return HttpResponseRedirect(reverse('sentry-manage-team', args=[team.slug]) + '?success=1')
@csrf_protect
@has_team_access(MEMBER_OWNER)
def remove_pending_team_member(request, team, member_id):
try:
member = team.pending_member_set.get(pk=member_id)
except PendingTeamMember.DoesNotExist:
return HttpResponseRedirect(reverse('sentry-manage-team', args=[team.slug]))
result = plugins.first('has_perm', request.user, 'remove_team_member', member)
if result is False and not request.user.has_perm('sentry.can_remove_teammember'):
return HttpResponseRedirect(reverse('sentry'))
member.delete()
return HttpResponseRedirect(reverse('sentry-manage-team', args=[team.slug]) + '?success=1')
@csrf_protect
@has_team_access(MEMBER_OWNER)
def reinvite_pending_team_member(request, team, member_id):
try:
member = team.pending_member_set.get(pk=member_id)
except PendingTeamMember.DoesNotExist:
return HttpResponseRedirect(reverse('sentry-manage-team', args=[team.slug]))
result = plugins.first('has_perm', request.user, 'add_team_member', member)
if result is False and not request.user.has_perm('sentry.can_add_teammember'):
return HttpResponseRedirect(reverse('sentry'))
member.send_invite_email()
return HttpResponseRedirect(reverse('sentry-manage-team', args=[team.slug]) + '?success=1')
@csrf_protect
@has_team_access(MEMBER_OWNER)
def create_new_team_project(request, team):
from sentry.web.forms.projects import NewProjectAdminForm, NewProjectForm
if not can_create_projects(request.user, team):
return HttpResponseRedirect(reverse('sentry'))
if request.user.has_perm('sentry.can_add_project'):
form_cls = NewProjectAdminForm
initial = {
'owner': request.user.username,
}
else:
form_cls = NewProjectForm
initial = {}
form = form_cls(request.POST or None, initial=initial)
if form.is_valid():
project = form.save(commit=False)
project.team = team
if not project.owner:
project.owner = request.user
project.save()
return HttpResponseRedirect(reverse('sentry-manage-project', args=[project.slug]))
context = csrf(request)
context.update({
'form': form,
'team': team,
})
return render_to_response('sentry/teams/projects/new.html', context, request)
|
simmetria/sentry
|
src/sentry/web/frontend/teams.py
|
Python
|
bsd-3-clause
| 11,432
|
# Copyright (C) 2010 Simon Wessing
# TU Dortmund University
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import numpy
try:
# try importing the C version
from ._hypervolume import hv as hv
except ImportError:
# fallback on python version
from ._hypervolume import pyhv as hv
def hypervolume(front, **kargs):
"""Returns the index of the individual with the least the hypervolume
contribution. The provided *front* should be a set of non-dominated
individuals having each a :attr:`fitness` attribute.
"""
# Must use wvalues * -1 since hypervolume use implicit minimization
# And minimization in deap use max on -obj
wobj = numpy.array([ind.fitness.wvalues for ind in front]) * -1
ref = kargs.get("ref", None)
if ref is None:
ref = numpy.max(wobj, axis=0) + 1
def contribution(i):
# The contribution of point p_i in point set P
# is the hypervolume of P without p_i
return hv.hypervolume(numpy.concatenate((wobj[:i], wobj[i+1:])), ref)
# Parallelization note: Cannot pickle local function
    contrib_values = list(map(contribution, range(len(front))))
    # Select the maximum hypervolume value (corresponds to the minimum difference)
    return numpy.argmax(contrib_values)
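# Usage sketch (hedged: assumes a non-dominated `front` of individuals exposing
# `fitness.wvalues`, e.g. from a DEAP NSGA-II run; the reference point below is
# hypothetical):
#     worst = hypervolume(front, ref=numpy.array([11.0, 11.0]))
#     del front[worst]  # drop the individual contributing the least hypervolume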
def additive_epsilon(front, **kargs):
"""Returns the index of the individual with the least the additive epsilon
contribution. The provided *front* should be a set of non-dominated
individuals having each a :attr:`fitness` attribute.
.. warning::
This function has not been tested.
"""
wobj = numpy.array([ind.fitness.wvalues for ind in front]) * -1
def contribution(i):
mwobj = numpy.ma.array(wobj)
mwobj[i] = numpy.ma.masked
return numpy.min(numpy.max(wobj[i] - mwobj, axis=1))
    contrib_values = list(map(contribution, range(len(front))))
# Select the minimum contribution value
return numpy.argmin(contrib_values)
def multiplicative_epsilon(front, **kargs):
"""Returns the index of the individual with the least the multiplicative epsilon
contribution. The provided *front* should be a set of non-dominated
individuals having each a :attr:`fitness` attribute.
.. warning::
This function has not been tested.
"""
wobj = numpy.array([ind.fitness.wvalues for ind in front]) * -1
def contribution(i):
mwobj = numpy.ma.array(wobj)
mwobj[i] = numpy.ma.masked
return numpy.min(numpy.max(wobj[i] / mwobj, axis=1))
    contrib_values = list(map(contribution, range(len(front))))
# Select the minimum contribution value
return numpy.argmin(contrib_values)
__all__ = ["hypervolume", "additive_epsilon", "multiplicative_epsilon"]
|
GrimRanger/GeneticAlgorithm
|
helps/deap/deap-master/deap/tools/indicator.py
|
Python
|
mit
| 3,372
|
from __future__ import division, print_function, absolute_import
__all__ = ['fixed_quad','quadrature','romberg','trapz','simps','romb',
'cumtrapz','newton_cotes']
from scipy.special.orthogonal import p_roots
from scipy.special import gammaln
from numpy import sum, ones, add, diff, isinf, isscalar, \
asarray, real, trapz, arange, empty
import numpy as np
import math
import warnings
from scipy.lib.six.moves import xrange
class AccuracyWarning(Warning):
pass
def fixed_quad(func,a,b,args=(),n=5):
"""
Compute a definite integral using fixed-order Gaussian quadrature.
Integrate `func` from `a` to `b` using Gaussian quadrature of
order `n`.
Parameters
----------
func : callable
A Python function or method to integrate (must accept vector inputs).
a : float
Lower limit of integration.
b : float
Upper limit of integration.
args : tuple, optional
Extra arguments to pass to function, if any.
n : int, optional
Order of quadrature integration. Default is 5.
Returns
-------
    val : float
        Gaussian quadrature approximation to the integral
    none : None
        Statically returned value of None
See Also
--------
quad : adaptive quadrature using QUADPACK
dblquad : double integrals
tplquad : triple integrals
romberg : adaptive Romberg quadrature
quadrature : adaptive Gaussian quadrature
romb : integrators for sampled data
simps : integrators for sampled data
cumtrapz : cumulative integration for sampled data
ode : ODE integrator
odeint : ODE integrator
"""
[x,w] = p_roots(n)
x = real(x)
ainf, binf = map(isinf,(a,b))
if ainf or binf:
raise ValueError("Gaussian quadrature is only available for "
"finite limits.")
y = (b-a)*(x+1)/2.0 + a
return (b-a)/2.0*sum(w*func(y,*args),0), None
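# Quick sanity-check sketch (hedged: the value is an approximation):
#     >>> val, _ = fixed_quad(np.sin, 0, np.pi / 2, n=5)
#     >>> round(val, 6)  # exact integral of sin over [0, pi/2] is 1
#     1.0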
def vectorize1(func, args=(), vec_func=False):
"""Vectorize the call to a function.
This is an internal utility function used by `romberg` and
`quadrature` to create a vectorized version of a function.
If `vec_func` is True, the function `func` is assumed to take vector
arguments.
Parameters
----------
func : callable
User defined function.
args : tuple
Extra arguments for the function.
vec_func : bool
True if the function func takes vector arguments.
Returns
-------
vfunc : callable
A function that will take a vector argument and return the
result.
"""
if vec_func:
def vfunc(x):
return func(x, *args)
else:
def vfunc(x):
if isscalar(x):
return func(x, *args)
x = asarray(x)
# call with first point to get output type
y0 = func(x[0], *args)
n = len(x)
if hasattr(y0, 'dtype'):
output = empty((n,), dtype=y0.dtype)
else:
output = empty((n,), dtype=type(y0))
output[0] = y0
for i in xrange(1, n):
output[i] = func(x[i], *args)
return output
return vfunc
def quadrature(func, a, b, args=(), tol=1.49e-8, rtol=1.49e-8, maxiter=50,
vec_func=True):
"""
Compute a definite integral using fixed-tolerance Gaussian quadrature.
Integrate `func` from `a` to `b` using Gaussian quadrature
with absolute tolerance `tol`.
Parameters
----------
func : function
A Python function or method to integrate.
a : float
Lower limit of integration.
b : float
Upper limit of integration.
args : tuple, optional
Extra arguments to pass to function.
    tol, rtol : float, optional
Iteration stops when error between last two iterates is less than
`tol` OR the relative change is less than `rtol`.
maxiter : int, optional
Maximum number of iterations.
vec_func : bool, optional
True or False if func handles arrays as arguments (is
a "vector" function). Default is True.
Returns
-------
val : float
Gaussian quadrature approximation (within tolerance) to integral.
err : float
Difference between last two estimates of the integral.
See also
--------
romberg: adaptive Romberg quadrature
fixed_quad: fixed-order Gaussian quadrature
quad: adaptive quadrature using QUADPACK
dblquad: double integrals
tplquad: triple integrals
romb: integrator for sampled data
simps: integrator for sampled data
cumtrapz: cumulative integration for sampled data
ode: ODE integrator
odeint: ODE integrator
"""
vfunc = vectorize1(func, args, vec_func=vec_func)
val = np.inf
err = np.inf
for n in xrange(1, maxiter+1):
newval = fixed_quad(vfunc, a, b, (), n)[0]
err = abs(newval-val)
val = newval
if err < tol or err < rtol*abs(val):
break
else:
warnings.warn(
"maxiter (%d) exceeded. Latest difference = %e" % (maxiter, err),
AccuracyWarning)
return val, err
def tupleset(t, i, value):
l = list(t)
l[i] = value
return tuple(l)
def cumtrapz(y, x=None, dx=1.0, axis=-1, initial=None):
"""
Cumulatively integrate y(x) using the composite trapezoidal rule.
Parameters
----------
y : array_like
Values to integrate.
x : array_like, optional
The coordinate to integrate along. If None (default), use spacing `dx`
between consecutive elements in `y`.
dx : int, optional
Spacing between elements of `y`. Only used if `x` is None.
axis : int, optional
Specifies the axis to cumulate. Default is -1 (last axis).
initial : scalar, optional
If given, uses this value as the first value in the returned result.
Typically this value should be 0. Default is None, which means no
value at ``x[0]`` is returned and `res` has one element less than `y`
along the axis of integration.
Returns
-------
res : ndarray
The result of cumulative integration of `y` along `axis`.
If `initial` is None, the shape is such that the axis of integration
has one less value than `y`. If `initial` is given, the shape is equal
to that of `y`.
See Also
--------
numpy.cumsum, numpy.cumprod
quad: adaptive quadrature using QUADPACK
romberg: adaptive Romberg quadrature
quadrature: adaptive Gaussian quadrature
fixed_quad: fixed-order Gaussian quadrature
dblquad: double integrals
tplquad: triple integrals
romb: integrators for sampled data
ode: ODE integrators
odeint: ODE integrators
Examples
--------
>>> from scipy import integrate
>>> import matplotlib.pyplot as plt
>>> x = np.linspace(-2, 2, num=20)
>>> y = x
>>> y_int = integrate.cumtrapz(y, x, initial=0)
>>> plt.plot(x, y_int, 'ro', x, y[0] + 0.5 * x**2, 'b-')
>>> plt.show()
"""
y = asarray(y)
if x is None:
d = dx
else:
d = diff(x, axis=axis)
nd = len(y.shape)
slice1 = tupleset((slice(None),)*nd, axis, slice(1, None))
slice2 = tupleset((slice(None),)*nd, axis, slice(None, -1))
res = add.accumulate(d * (y[slice1] + y[slice2]) / 2.0, axis)
if initial is not None:
if not np.isscalar(initial):
raise ValueError("`initial` parameter should be a scalar.")
shape = list(res.shape)
shape[axis] = 1
res = np.concatenate([np.ones(shape, dtype=res.dtype) * initial, res],
axis=axis)
return res
def _basic_simps(y,start,stop,x,dx,axis):
nd = len(y.shape)
if start is None:
start = 0
step = 2
all = (slice(None),)*nd
slice0 = tupleset(all, axis, slice(start, stop, step))
slice1 = tupleset(all, axis, slice(start+1, stop+1, step))
slice2 = tupleset(all, axis, slice(start+2, stop+2, step))
    if x is None:  # Evenly spaced Simpson's rule.
result = add.reduce(dx/3.0 * (y[slice0]+4*y[slice1]+y[slice2]),
axis)
else:
# Account for possibly different spacings.
# Simpson's rule changes a bit.
h = diff(x,axis=axis)
sl0 = tupleset(all, axis, slice(start, stop, step))
sl1 = tupleset(all, axis, slice(start+1, stop+1, step))
h0 = h[sl0]
h1 = h[sl1]
hsum = h0 + h1
hprod = h0 * h1
h0divh1 = h0 / h1
result = add.reduce(hsum/6.0*(y[slice0]*(2-1.0/h0divh1) +
y[slice1]*hsum*hsum/hprod +
y[slice2]*(2-h0divh1)),axis)
return result
def simps(y, x=None, dx=1, axis=-1, even='avg'):
"""
Integrate y(x) using samples along the given axis and the composite
Simpson's rule. If x is None, spacing of dx is assumed.
If there are an even number of samples, N, then there are an odd
number of intervals (N-1), but Simpson's rule requires an even number
of intervals. The parameter 'even' controls how this is handled.
Parameters
----------
y : array_like
Array to be integrated.
x : array_like, optional
If given, the points at which `y` is sampled.
dx : int, optional
Spacing of integration points along axis of `y`. Only used when
`x` is None. Default is 1.
axis : int, optional
Axis along which to integrate. Default is the last axis.
    even : {'avg', 'first', 'last'}, optional
        'avg' : Average two results: 1) use the first N-2 intervals with
a trapezoidal rule on the last interval and 2) use the last
N-2 intervals with a trapezoidal rule on the first interval.
'first' : Use Simpson's rule for the first N-2 intervals with
a trapezoidal rule on the last interval.
'last' : Use Simpson's rule for the last N-2 intervals with a
trapezoidal rule on the first interval.
See Also
--------
quad: adaptive quadrature using QUADPACK
romberg: adaptive Romberg quadrature
quadrature: adaptive Gaussian quadrature
fixed_quad: fixed-order Gaussian quadrature
dblquad: double integrals
tplquad: triple integrals
romb: integrators for sampled data
cumtrapz: cumulative integration for sampled data
ode: ODE integrators
odeint: ODE integrators
Notes
-----
For an odd number of samples that are equally spaced the result is
exact if the function is a polynomial of order 3 or less. If
the samples are not equally spaced, then the result is exact only
if the function is a polynomial of order 2 or less.
"""
y = asarray(y)
nd = len(y.shape)
N = y.shape[axis]
last_dx = dx
first_dx = dx
returnshape = 0
if not x is None:
x = asarray(x)
if len(x.shape) == 1:
shapex = ones(nd)
shapex[axis] = x.shape[0]
saveshape = x.shape
returnshape = 1
x = x.reshape(tuple(shapex))
elif len(x.shape) != len(y.shape):
raise ValueError("If given, shape of x must be 1-d or the "
"same as y.")
if x.shape[axis] != N:
raise ValueError("If given, length of x along axis must be the "
"same as y.")
if N % 2 == 0:
val = 0.0
result = 0.0
slice1 = (slice(None),)*nd
slice2 = (slice(None),)*nd
if not even in ['avg', 'last', 'first']:
raise ValueError("Parameter 'even' must be 'avg', 'last', or 'first'.")
# Compute using Simpson's rule on first intervals
if even in ['avg', 'first']:
slice1 = tupleset(slice1, axis, -1)
slice2 = tupleset(slice2, axis, -2)
if not x is None:
last_dx = x[slice1] - x[slice2]
val += 0.5*last_dx*(y[slice1]+y[slice2])
result = _basic_simps(y,0,N-3,x,dx,axis)
# Compute using Simpson's rule on last set of intervals
if even in ['avg', 'last']:
slice1 = tupleset(slice1, axis, 0)
slice2 = tupleset(slice2, axis, 1)
if not x is None:
first_dx = x[tuple(slice2)] - x[tuple(slice1)]
val += 0.5*first_dx*(y[slice2]+y[slice1])
result += _basic_simps(y,1,N-2,x,dx,axis)
if even == 'avg':
val /= 2.0
result /= 2.0
result = result + val
else:
result = _basic_simps(y,0,N-2,x,dx,axis)
if returnshape:
x = x.reshape(saveshape)
return result
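# Usage sketch (hedged: illustrative values). With an odd number of equally
# spaced samples, Simpson's rule is exact for cubic polynomials:
#     >>> xs = np.linspace(0, 1, 9)
#     >>> round(simps(xs ** 3, xs), 12)  # exact integral of x**3 over [0, 1]
#     0.25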
def romb(y, dx=1.0, axis=-1, show=False):
"""
Romberg integration using samples of a function.
Parameters
----------
y : array_like
A vector of ``2**k + 1`` equally-spaced samples of a function.
dx : array_like, optional
The sample spacing. Default is 1.
axis : int, optional
The axis along which to integrate. Default is -1 (last axis).
show : bool, optional
When `y` is a single 1-D array, then if this argument is True
print the table showing Richardson extrapolation from the
samples. Default is False.
Returns
-------
romb : ndarray
The integrated result for `axis`.
See also
--------
quad : adaptive quadrature using QUADPACK
romberg : adaptive Romberg quadrature
quadrature : adaptive Gaussian quadrature
fixed_quad : fixed-order Gaussian quadrature
dblquad : double integrals
tplquad : triple integrals
simps : integrators for sampled data
cumtrapz : cumulative integration for sampled data
ode : ODE integrators
odeint : ODE integrators
"""
y = asarray(y)
nd = len(y.shape)
Nsamps = y.shape[axis]
Ninterv = Nsamps-1
n = 1
k = 0
while n < Ninterv:
n <<= 1
k += 1
if n != Ninterv:
raise ValueError("Number of samples must be one plus a "
"non-negative power of 2.")
R = {}
all = (slice(None),) * nd
slice0 = tupleset(all, axis, 0)
slicem1 = tupleset(all, axis, -1)
h = Ninterv*asarray(dx)*1.0
R[(1,1)] = (y[slice0] + y[slicem1])/2.0*h
slice_R = all
start = stop = step = Ninterv
for i in range(2,k+1):
start >>= 1
slice_R = tupleset(slice_R, axis, slice(start,stop,step))
step >>= 1
R[(i,1)] = 0.5*(R[(i-1,1)] + h*add.reduce(y[slice_R],axis))
for j in range(2,i+1):
R[(i,j)] = R[(i,j-1)] + \
(R[(i,j-1)]-R[(i-1,j-1)]) / ((1 << (2*(j-1)))-1)
h = h / 2.0
if show:
if not isscalar(R[(1,1)]):
print("*** Printing table only supported for integrals" +
" of a single data set.")
else:
try:
precis = show[0]
except (TypeError, IndexError):
precis = 5
try:
width = show[1]
except (TypeError, IndexError):
width = 8
formstr = "%" + str(width) + '.' + str(precis)+'f'
print("\n Richardson Extrapolation Table for Romberg Integration ")
print("====================================================================")
for i in range(1,k+1):
for j in range(1,i+1):
print(formstr % R[(i,j)], end=' ')
print()
print("====================================================================\n")
return R[(k,k)]
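# Usage sketch (hedged: approximate value). `romb` expects 2**k + 1 samples:
#     >>> xs = np.linspace(0, np.pi / 2, 2 ** 4 + 1)
#     >>> round(romb(np.sin(xs), dx=xs[1] - xs[0]), 6)  # integral of sin is 1
#     1.0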
# Romberg quadratures for numeric integration.
#
# Written by Scott M. Ransom <ransom@cfa.harvard.edu>
# last revision: 14 Nov 98
#
# Cosmetic changes by Konrad Hinsen <hinsen@cnrs-orleans.fr>
# last revision: 1999-7-21
#
# Adapted to scipy by Travis Oliphant <oliphant.travis@ieee.org>
# last revision: Dec 2001
def _difftrap(function, interval, numtraps):
"""
Perform part of the trapezoidal rule to integrate a function.
Assume that we had called difftrap with all lower powers-of-2
starting with 1. Calling difftrap only returns the summation
of the new ordinates. It does _not_ multiply by the width
of the trapezoids. This must be performed by the caller.
'function' is the function to evaluate (must accept vector arguments).
'interval' is a sequence with lower and upper limits
of integration.
'numtraps' is the number of trapezoids to use (must be a
power-of-2).
"""
if numtraps <= 0:
raise ValueError("numtraps must be > 0 in difftrap().")
elif numtraps == 1:
return 0.5*(function(interval[0])+function(interval[1]))
else:
numtosum = numtraps/2
h = float(interval[1]-interval[0])/numtosum
lox = interval[0] + 0.5 * h
points = lox + h * arange(0, numtosum)
s = sum(function(points),0)
return s
def _romberg_diff(b, c, k):
"""
Compute the differences for the Romberg quadrature corrections.
See Forman Acton's "Real Computing Made Real," p 143.
"""
tmp = 4.0**k
return (tmp * c - b)/(tmp - 1.0)
def _printresmat(function, interval, resmat):
# Print the Romberg result matrix.
i = j = 0
print('Romberg integration of', repr(function), end=' ')
print('from', interval)
print('')
print('%6s %9s %9s' % ('Steps', 'StepSize', 'Results'))
for i in range(len(resmat)):
print('%6d %9f' % (2**i, (interval[1]-interval[0])/(2.**i)), end=' ')
for j in range(i+1):
print('%9f' % (resmat[i][j]), end=' ')
print('')
print('')
print('The final result is', resmat[i][j], end=' ')
print('after', 2**(len(resmat)-1)+1, 'function evaluations.')
def romberg(function, a, b, args=(), tol=1.48e-8, rtol=1.48e-8, show=False,
divmax=10, vec_func=False):
"""
Romberg integration of a callable function or method.
Returns the integral of `function` (a function of one variable)
over the interval (`a`, `b`).
If `show` is 1, the triangular array of the intermediate results
will be printed. If `vec_func` is True (default is False), then
`function` is assumed to support vector arguments.
Parameters
----------
function : callable
Function to be integrated.
a : float
Lower limit of integration.
b : float
Upper limit of integration.
Returns
-------
results : float
Result of the integration.
Other Parameters
----------------
args : tuple, optional
Extra arguments to pass to function. Each element of `args` will
be passed as a single argument to `func`. Default is to pass no
extra arguments.
tol, rtol : float, optional
The desired absolute and relative tolerances. Defaults are 1.48e-8.
show : bool, optional
Whether to print the results. Default is False.
divmax : int, optional
Maximum order of extrapolation. Default is 10.
vec_func : bool, optional
        Whether `func` handles arrays as arguments (i.e. whether it is a
"vector" function). Default is False.
See Also
--------
fixed_quad : Fixed-order Gaussian quadrature.
quad : Adaptive quadrature using QUADPACK.
dblquad : Double integrals.
tplquad : Triple integrals.
romb : Integrators for sampled data.
simps : Integrators for sampled data.
cumtrapz : Cumulative integration for sampled data.
ode : ODE integrator.
odeint : ODE integrator.
References
----------
.. [1] 'Romberg's method' http://en.wikipedia.org/wiki/Romberg%27s_method
Examples
--------
Integrate a gaussian from 0 to 1 and compare to the error function.
>>> from scipy import integrate
>>> from scipy.special import erf
>>> gaussian = lambda x: 1/np.sqrt(np.pi) * np.exp(-x**2)
>>> result = integrate.romberg(gaussian, 0, 1, show=True)
Romberg integration of <function vfunc at ...> from [0, 1]
::
Steps StepSize Results
1 1.000000 0.385872
2 0.500000 0.412631 0.421551
4 0.250000 0.419184 0.421368 0.421356
8 0.125000 0.420810 0.421352 0.421350 0.421350
16 0.062500 0.421215 0.421350 0.421350 0.421350 0.421350
32 0.031250 0.421317 0.421350 0.421350 0.421350 0.421350 0.421350
The final result is 0.421350396475 after 33 function evaluations.
>>> print("%g %g" % (2*result, erf(1)))
0.842701 0.842701
"""
if isinf(a) or isinf(b):
raise ValueError("Romberg integration only available for finite limits.")
vfunc = vectorize1(function, args, vec_func=vec_func)
n = 1
interval = [a,b]
intrange = b-a
ordsum = _difftrap(vfunc, interval, n)
result = intrange * ordsum
resmat = [[result]]
err = np.inf
for i in xrange(1, divmax+1):
n = n * 2
ordsum = ordsum + _difftrap(vfunc, interval, n)
resmat.append([])
resmat[i].append(intrange * ordsum / n)
for k in range(i):
resmat[i].append(_romberg_diff(resmat[i-1][k], resmat[i][k], k+1))
result = resmat[i][i]
lastresult = resmat[i-1][i-1]
err = abs(result - lastresult)
if err < tol or err < rtol*abs(result):
break
else:
warnings.warn(
"divmax (%d) exceeded. Latest difference = %e" % (divmax, err),
AccuracyWarning)
if show:
_printresmat(vfunc, interval, resmat)
return result
# Coefficients for Newton-Cotes quadrature
#
# These are the points being used
# to construct the local interpolating polynomial
# a are the weights for Newton-Cotes integration
# B is the error coefficient.
# error in these coefficients grows as N gets larger.
# or as samples are closer and closer together
# You can use maxima to find these rational coefficients
# for equally spaced data using the commands
# a(i,N) := integrate(product(r-j,j,0,i-1) * product(r-j,j,i+1,N),r,0,N) / ((N-i)! * i!) * (-1)^(N-i);
# Be(N) := N^(N+2)/(N+2)! * (N/(N+3) - sum((i/N)^(N+2)*a(i,N),i,0,N));
# Bo(N) := N^(N+1)/(N+1)! * (N/(N+2) - sum((i/N)^(N+1)*a(i,N),i,0,N));
# B(N) := (if (mod(N,2)=0) then Be(N) else Bo(N));
#
# pre-computed for equally-spaced weights
#
# num_a, den_a, int_a, num_B, den_B = _builtincoeffs[N]
#
# a = num_a*array(int_a)/den_a
# B = num_B*1.0 / den_B
#
# integrate(f(x),x,x_0,x_N) = dx*sum(a*f(x_i)) + B*(dx)^(2k+3) f^(2k+2)(x*)
# where k = N // 2
#
_builtincoeffs = {
1:(1,2,[1,1],-1,12),
2:(1,3,[1,4,1],-1,90),
3:(3,8,[1,3,3,1],-3,80),
4:(2,45,[7,32,12,32,7],-8,945),
5:(5,288,[19,75,50,50,75,19],-275,12096),
6:(1,140,[41,216,27,272,27,216,41],-9,1400),
7:(7,17280,[751,3577,1323,2989,2989,1323,3577,751],-8183,518400),
8:(4,14175,[989,5888,-928,10496,-4540,10496,-928,5888,989],
-2368,467775),
9:(9,89600,[2857,15741,1080,19344,5778,5778,19344,1080,
15741,2857], -4671, 394240),
10:(5,299376,[16067,106300,-48525,272400,-260550,427368,
-260550,272400,-48525,106300,16067],
-673175, 163459296),
11:(11,87091200,[2171465,13486539,-3237113, 25226685,-9595542,
15493566,15493566,-9595542,25226685,-3237113,
13486539,2171465], -2224234463, 237758976000),
12:(1, 5255250, [1364651,9903168,-7587864,35725120,-51491295,
87516288,-87797136,87516288,-51491295,35725120,
-7587864,9903168,1364651], -3012, 875875),
13:(13, 402361344000,[8181904909, 56280729661, -31268252574,
156074417954,-151659573325,206683437987,
-43111992612,-43111992612,206683437987,
-151659573325,156074417954,-31268252574,
56280729661,8181904909], -2639651053,
344881152000),
14:(7, 2501928000, [90241897,710986864,-770720657,3501442784,
-6625093363,12630121616,-16802270373,19534438464,
-16802270373,12630121616,-6625093363,3501442784,
-770720657,710986864,90241897], -3740727473,
1275983280000)
}
def newton_cotes(rn, equal=0):
"""
Return weights and error coefficient for Newton-Cotes integration.
Suppose we have (N+1) samples of f at the positions
x_0, x_1, ..., x_N. Then an N-point Newton-Cotes formula for the
integral between x_0 and x_N is:
:math:`\\int_{x_0}^{x_N} f(x)dx = \\Delta x \\sum_{i=0}^{N} a_i f(x_i)
+ B_N (\\Delta x)^{N+2} f^{N+1} (\\xi)`
where :math:`\\xi \\in [x_0,x_N]` and :math:`\\Delta x = \\frac{x_N-x_0}{N}`
    is the average sample spacing.
If the samples are equally-spaced and N is even, then the error
term is :math:`B_N (\\Delta x)^{N+3} f^{N+2}(\\xi)`.
Parameters
----------
rn : int
The integer order for equally-spaced data or the relative positions of
the samples with the first sample at 0 and the last at N, where N+1 is
the length of `rn`. N is the order of the Newton-Cotes integration.
equal : int, optional
Set to 1 to enforce equally spaced data.
Returns
-------
an : ndarray
1-D array of weights to apply to the function at the provided sample
positions.
B : float
Error coefficient.
Notes
-----
Normally, the Newton-Cotes rules are used on smaller integration
regions and a composite rule is used to return the total integral.
"""
try:
N = len(rn)-1
if equal:
rn = np.arange(N+1)
elif np.all(np.diff(rn) == 1):
equal = 1
except:
N = rn
rn = np.arange(N+1)
equal = 1
if equal and N in _builtincoeffs:
na, da, vi, nb, db = _builtincoeffs[N]
return na*np.array(vi,float)/da, float(nb)/db
if (rn[0] != 0) or (rn[-1] != N):
raise ValueError("The sample positions must start at 0"
" and end at N")
yi = rn / float(N)
ti = 2.0*yi - 1
nvec = np.arange(0,N+1)
C = np.mat(ti**nvec[:,np.newaxis])
Cinv = C.I
# improve precision of result
Cinv = 2*Cinv - Cinv*C*Cinv
Cinv = 2*Cinv - Cinv*C*Cinv
Cinv = Cinv.A
vec = 2.0 / (nvec[::2]+1)
ai = np.dot(Cinv[:,::2],vec) * N/2
if (N % 2 == 0) and equal:
BN = N/(N+3.)
power = N+2
else:
BN = N/(N+2.)
power = N+1
BN = BN - np.dot(yi**power, ai)
p1 = power+1
fac = power*math.log(N) - gammaln(p1)
fac = math.exp(fac)
return ai, BN*fac
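# Usage sketch (hedged: the weights shown are the classical Simpson coefficients
# 1/3, 4/3, 1/3; the printed repr may differ slightly by numpy version):
#     >>> an, B = newton_cotes(2, equal=1)
#     >>> an
#     array([ 0.33333333,  1.33333333,  0.33333333])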
|
kmspriyatham/symath
|
scipy/scipy/integrate/quadrature.py
|
Python
|
apache-2.0
| 26,794
|
description = "check len() on cows"
filedata = """
{$
locals { x : [ 1, 2, 3],
y : [],
z : [ "bar", "jam", "jiggles", "wiggle" ],
d : { a : 1, b : 2, c: 3 } }
print (len (x), " ", len (y), " ", len (z), " ", len (d));
$}
"""
outcome = "3 0 4 3"
|
OkCupid/okws
|
test/regtest/cases/73.py
|
Python
|
gpl-2.0
| 291
|
#!/usr/bin/env python3
import unittest, inspect, os
from fn_helper import compare_output, strarray_setup
class TestJump(unittest.TestCase):
@unittest.skipIf('TRAVIS' in os.environ,
"FIXME: figure out why this doesn't work in travis")
def test_skip(self):
# See that we can jump with line number
curframe = inspect.currentframe()
cmds = ['step',
'jump %d' % (curframe.f_lineno+8),
'continue'] # 1
d = strarray_setup(cmds) # 2
d.core.start() # 3
############################## # 4...
x = 5
x = 6
x = 7
z = 8 # NOQA
##############################
d.core.stop(options={'remove': True})
out = ['-- x = 5', # x = 10 is shown in prompt, but not run.
'-- x = 6',
'-- z = 8 # NOQA']
compare_output(self, out, d, cmds)
self.assertEqual(5, x) # Make sure x = 6, 7 were skipped.
return
pass
if __name__ == '__main__':
unittest.main()
|
rocky/python3-trepan
|
test/functional/test-jump.py
|
Python
|
gpl-3.0
| 1,126
|
{
'name': 'Send Notifications By Emails',
'version': '1.0',
'category': 'notifications',
'depends': ['mail','sale','marketplace','auth_signup'],
'author': 'Genpex for Valeureux',
'website': 'https://www.wezer.org/',
'description': """
Features....
======================================
* 1
* 2
* 3
""",
'data': [
'edi/template_view.xml',
],
'installable': True,
'application': True,
}
|
Valeureux/wezer-exchange
|
__unreviewed__/community_send_notification/__openerp__.py
|
Python
|
agpl-3.0
| 452
|
import os
import glob
def getFnames(dir="../data/IdongDC/", dtype="apr", minimumsize=7000.):
    """Collect the *.<dtype> file names in `dir` that are larger than
    `minimumsize` bytes, then change back to the notebook directory."""
    # alternative data set: "../data/ChungCheonDC/"
    fnames = []
    os.chdir(dir)
    for fname in glob.glob("*." + dtype):
        if os.path.getsize(fname) > minimumsize:
            fnames.append(fname)
    os.chdir("../../notebook")
    return fnames
|
sgkang/DamGeophysics
|
codes/Readfiles.py
|
Python
|
mit
| 414
|
from version import VERSION
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'matroid'))
setup(
name='matroid',
version=VERSION,
description='Matroid API Python Library',
author='Matroid',
author_email='support@matroid.com',
url='https://github.com/matroid/matroid-python',
install_requires=['requests'],
packages=['matroid'],
use_2to3=True
)
|
matroid/matroid-python
|
setup.py
|
Python
|
mit
| 504
|
import unicodecsv
from django.http import HttpResponse
def export_as_csv_action(description="Export selected objects as CSV file",
fields=None, exclude=None, header=True):
"""
This function returns an export csv action
'fields' and 'exclude' work like in django ModelForm
'header' is whether or not to output the column names as the first row
"""
def export_as_csv(modeladmin, request, queryset):
"""
Generic csv export admin action.
based on http://djangosnippets.org/snippets/1697/
"""
opts = modeladmin.model._meta
if not fields:
field_names = [field.name for field in opts.fields]
else:
field_names = fields
response = HttpResponse(mimetype='text/csv')
response['Content-Disposition'] = 'attachment; filename=%s.csv' % unicode(opts).replace('.', '_')
writer = unicodecsv.writer(response, encoding='utf-8')
if header:
writer.writerow(field_names)
for obj in queryset:
row = [getattr(obj, field)() if callable(getattr(obj, field)) else getattr(obj, field) for field in field_names]
writer.writerow(row)
return response
export_as_csv.short_description = description
return export_as_csv
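# Usage sketch (hedged: `MyModelAdmin` and the field names are hypothetical):
#     from django.contrib import admin
#     class MyModelAdmin(admin.ModelAdmin):
#         actions = [export_as_csv_action("Export selected as CSV",
#                                          fields=['name', 'email'])]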
|
GrayAreaorg/InnovateSF-Map
|
repsf/map/actions.py
|
Python
|
gpl-3.0
| 1,320
|
# -*- coding: utf-8 -*-
from __future__ import print_function
import collections
import json
import os
import pytest
import curlrc
EXAMPLE_CONFIG = '''# output timing data
-s
-S
-o = /dev/null
-w = "url_effective: %{url_effective}\ntime_namelookup: %{time_namelookup}\ntime_connect: %{time_connect}\ntime_appconnect: %{time_appconnect}\ntime_pretransfer: %{time_pretransfer}\ntime_redirect: %{time_redirect}\ntime_starttransfer: %{time_starttransfer}\ntime_total: %{time_total}\n"
'''
@pytest.fixture(params=[
('url "curl.haxx.se"', ['url', '"curl.haxx.se"']),
('url = "curl.haxx.se"', ['url', '"curl.haxx.se"']),
('url="curl.haxx.se"', ['url', '"curl.haxx.se"']),
('url ="curl.haxx.se"', ['url', '"curl.haxx.se"']),
('url:"curl.haxx.se"', ['url', '"curl.haxx.se"']),
('url : "curl.haxx.se"', ['url', '"curl.haxx.se"']),
('url: "curl.haxx.se"', ['url', '"curl.haxx.se"']),
('url :"curl.haxx.se"', ['url', '"curl.haxx.se"']),
('-O', ['-O', True]),
])
def test_data(request):
return request.param
@pytest.fixture
def test_config(tmpdir):
p = tmpdir.join('time.rc')
p.write(EXAMPLE_CONFIG)
assert p.read() == EXAMPLE_CONFIG
return p
@pytest.fixture
def test_template():
return 'url_effective: %{url_effective}\ntime_namelookup: %{time_namelookup}\ntime_connect: %{time_connect}'
@pytest.fixture
def test_template_map():
template_map = collections.OrderedDict()
template_map['url_effective'] = '%{url_effective}'
template_map['time_namelookup'] = '%{time_namelookup}'
template_map['time_connect'] = '%{time_connect}'
return template_map
@pytest.fixture(params=[
(True, 'url_effective,time_namelookup,time_connect\n%{url_effective},%{time_namelookup},%{time_connect}\n'),
(False, '%{url_effective},%{time_namelookup},%{time_connect}\n'),
])
def test_template_as_csv(request):
return request.param
@pytest.fixture(params=[
(True, '''url_effective %{url_effective}
time_namelookup %{time_namelookup}
time_connect %{time_connect}
'''),
(False, '''%{url_effective}
%{time_namelookup}
%{time_connect}
'''),
])
def test_template_as_table(request):
return request.param
@pytest.fixture(params=[
(True, json.dumps(test_template_map(), indent=2) + '\n'),
(False, json.dumps(test_template_map()) + '\n'),
])
def test_template_as_json(request):
return request.param
class TestCurlConfig:
def test_split_lines(self, test_data):
(input, expected) = test_data
assert curlrc.CurlConfig.split_line(input) == expected
def test_from_file(self, test_config):
config = curlrc.CurlConfig.from_file(str(test_config))
assert config.name == 'time'
assert config.description == 'output timing data'
assert config.path == str(test_config)
assert config.template
class TestCurlTemplate:
def test_from_str(self, test_template, test_template_map):
tmpl = curlrc.CurlTemplate.from_str(test_template)
assert tmpl._map == test_template_map
def test_as_csv(self, test_template, test_template_as_csv):
tmpl = curlrc.CurlTemplate.from_str(test_template)
(input, expected) = test_template_as_csv
assert tmpl.as_csv(input) == expected
def test_as_table(self, test_template, test_template_as_table):
tmpl = curlrc.CurlTemplate.from_str(test_template)
(input, expected) = test_template_as_table
assert tmpl.as_table(input) == expected
def test_as_json(self, test_template, test_template_as_json):
tmpl = curlrc.CurlTemplate.from_str(test_template)
(input, expected) = test_template_as_json
assert tmpl.as_json(input) == expected
def test_curl_configs(tmpdir):
files = [
'example1.rc',
'example2.rc',
'example3'
]
for f in files:
p = tmpdir.join(f)
p.write('')
glob = [
str(tmpdir.join('example1.rc')),
str(tmpdir.join('example2.rc')),
]
assert curlrc.curl_configs(str(tmpdir)) == glob
|
benwebber/curlrc
|
test_curlrc.py
|
Python
|
mit
| 4,035
|
#
# Copyright (c) 2014, 2016, Oracle and/or its affiliates. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
"""
binlog_purge_rpl test.
"""
import rpl_admin
from binlog_rotate import binlog_range_files_exists
from mysql.utilities.exception import MUTLibError
_DEFAULT_MYSQL_OPTS = (
'"--log-bin=mysql-bin --report-host=localhost '
'--report-port={0} --bind-address=:: "'
)
def flush_server_logs_(server, times=5):
"""Flush logs on a server
server[in] the instance server where to flush logs on
times[in] number of times to flush the logs.
"""
# Flush master binary log
server.exec_query("SET sql_log_bin = 0")
for _ in range(times):
server.exec_query("FLUSH LOCAL BINARY LOGS")
server.exec_query("SET sql_log_bin = 1")
class test(rpl_admin.test):
"""test binlog purge Utility
This test runs the mysqlbinlogpurge utility on a known topology.
"""
master_datadir = None
slaves = None
mask_ports = []
def check_prerequisites(self):
return self.check_num_servers(1)
def setup(self):
self.res_fname = "result.txt"
# Spawn servers
self.server0 = self.servers.get_server(0)
mysqld = _DEFAULT_MYSQL_OPTS.format(self.servers.view_next_port())
self.server1 = self.servers.spawn_server(
"rep_master_binlog_purge", mysqld, True
)
mysqld = _DEFAULT_MYSQL_OPTS.format(self.servers.view_next_port())
self.server2 = self.servers.spawn_server(
"rep_slave1_binlog_purge", mysqld, True
)
mysqld = _DEFAULT_MYSQL_OPTS.format(self.servers.view_next_port())
self.server3 = self.servers.spawn_server(
"rep_slave2_binlog_purge", mysqld, True
)
mysqld = _DEFAULT_MYSQL_OPTS.format(self.servers.view_next_port())
self.server4 = self.servers.spawn_server(
"rep_slave3_binlog_purge", mysqld, True
)
# Get master datadir
rows = self.server1.exec_query("SHOW VARIABLES LIKE 'datadir'")
if not rows:
raise MUTLibError("Unable to determine datadir of cloned server "
"at {0}:{1}".format(self.server1.host,
self.server1.port))
self.master_datadir = rows[0][1]
# Reset spawned servers (clear binary log and GTID_EXECUTED set)
self.reset_master()
self.mask_ports.append(self.server1.port)
self.mask_ports.append(self.server2.port)
self.mask_ports.append(self.server3.port)
self.mask_ports.append(self.server4.port)
slaves_list = [self.server2, self.server3, self.server4]
rpl_admin.test.reset_topology(self, slaves_list=slaves_list,
master=self.server1)
self.slaves = [self.server2, self.server3, self.server4]
# Flush master binary log
flush_server_logs_(self.server1)
return True
def run(self):
test_num = 1
master_conn = self.build_connection_string(self.server1).strip(' ')
slave1_conn = self.build_connection_string(self.server2).strip(' ')
slave2_conn = self.build_connection_string(self.server3).strip(' ')
slave3_conn = self.build_connection_string(self.server4).strip(' ')
comment = "mysqlrplshow.py"
cmd_opts = (
"-r --discover-slaves-login={0} "
).format(master_conn.split('@')[0])
cmds = "mysqlrplshow.py --master={0} {1}".format(master_conn, cmd_opts)
self.run_test_case(0, cmds, comment)
cmd_str = "mysqlbinlogpurge.py --master={0} ".format(master_conn)
cmd_opts = "--discover-slaves={0} ".format(master_conn.split('@')[0])
comment = ("Test case {0} - mysqlbinlogpurge: with discover option"
"".format(test_num))
cmds = "{0} {1}".format(cmd_str, cmd_opts)
res = self.run_test_case(0, cmds, comment)
        # Binlog files 1 to 5 must not exist
if not res or True in binlog_range_files_exists((1, 5),
self.master_datadir,
debug=self.debug):
raise MUTLibError("{0}: failed".format(comment))
flush_server_logs_(self.server1)
test_num += 1
comment = ("Test case {0} - mysqlbinlogpurge: with discover "
"and verbose options".format(test_num))
cmds = ("{0} {1} {2} -vv"
"").format(cmd_str, cmd_opts, "binlog_purge{0}.log".format(1))
res = self.run_test_case(0, cmds, comment)
        # Binlog files 6 to 10 must not exist
if not res or True in binlog_range_files_exists((6, 10),
self.master_datadir,
debug=self.debug):
raise MUTLibError("{0}: failed".format(comment))
flush_server_logs_(self.server1)
rpl_admin.test.reset_topology(self, slaves_list=self.slaves,
master=self.server1)
cmd_opts = "--slaves={0},{1},{2} ".format(slave1_conn, slave2_conn,
slave3_conn)
test_num += 1
comment = ("Test case {0} - mysqlbinlogpurge: slaves option"
"".format(test_num))
cmds = "{0} {1} ".format(cmd_str, cmd_opts)
res = self.run_test_case(0, cmds, comment)
        # Binlog files 11 to 15 must not exist
if not res or True in binlog_range_files_exists((11, 15),
self.master_datadir,
debug=self.debug):
raise MUTLibError("{0}: failed".format(comment))
flush_server_logs_(self.server1)
rpl_admin.test.reset_topology(self, slaves_list=self.slaves,
master=self.server1)
cmd_opts = "--slaves={0},{1},{2} ".format(slave1_conn, slave2_conn,
slave3_conn)
test_num += 1
comment = ("Test case {0} - mysqlbinlogpurge: slaves and verbose "
"options".format(test_num))
cmds = "{0} {1} -vv".format(cmd_str, cmd_opts)
res = self.run_test_case(0, cmds, comment)
        # Binlog files 16 to 20 must not exist
if not res or True in binlog_range_files_exists((16, 20),
self.master_datadir,
debug=self.debug):
raise MUTLibError("{0}: failed".format(comment))
flush_server_logs_(self.server1)
rpl_admin.test.reset_topology(self, slaves_list=self.slaves,
master=self.server1)
test_num += 1
comment = ("Test case {0} - mysqlbinlogpurge: --binlog "
"option".format(test_num))
opts = "{0} --binlog={1}".format(cmd_opts, "mysql-bin.0000023")
cmds = "{0} {1}".format(cmd_str, opts)
res = self.run_test_case(0, cmds, comment)
        # Binlog files 21 to 22 must not exist
if not res or True in binlog_range_files_exists((21, 22),
self.master_datadir,
debug=self.debug):
raise MUTLibError("{0}: failed".format(comment))
flush_server_logs_(self.server1)
rpl_admin.test.reset_topology(self, slaves_list=self.slaves,
master=self.server1)
test_num += 1
comment = ("Test case {0} - mysqlbinlogpurge: --binlog "
"option and verbose".format(test_num))
opts = "{0} --binlog={1}".format(cmd_opts, "mysql-bin.27")
cmds = "{0} {1} -v".format(cmd_str, opts)
res = self.run_test_case(0, cmds, comment)
        # Binlog files 23 to 26 must not exist
if not res or True in binlog_range_files_exists((23, 26),
self.master_datadir,
debug=self.debug):
raise MUTLibError("{0}: failed".format(comment))
# Mask out non-deterministic data
rpl_admin.test.do_masks(self)
p_n = 0
for port in self.mask_ports:
p_n += 1
self.replace_substring(str(port), "PORT{0}".format(p_n))
io_reading = "I/O thread is currently reading: mysql-bin.{0}"
sql_executed = "executed by the SQL thread: mysql-bin.{0}"
# Mask binlog file numbers range, limited by calls to flush logs
for num in range(5, 62):
self.replace_substring(io_reading.format(repr(num).zfill(6)),
io_reading.format("XXXXXX"))
self.replace_substring(sql_executed.format(repr(num).zfill(6)),
sql_executed.format("XXXXXX"))
self.replace_result(
"# File position of the I/O thread:",
"# File position of the I/O thread: XXX\n"
)
return True
def get_result(self):
return self.compare(__name__, self.results)
def record(self):
return self.save_result_file(__name__, self.results)
def cleanup(self):
# Kill the servers that are only for this test.
kill_list = ['rep_master_binlog_purge',
'rep_slave1_binlog_purge',
'rep_slave2_binlog_purge',
'rep_slave3_binlog_purge']
return self.kill_server_list(kill_list)
|
mysql/mysql-utilities
|
mysql-test/suite/replication/t/binlog_purge_rpl.py
|
Python
|
gpl-2.0
| 10,386
|
from django.contrib.comments.models import Comment
from regressiontests.comment_tests.models import Author, Article
from regressiontests.comment_tests.tests import CommentTestCase
class CommentModelTests(CommentTestCase):
def testSave(self):
for c in self.createSomeComments():
self.failIfEqual(c.submit_date, None)
def testUserProperties(self):
c1, c2, c3, c4 = self.createSomeComments()
self.assertEqual(c1.name, "Joe Somebody")
self.assertEqual(c2.email, "jsomebody@example.com")
self.assertEqual(c3.name, "Frank Nobody")
self.assertEqual(c3.url, "http://example.com/~frank/")
self.assertEqual(c1.user, None)
self.assertEqual(c3.user, c4.user)
class CommentManagerTests(CommentTestCase):
def testInModeration(self):
"""Comments that aren't public are considered in moderation"""
c1, c2, c3, c4 = self.createSomeComments()
c1.is_public = False
c2.is_public = False
c1.save()
c2.save()
moderated_comments = list(Comment.objects.in_moderation().order_by("id"))
self.assertEqual(moderated_comments, [c1, c2])
def testRemovedCommentsNotInModeration(self):
"""Removed comments are not considered in moderation"""
c1, c2, c3, c4 = self.createSomeComments()
c1.is_public = False
c2.is_public = False
c2.is_removed = True
c1.save()
c2.save()
moderated_comments = list(Comment.objects.in_moderation())
self.assertEqual(moderated_comments, [c1])
def testForModel(self):
c1, c2, c3, c4 = self.createSomeComments()
article_comments = list(Comment.objects.for_model(Article).order_by("id"))
author_comments = list(Comment.objects.for_model(Author.objects.get(pk=1)))
self.assertEqual(article_comments, [c1, c3])
self.assertEqual(author_comments, [c2])
|
Smarsh/django
|
tests/regressiontests/comment_tests/tests/model_tests.py
|
Python
|
bsd-3-clause
| 1,920
|
import vrep
import numpy
import time
import sys
import matplotlib.pyplot as plt
hello = numpy.genfromtxt('hello.csv',delimiter=',',skip_header=3,usecols = (1, 2, 3),dtype=numpy.float)
object_name = 'feltPen_invisible'
# object_name = 'Sphere'
plt.figure()
plt.plot(-hello[300:,0],-hello[300:,1])
plt.show()
hello = numpy.array(hello[300:523+300,[0,1]])
hello[:,0] = -(hello[:,0]-hello[0,0])/2.0
hello[:,1] = -(hello[:,1]-hello[0,1])/2.0
print 'Program started'
vrep.simxFinish(-1) # just in case, close all opened connections
clientID=vrep.simxStart('127.0.0.1',19999,True,True,5000,5)
if clientID!=-1:
print 'Connected to remote API server'
res,objs=vrep.simxGetObjects(clientID,vrep.sim_handle_all,vrep.simx_opmode_oneshot_wait)
if res==vrep.simx_return_ok:
print 'Number of objects in the scene: ',len(objs)
res,v0=vrep.simxGetObjectHandle(clientID,object_name,vrep.simx_opmode_oneshot_wait)
print "Ok, I'm in!"
# Reads the pen position X,Y,Z
res,pos=vrep.simxGetObjectPosition(clientID,v0,vrep.sim_handle_parent,vrep.simx_opmode_oneshot_wait)
print "Initial Position", pos
# sys.exit()
i = 0
for hi in hello:
time.sleep(0.05)
cmd_pos = numpy.array(pos)+numpy.concatenate([hi,[0]]) # Sums X and Y
print "Cmd Position: ", i, hi, cmd_pos
i+=1
# Sets the new position
res = vrep.simxSetObjectPosition(clientID,v0,vrep.sim_handle_parent,cmd_pos,vrep.simx_opmode_oneshot_wait)
if res!=0:
vrep.simxFinish(clientID)
print 'Remote API function call returned with error code: ',res
break
cmd_pos = numpy.array(pos)+numpy.concatenate([hello[-1],[0.05]]) # lift the pen
res = vrep.simxSetObjectPosition(clientID,v0,vrep.sim_handle_parent,cmd_pos,vrep.simx_opmode_oneshot_wait)
time.sleep(0.05)
final_pos = numpy.concatenate([[-0.6019,0.2206],[cmd_pos[2]]])
dif_pos = final_pos - cmd_pos
dif_pos = dif_pos/10.0
for i in range(10):
cmd_pos = cmd_pos+dif_pos#numpy.concatenate([[-0.6019,0.2206],[cmd_pos[2]]]) # lift the pen
res = vrep.simxSetObjectPosition(clientID,v0,vrep.sim_handle_parent,cmd_pos,vrep.simx_opmode_oneshot_wait)
time.sleep(0.05)
else:
print 'Remote API function call returned with error code: ',res
vrep.simxFinish(clientID)
else:
print 'Failed connecting to remote API server'
print 'Program ended'
|
ricardodeazambuja/BaxterHello_V-REP
|
v-rep_python/hello_writer.py
|
Python
|
cc0-1.0
| 2,546
|
"""0MQ polling related functions and classes."""
# Copyright (C) PyZMQ Developers
# Distributed under the terms of the Modified BSD License.
import zmq
from zmq.backend import zmq_poll
from .constants import POLLIN, POLLOUT, POLLERR
#-----------------------------------------------------------------------------
# Polling related methods
#-----------------------------------------------------------------------------
class Poller(object):
"""A stateful poll interface that mirrors Python's built-in poll."""
sockets = None
_map = {}
def __init__(self):
self.sockets = []
self._map = {}
def __contains__(self, socket):
return socket in self._map
def register(self, socket, flags=POLLIN|POLLOUT):
"""p.register(socket, flags=POLLIN|POLLOUT)
Register a 0MQ socket or native fd for I/O monitoring.
register(s,0) is equivalent to unregister(s).
Parameters
----------
socket : zmq.Socket or native socket
A zmq.Socket or any Python object having a ``fileno()``
method that returns a valid file descriptor.
flags : int
The events to watch for. Can be POLLIN, POLLOUT or POLLIN|POLLOUT.
If `flags=0`, socket will be unregistered.
"""
if flags:
if socket in self._map:
idx = self._map[socket]
self.sockets[idx] = (socket, flags)
else:
idx = len(self.sockets)
self.sockets.append((socket, flags))
self._map[socket] = idx
elif socket in self._map:
            # unregister sockets registered with no events
self.unregister(socket)
else:
# ignore new sockets with no events
pass
def modify(self, socket, flags=POLLIN|POLLOUT):
"""Modify the flags for an already registered 0MQ socket or native fd."""
self.register(socket, flags)
def unregister(self, socket):
"""Remove a 0MQ socket or native fd for I/O monitoring.
Parameters
----------
socket : Socket
The socket instance to stop polling.
"""
idx = self._map.pop(socket)
self.sockets.pop(idx)
# shift indices after deletion
for socket, flags in self.sockets[idx:]:
self._map[socket] -= 1
def poll(self, timeout=None):
"""Poll the registered 0MQ or native fds for I/O.
Parameters
----------
timeout : float, int
The timeout in milliseconds. If None, no `timeout` (infinite). This
is in milliseconds to be compatible with ``select.poll()``.
Returns
-------
events : list of tuples
The list of events that are ready to be processed.
This is a list of tuples of the form ``(socket, event)``, where the 0MQ Socket
or integer fd is the first element, and the poll event mask (POLLIN, POLLOUT) is the second.
It is common to call ``events = dict(poller.poll())``,
which turns the list of tuples into a mapping of ``socket : event``.
"""
if timeout is None or timeout < 0:
timeout = -1
elif isinstance(timeout, float):
timeout = int(timeout)
return zmq_poll(self.sockets, timeout=timeout)
def select(rlist, wlist, xlist, timeout=None):
"""select(rlist, wlist, xlist, timeout=None) -> (rlist, wlist, xlist)
    Return the result of poll as lists of sockets ready for r/w/exception.
This has the same interface as Python's built-in ``select.select()`` function.
Parameters
----------
timeout : float, int, optional
The timeout in seconds. If None, no timeout (infinite). This is in seconds to be
compatible with ``select.select()``.
rlist : list of sockets/FDs
sockets/FDs to be polled for read events
wlist : list of sockets/FDs
sockets/FDs to be polled for write events
xlist : list of sockets/FDs
sockets/FDs to be polled for error events
Returns
-------
(rlist, wlist, xlist) : tuple of lists of sockets (length 3)
Lists correspond to sockets available for read/write/error events respectively.
"""
if timeout is None:
timeout = -1
    # Convert from sec -> ms for zmq_poll.
    # zmq_poll accepts a 3.x style timeout in ms
timeout = int(timeout*1000.0)
if timeout < 0:
timeout = -1
sockets = []
for s in set(rlist + wlist + xlist):
flags = 0
if s in rlist:
flags |= POLLIN
if s in wlist:
flags |= POLLOUT
if s in xlist:
flags |= POLLERR
sockets.append((s, flags))
return_sockets = zmq_poll(sockets, timeout)
rlist, wlist, xlist = [], [], []
for s, flags in return_sockets:
if flags & POLLIN:
rlist.append(s)
if flags & POLLOUT:
wlist.append(s)
if flags & POLLERR:
xlist.append(s)
return rlist, wlist, xlist
#-----------------------------------------------------------------------------
# Symbols to export
#-----------------------------------------------------------------------------
__all__ = [ 'Poller', 'select' ]
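#-----------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module): exercises the
# register/poll pattern described in the docstrings above. The PAIR sockets
# and the 'inproc://poll-example' endpoint name are arbitrary example choices.
#-----------------------------------------------------------------------------
def _poll_example(timeout=1000):
    """Poll a connected inproc PAIR pair once and return a {socket: event} dict."""
    ctx = zmq.Context.instance()
    a = ctx.socket(zmq.PAIR)
    b = ctx.socket(zmq.PAIR)
    a.bind('inproc://poll-example')
    b.connect('inproc://poll-example')
    try:
        poller = Poller()
        poller.register(a, POLLIN)            # watch a for readability only
        poller.register(b, POLLIN | POLLOUT)  # watch b for both directions
        b.send(b'ping')                       # makes a readable
        # dict() turns the (socket, event) tuples into a socket -> event map,
        # as suggested in Poller.poll's docstring above
        return dict(poller.poll(timeout))
    finally:
        a.close()
        b.close()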
|
dash-dash/pyzmq
|
zmq/sugar/poll.py
|
Python
|
bsd-3-clause
| 5,324
|
from ledstripcontroller import LedStripController
from encoder import Encoder
from machine import Pin  # Pin.PULL_UP below needs this; assumes MicroPython's machine module
def main():
enc = Encoder(pin_clk=13, pin_dt=12, pin_mode=Pin.PULL_UP,
min_val=40, max_val=1020, clicks=1, accel=5)
enc._value = 1020
controller = LedStripController(enc)
controller.run()
if __name__ == '__main__':
main()
|
HowManyOliversAreThere/led_strip
|
main.py
|
Python
|
mit
| 343
|
import os
import platform
import unittest
import sys
import time
try:
from tests_pydevd_python import debugger_unittest
except:
    sys.path.append(os.path.dirname(os.path.dirname(__file__)))
    from tests_pydevd_python import debugger_unittest  # retry now that the parent dir is on sys.path
IS_CPYTHON = platform.python_implementation() == 'CPython'
IS_PY36 = sys.version_info[0] == 3 and sys.version_info[1] == 6
TEST_CYTHON = os.getenv('PYDEVD_USE_CYTHON', None) == 'YES'
class WriterThreadStepAndResume(debugger_unittest.AbstractWriterThread):
TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case10.py')
def run(self):
self.start_socket()
self.write_add_breakpoint(10, 'Method2')
self.write_add_breakpoint(2, 'Method1')
self.write_make_initial_run()
thread_id, frame_id, line, suspend_type = self.wait_for_breakpoint_hit_with_suspend_type('111', True)
assert line == 10, 'Expected return to be in line 10, was: %s' % line
assert suspend_type == "frame_eval", 'Expected suspend type to be "frame_eval", but was: %s' % suspend_type
self.write_step_over(thread_id)
thread_id, frame_id, line, suspend_type = self.wait_for_breakpoint_hit_with_suspend_type('108', True)
assert line == 11, 'Expected return to be in line 11, was: %s' % line
# we use tracing debugger while stepping
assert suspend_type == "trace", 'Expected suspend type to be "trace", but was: %s' % suspend_type
self.write_run_thread(thread_id)
thread_id, frame_id, line, suspend_type = self.wait_for_breakpoint_hit_with_suspend_type('111', True)
assert line == 2, 'Expected return to be in line 2, was: %s' % line
# we enable frame evaluation debugger after "Resume" command
assert suspend_type == "frame_eval", 'Expected suspend type to be "frame_eval", but was: %s' % suspend_type
self.write_run_thread(thread_id)
self.finished_ok = True
class WriterThreadStepReturn(debugger_unittest.AbstractWriterThread):
TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case56.py')
def run(self):
self.start_socket()
self.write_add_breakpoint(2, 'Call2')
self.write_make_initial_run()
thread_id, frame_id, suspend_type = self.wait_for_breakpoint_hit_with_suspend_type()
assert suspend_type == "frame_eval", 'Expected suspend type to be "frame_eval", but was: %s' % suspend_type
self.write_get_frame(thread_id, frame_id)
self.write_step_return(thread_id)
thread_id, frame_id, line, suspend_type = self.wait_for_breakpoint_hit_with_suspend_type('109', True)
assert line == 8, 'Expecting it to go to line 8. Went to: %s' % line
# Step return uses temporary breakpoint, so we use tracing debugger
assert suspend_type == "trace", 'Expected suspend type to be "trace", but was: %s' % suspend_type
self.write_step_in(thread_id)
thread_id, frame_id, line, suspend_type = self.wait_for_breakpoint_hit_with_suspend_type('107', True)
# goes to line 4 in jython (function declaration line)
assert line in (4, 5), 'Expecting it to go to line 4 or 5. Went to: %s' % line
# we use tracing debugger for stepping
assert suspend_type == "trace", 'Expected suspend type to be "trace", but was: %s' % suspend_type
self.write_run_thread(thread_id)
self.finished_ok = True
class WriterThreadAddLineBreakWhileRun(debugger_unittest.AbstractWriterThread):
TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case3.py')
def run(self):
self.start_socket()
self.write_make_initial_run()
time.sleep(.5)
breakpoint_id = self.write_add_breakpoint(4, '')
thread_id, frame_id, line, suspend_type = self.wait_for_breakpoint_hit_with_suspend_type('111', True)
assert line == 4, 'Expected return to be in line 4, was: %s' % line
# we use tracing debugger if breakpoint was added while running
assert suspend_type == "trace", 'Expected suspend type to be "trace", but was: %s' % suspend_type
self.write_get_frame(thread_id, frame_id)
self.write_run_thread(thread_id)
thread_id, frame_id, line, suspend_type = self.wait_for_breakpoint_hit_with_suspend_type('111', True)
assert line == 4, 'Expected return to be in line 4, was: %s' % line
# we still use tracing debugger
assert suspend_type == "trace", 'Expected suspend type to be "trace", but was: %s' % suspend_type
self.write_get_frame(thread_id, frame_id)
self.write_remove_breakpoint(breakpoint_id)
self.write_run_thread(thread_id)
self.finished_ok = True
class WriterThreadExceptionBreak(debugger_unittest.AbstractWriterThread):
TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case10.py')
def run(self):
self.start_socket()
self.write_add_breakpoint(10, 'Method2')
self.write_add_exception_breakpoint_with_policy('IndexError', "1", "0", "0")
self.write_make_initial_run()
time.sleep(.5)
thread_id, frame_id, line, suspend_type = self.wait_for_breakpoint_hit_with_suspend_type('111', True)
assert line == 10, 'Expected return to be in line 10, was: %s' % line
# we use tracing debugger if there are exception breakpoints
assert suspend_type == "trace", 'Expected suspend type to be "trace", but was: %s' % suspend_type
self.write_run_thread(thread_id)
self.finished_ok = True
class WriterThreadAddExceptionBreakWhileRunning(debugger_unittest.AbstractWriterThread):
TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case10.py')
def run(self):
self.start_socket()
self.write_add_breakpoint(10, 'Method2')
self.write_add_breakpoint(2, 'Method1')
# self.write_add_exception_breakpoint_with_policy('IndexError', "1", "0", "0")
self.write_make_initial_run()
time.sleep(.5)
thread_id, frame_id, line, suspend_type = self.wait_for_breakpoint_hit_with_suspend_type('111', True)
assert line == 10, 'Expected return to be in line 10, was: %s' % line
# we use tracing debugger if there are exception breakpoints
assert suspend_type == "frame_eval", 'Expected suspend type to be "frame_eval", but was: %s' % suspend_type
self.write_add_exception_breakpoint_with_policy('IndexError', "1", "0", "0")
self.write_run_thread(thread_id)
thread_id, frame_id, line, suspend_type = self.wait_for_breakpoint_hit_with_suspend_type('111', True)
assert line == 2, 'Expected return to be in line 2, was: %s' % line
# we use tracing debugger if exception break was added
assert suspend_type == "trace", 'Expected suspend type to be "trace", but was: %s' % suspend_type
self.write_run_thread(thread_id)
self.finished_ok = True
class WriterThreadAddTerminationExceptionBreak(debugger_unittest.AbstractWriterThread):
TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case10.py')
def run(self):
self.start_socket()
self.write_add_breakpoint(10, 'Method2')
self.write_add_exception_breakpoint_with_policy('IndexError', "0", "1", "0")
self.write_make_initial_run()
time.sleep(.5)
thread_id, frame_id, line, suspend_type = self.wait_for_breakpoint_hit_with_suspend_type('111', True)
assert line == 10, 'Expected return to be in line 10, was: %s' % line
# we can use frame evaluation with exception breakpoint with "On termination" suspend policy
assert suspend_type == "frame_eval", 'Expected suspend type to be "frame_eval", but was: %s' % suspend_type
self.write_run_thread(thread_id)
self.finished_ok = True
@unittest.skipIf(not IS_PY36 or not IS_CPYTHON or not TEST_CYTHON, reason='Test requires CPython 3.6 with PYDEVD_USE_CYTHON=YES')
class TestFrameEval(unittest.TestCase, debugger_unittest.DebuggerRunner):
def get_command_line(self):
return [sys.executable, '-u']
def test_step_and_resume(self):
self.check_case(WriterThreadStepAndResume)
def test_step_return(self):
self.check_case(WriterThreadStepReturn)
def test_add_break_while_running(self):
self.check_case(WriterThreadAddLineBreakWhileRun)
def test_exc_break(self):
self.check_case(WriterThreadExceptionBreak)
def test_add_exc_break_while_running(self):
self.check_case(WriterThreadAddExceptionBreakWhileRunning)
def test_add_termination_exc_break(self):
self.check_case(WriterThreadAddTerminationExceptionBreak)
|
goodwinnk/intellij-community
|
python/helpers/pydev/tests_pydevd_python/test_frame_eval_and_tracing.py
|
Python
|
apache-2.0
| 8,663
|
from .InfoJobs import InfoJobs
|
diego-bernardes/PyTIJobs
|
sites/__init__.py
|
Python
|
gpl-3.0
| 31
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Entry'
db.create_table('blog_entry', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('author', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
('title', self.gf('django.db.models.fields.CharField')(max_length=255)),
('slug', self.gf('django.db.models.fields.SlugField')(max_length=255, db_index=True)),
('featured', self.gf('django.db.models.fields.BooleanField')(default=False)),
('pub_date', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
('status', self.gf('django.db.models.fields.IntegerField')(default=2)),
('excerpt', self.gf('django.db.models.fields.TextField')(blank=True)),
('body', self.gf('django.db.models.fields.TextField')()),
))
db.send_create_signal('blog', ['Entry'])
# Adding M2M table for field site on 'Entry'
db.create_table('blog_entry_site', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('entry', models.ForeignKey(orm['blog.entry'], null=False)),
('site', models.ForeignKey(orm['sites.site'], null=False))
))
db.create_unique('blog_entry_site', ['entry_id', 'site_id'])
def backwards(self, orm):
# Deleting model 'Entry'
db.delete_table('blog_entry')
# Removing M2M table for field site on 'Entry'
db.delete_table('blog_entry_site')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'blog.entry': {
'Meta': {'ordering': "['-pub_date']", 'object_name': 'Entry'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'body': ('django.db.models.fields.TextField', [], {}),
'excerpt': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'pub_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'site': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sites.Site']", 'symmetrical': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'db_index': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['blog']
|
vikingco/django-blog
|
blog/migrations/0001_initial.py
|
Python
|
bsd-3-clause
| 6,353
|
# -*- coding: utf-8 -*-
# parts of pygchem (Python interface for GEOS-Chem Chemistry Transport Model)
#
# Copyright (C) 2013-2014 Christoph Keller, Benoît Bovy
# see license.txt for more details
#
"""
Read / Write Harvard-NASA Emissions Component (HEMCO) settings files.
"""
import re
import itertools
from types import StringTypes
import numpy as np
_cfg_section_wrap = '#' * 84
_cfg_comment = '#'
_cfg_extsetting_prefix = '--> '
# specific rules to convert a given string in the expected type and vice-versa
def _read_rule_srcdim(s):
if s not in ['xy', 'xyz']:
raise ValueError("invalid srcDim '{0}'".format(s))
return len(s)
def _read_rule_ext_setting(s):
    if s in ['true', 'false']:
        # bool('False') would be True, so compare against the string itself
        return s == 'true'
try:
return int(s)
except Exception:
pass
try:
return float(s)
except Exception:
pass
return s
_cfg_read_rules = {
'try_none': lambda s: None if s == '-' else s,
'int': lambda s: int(s),
    'try_bool': lambda s: (s == 'true') if s in ['true', 'false'] else s,
'on_off': lambda s: True if s == 'on' else False,
'ScalIDs': lambda s: [] if s == '-' else [int(i) for i in s.split('/')],
'srcDim': _read_rule_srcdim,
'Species': lambda s: [] if s == '-' else [sp for sp in s.split('/')],
'mwindow': lambda s: [int(i) for i in s.split('/')],
'Oper': lambda s: s,
'ext_setting_name': lambda s: s.replace('--> ', ''),
'ext_setting_val': _read_rule_ext_setting,
}
_cfg_write_rules = {
'try_none': lambda v: '-' if v is None else str(v),
'int': lambda v: str(v),
    'try_bool': lambda v: str(v).lower() if isinstance(v, bool) else str(v),
'on_off': lambda v: 'on' if v else 'off',
'ScalIDs': lambda v: '/'.join((str(i) for i in v)) if len(v) else '-',
'srcDim': lambda v: 'xyz'[:v],
'Species': lambda v: '/'.join((str(sp) for sp in v)) if len(v) else '-',
'mwindow': lambda v: '/'.join((str(i) for i in v)),
'ext_setting_name': lambda v: ' --> {0}'.format(v),
    'ext_setting_val': lambda v: str(v).lower() if isinstance(v, bool) else str(v),
}
# field separators per section
_cfg_fields_sep = {
'SETTINGS': ':',
'BASE EMISSIONS': ' ',
'SCALE FACTORS': ' ',
'MASKS': ' ',
'EXTENSION SWITCHES': (':', ' '), # nested splitting (':' then ' ')
'EXTENSION SWITCHES SETTINGS': ':',
'EXTENSION DATA': ' ',
}
# line fields specifications as a list of (field name, conversion rule),
# per section
_cfg_fields_spec = {
'SETTINGS': (
('key', None),
('value', 'try_bool'),
),
'BASE EMISSIONS': (
('eid', 'int'),
('name', None),
('filename', 'try_none'),
('var_name', 'try_none'),
('timeslicer', 'try_none'),
('ndim', 'srcDim'),
('units', None),
('species', None),
('fids', 'ScalIDs'),
('category', 'int'),
('hierarchy', 'int')
),
'SCALE FACTORS': (
('fid', 'int'),
('name', None),
('filename', 'try_none'),
('var_name', 'try_none'),
('timeslicer', 'try_none'),
('ndim', 'srcDim'),
('units', None),
('operator', 'int')
),
'MASKS': (
('fid', 'int'),
('name', None),
('filename', 'try_none'),
('var_name', 'try_none'),
('timeslicer', 'try_none'),
('ndim', 'srcDim'),
('units', None),
('operator', 'int'),
('mask_window', 'mwindow')
),
'EXTENSION SWITCHES': (
('eid', 'int'),
('name', None),
('enabled', 'on_off'),
('species', 'Species')
),
'EXTENSION SWITCHES SETTINGS': (
('key', 'ext_setting_name'),
('value', 'ext_setting_val'),
),
}
_cfg_fields_spec['EXTENSION DATA'] = _cfg_fields_spec['BASE EMISSIONS']
def _get_sections_lines(filename):
"""
Returns a dictionary containing, for each config section, a list of
(line number, raw line content) representing all configuration lines
to be parsed (comment lines and empty lines excluded).
"""
with open(filename) as cfg_file:
lines = {None: []}
section = None
line_number = 0
for line in cfg_file:
line_number += 1
line = line.strip()
if not line or line.startswith(_cfg_comment):
continue
if 'BEGIN SECTION' in line:
section = line.replace('BEGIN SECTION ', '')
if not section in lines.keys():
lines[section] = []
continue
if 'END SECTION' in line:
section = None
continue
lines[section].append((line_number, line))
if len(lines[None]):
raise IOError("Error while reading the HEMCO settings file '{0}': "
"some settings are defined outside of 'SECTION' blocks"
.format(filename))
return lines
def _parse_line(line, fields_spec, fields_sep=' '):
"""
Parse the line `line` of the configuration file, given the field
specifications `fields_spec` and the field split character(s) `fields_sep`.
Returns the fields as a dictionary.
"""
# recursive line split (handle the case of EXTENSION SWITCHES)
if isinstance(fields_sep, StringTypes):
fields_sep = [fields_sep]
else:
fields_sep = list(fields_sep)
def recursive_split(strs, seps):
# regex (exclude any whitespace(s), tab(s)... after separator)
sep_re = seps.pop(0) + '[\s]*'
splitted_strs = itertools.chain.from_iterable(
re.split(sep_re, s) for s in strs
)
splitted_strs = [s.strip() for s in splitted_strs]
if len(seps):
return recursive_split(splitted_strs, seps)
return splitted_strs
fields_str = recursive_split([line], fields_sep)
if len(fields_str) != len(fields_spec):
raise ValueError("invalid line format ('{0}')".format(fields_str))
fields = {}
for value, spec in zip(fields_str, fields_spec):
key, rule_name = spec
if rule_name is not None:
rule = _cfg_read_rules.get(rule_name)
value = rule(value)
fields[key] = value
return fields
def _parse_section_lines(lines, section, filename,
alt_fields_spec=None, alt_fields_sep=None):
"""parse lines for a given section and return a list of records (dicts)."""
records = []
fields_spec = _cfg_fields_spec[section]
fields_sep = _cfg_fields_sep[section]
def try_parse(ln, l, spec, sep):
try:
records.append(_parse_line(l, spec, sep))
except Exception as e:
msg = e.args[0]
raise IOError("Error while reading the HEMCO settings file '{0}' "
"at line {1}: {2}"
.format(filename, ln, msg))
for (line_number, line) in lines[section]:
try:
try_parse(line_number, line, fields_spec, fields_sep)
except IOError:
            # try the alternative field specifications and separator(s), if
            # provided, before raising the error (needed for extension settings)
if alt_fields_sep is not None and alt_fields_spec is not None:
try_parse(line_number, line, alt_fields_spec, alt_fields_sep)
return records
def _add_datafield(efields):
"""
Add the 'datafield' key (with appropriate value) for all given emission
fields.
"""
for ef in efields:
if not len(re.sub('[0-9.\-eE/ \t]+', '', ef['filename'])):
vals = [float(f) for f in ef['filename'].split('/')]
if len(vals) == 1:
ef['datafield'] = np.array(vals[0])
else:
ef['datafield'] = np.array(vals)
ef['filename'] = None
else:
ef['datafield'] = None
def read_hemco(filename):
"""
Read a Harvard-NASA Emissions Component (HEMCO) settings file.
Parameters
----------
filename : string
name of (path to) the HEMCO settings file.
Returns
-------
settings
A dictionary with global HEMCO settings.
base_emission_fields
A list of base emission fields (metadata as dictionaries).
scale_factors
A list of scale factors (metadata as dictionaries).
masks
A list of masks metadata (metadata as dictionaries).
extensions
A list of HEMCO extensions (metadata as dictionaries).
extension_data
A list of extension data fields (metadata as dictionaries).
"""
cfg_lines = _get_sections_lines(filename)
base_emission_fields = _parse_section_lines(cfg_lines, 'BASE EMISSIONS',
filename)
scale_factors = _parse_section_lines(cfg_lines, 'SCALE FACTORS', filename)
masks = _parse_section_lines(cfg_lines, 'MASKS', filename)
extension_data = _parse_section_lines(cfg_lines, 'EXTENSION DATA', filename)
_add_datafield(base_emission_fields + scale_factors +
masks + extension_data)
# settings
settings_list = _parse_section_lines(cfg_lines, 'SETTINGS', filename)
settings = dict((s['key'], s['value']) for s in settings_list)
# extensions
extensions = _parse_section_lines(
cfg_lines, 'EXTENSION SWITCHES', filename,
alt_fields_spec=_cfg_fields_spec['EXTENSION SWITCHES SETTINGS'],
alt_fields_sep=_cfg_fields_sep['EXTENSION SWITCHES SETTINGS']
)
ext_setting_keys = set(
f[0] for f in _cfg_fields_spec['EXTENSION SWITCHES SETTINGS']
)
last_ext = None
keep_idx = []
for idx, ext in enumerate(extensions):
if set(ext.keys()) == ext_setting_keys:
last_ext['settings'][ext['key']] = ext['value']
else:
keep_idx.append(idx)
last_ext = ext
last_ext['settings'] = dict()
extensions = [extensions[idx] for idx in keep_idx]
return (settings, base_emission_fields, scale_factors, masks,
extensions, extension_data)
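# Illustrative usage sketch (not part of the original module); the file name
# 'HEMCO_Config.rc' below is a hypothetical example:
#
# settings, base_fields, scale_factors, masks, extensions, ext_data = \
#     read_hemco('HEMCO_Config.rc')
# enabled_extensions = [ext['name'] for ext in extensions if ext['enabled']]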
# TODO: write_hemco functions
# def write_config_file(emis_setup, filename, style='HEMCO'):
# """
# Write emission setup into file for re-use later on.
#
# Parameters
# ----------
# emis_setup : :class:`Emissions` object.
# The emissions setup object (to be written into file).
# filename : (string)
# File name (full path) of HEMCO configuration file
# style : (string)
# file type. 'HEMCO' for a HEMCO-style file, 'ESMF' for
# an ESMF-style file.
#
# History
# ----------
# 20140224 ckeller: Initial version
#
# TODO: Add ESMF writing capability.
# """
#
# # open file
# outfile = open(filename, 'w')
# outfile.write('### HEMCO INPUT FILE ###\n')
# outfile.write(' \n')
#
# # write file
# write_settings(emis_setup, outfile, style)
# write_base_emissions(emis_setup, outfile, style)
# write_scale_factors(emis_setup, outfile, style)
# write_masks(emis_setup, outfile, style)
# write_extensions(emis_setup, outfile, style)
#
# # close file
# outfile.write('### END OF HEMCO INPUT FILE ###\n')
# outfile.close()
#
#
# def add_header(outfile, section):
# outfile.write('\n')
# outfile.write(_cfg_section_wrap + '\n')
# outfile.write('BEGIN SECTION ' + section + '\n')
# outfile.write(_cfg_section_wrap + '\n')
#
#
# def add_footer(outfile, section):
# outfile.write('\n')
# outfile.write('END SECTION ' + section + '\n')
#
#
# def field2file(field, outfile, emis_setup, style, extension=None,
# prevFile=None, prevVar=None, prevTime=None):
# """
# Writes a GCField object (field) to a configuration file.
#
# Parameters
# ----------
# field : :class:`GCField` object.
# field that shall be written to file.
# outfile :
# Already opened file to write into.
# emis_setup : :class:`Emissions` object.
# The emissions setup object (to be written into file).
# style : (string)
# file type. 'HEMCO' for a HEMCO-style file, 'ESMF' for
# an ESMF-style file.
# extension : :class:`EmissionExt` object.
# The extension the field belongs to. None for scale factors.
# prevFile, prevVar, prevTime : (optional) file name, variable and
# time stamp of the previously written file. If the attributes
# of the passed field match these attributes, all file values
# (name, variable, time, unit, dimension) are set to invalid
# value (-). This causes HEMCO to use the same file data object
# for all fields.
# History
# ----------
# 20140224 ckeller: Initial version
# 20140517 ckeller: Added previous file attributes
# """
#
# # base field or scale field?
# if pyhemco.emissions.BEF_ATTR_NAME in field.attributes.keys():
# isBase = True
# attrs = field.attributes[pyhemco.emissions.BEF_ATTR_NAME]
# else:
# isBase = False
# attrs = field.attributes[pyhemco.emissions.SF_ATTR_NAME]
#
# # extract information to be written into file
#
# # field name
# fldname = str(field.name)
#
# # source file information
# srcfile = str(field.filename)
# srcvar = str(field.var_name)
# srcunit = str(field.unit)
# srctime = str(attrs['timestamp'])
#
# # eventually write data directly into file:
# if srcfile == '-':
# srcfile = '/'.join([str(i) for i in list(field.data)])
# srctime = '-'
# srcvar = '-'
#
# # data dimension
# if field.ndim == 2:
# srcdim = 'xy'
# elif field.ndim == 3:
# srcdim = 'xyz'
# else:
# raise ValueError('Illegal source dimension in ' + fldname)
#
# # if file information are equivalent to the previous ones (passed as
# # arguments), set all file information to invalid values (-). HEMCO
# # will then use the same file data object for all emission fields.
# if srcfile == prevFile and srcvar == prevVar and srctime == prevTime:
# srcfile = '-'
# srcvar = '-'
# srctime = '-'
# srcdim = '-'
# srcunit = '-'
#
# # BASE FIELDS
# if isBase:
# fid = str(extension.eid)
# spec = str(attrs['species'])
# cat = str(attrs['category'])
# hier = str(attrs['hierarchy'])
#
# scalIDs = [str(scal.attributes[pyhemco.emissions.SF_ATTR_NAME]['fid'])
# for scal in attrs['scale_factors']]
# if len(scalIDs) > 0:
# scalIDs = '/'.join(scalIDs)
# else:
# scalIDs = '-'
#
# fldstr = ' '.join(
# [fid, fldname, srcfile, srcvar, srctime, srcdim, srcunit, spec,
# scalIDs, cat, hier])
#
# # SCALE FACTORS / MASKS
# else:
# fid = str(attrs['fid'])
# oper = str(attrs['operator'])
# if 'mul' in oper:
# oper = '1'
# elif 'div' in oper:
# oper = '-1'
# elif 'sqr' in oper:
# oper = '2'
# fldstr = ' '.join(
# [fid, fldname, srcfile, srcvar, srctime, srcdim, srcunit, oper])
#
# # for masks:
# if 'mask_window' in attrs.keys():
# data = '/'.join(str(i) for i in attrs['mask_window'])
# fldstr = ' '.join([fldstr, data])
#
# # write to file
# outfile.write(fldstr + '\n')
#
#
# def write_settings(emis_setup, outfile, style):
# """
# Write emission setup into a configuration file.
#
# Parameters
# ----------
# emis_setup : :class:`Emissions` object.
# The emissions setup object (to be written into file).
# outfile :
# Already opened file to write into.
# style : (string)
# file type. 'HEMCO' for a HEMCO-style file, 'ESMF' for
# an ESMF-style file.
#
# History
# ----------
# 20140224 ckeller: Initial version
# """
#
# add_header(outfile, 'SETTINGS')
# core_ext = emis_setup.extensions.get_object(name='Core')
#
# for k, v in core_ext.settings.items():
# outfile.write(str(k) + ': ' + str(v) + '\n')
#
# add_footer(outfile, 'SETTINGS')
#
#
# def write_base_emissions(emis_setup, outfile, style):
# """
# Write base emission information into a configuration file.
#
# Parameters
# ----------
# emis_setup : :class:`Emissions` object.
# The emissions setup object (from which information is taken from).
# outfile :
# Already opened file to write into.
# style : (string)
# file type. 'HEMCO' for a HEMCO-style file, 'ESMF' for
# an ESMF-style file.
#
# History
# ----------
# 20140224 ckeller: Initial version
# """
#
# add_header(outfile, 'BASE EMISSIONS')
# outfile.write(
# '# ExtNr Name sourceFile sourceVar sourceTime SrcDim SrcUnit Species ScalIDs Cat Hier\n')
#
# core_ext = emis_setup.extensions.get_object(name='Core')
# prevFile = ''
# prevVar = ''
# prevTime = ''
# for iField in core_ext.base_emission_fields:
# field2file(iField, outfile, emis_setup, style, core_ext, prevFile,
# prevVar, prevTime)
# prevFile = str(iField.filename)
# prevVar = str(iField.var_name)
# prevTime = str(
# iField.attributes[pyhemco.emissions.BEF_ATTR_NAME]['timestamp'])
#
# add_footer(outfile, 'BASE EMISSIONS')
#
#
# def write_scale_factors(emis_setup, outfile, style):
# """
# Write scale factor information into a configuration file.
#
# Parameters
# ----------
# emis_setup : :class:`Emissions` object.
# The emissions setup object (from which information is taken from).
# outfile :
# Already opened file to write into.
# style : (string)
# file type. 'HEMCO' for a HEMCO-style file, 'ESMF' for
# an ESMF-style file.
#
# History
# ----------
# 20140224 ckeller: Initial version
# """
#
# add_header(outfile, 'SCALE FACTORS')
# outfile.write(
# '# ScalID Name sourceFile sourceVar sourceTime SrcDim SrcUnit Oper Scalar\n')
#
# for iField in emis_setup.scale_factors.sorted(
# key=lambda scal: scal.attributes[pyhemco.emissions.SF_ATTR_NAME][
# 'fid']):
# if not iField.is_mask():
# field2file(iField, outfile, emis_setup, style)
#
# add_footer(outfile, 'SCALE FACTORS')
#
#
# def write_masks(emis_setup, outfile, style):
# """
# Write mask information into a configuration file.
#
# Parameters
# ----------
# emis_setup : :class:`Emissions` object.
# The emissions setup object (from which information is taken from).
# outfile :
# Already opened file to write into.
# style : (string)
# file type. 'HEMCO' for a HEMCO-style file, 'ESMF' for
# an ESMF-style file.
#
# History
# ----------
# 20140224 ckeller: Initial version
# """
#
# add_header(outfile, 'MASKS')
# outfile.write(
# '# ScalID Name sourceFile sourceVar sourceTime SrcDim SrcUnit Oper Lon1/Lat1/Lon2/Lat2\n')
#
# for iField in emis_setup.scale_factors.sorted(
# key=lambda scal: scal.attributes[pyhemco.emissions.SF_ATTR_NAME][
# 'fid']):
# if iField.is_mask():
# field2file(iField, outfile, emis_setup, style)
#
# add_footer(outfile, 'MASKS')
#
#
# def write_extensions(emis_setup, outfile, style):
# """
# Writes extension information into a configuration file.
#
# Parameters
# ----------
# emis_setup : :class:`Emissions` object.
# The emissions setup object (from which information is taken from).
# outfile :
# Already opened file to write into.
# style : (string)
# file type. 'HEMCO' for a HEMCO-style file, 'ESMF' for
# an ESMF-style file.
#
# History
# ----------
# 20140224 ckeller: Initial version
# """
#
# # Extension switches
# add_header(outfile, 'EXTENSION SWITCHES')
# outfile.write('# ExtNr ExtName on/off Species\n')
# for iExt in emis_setup.extensions:
# # skip core extension: all settings already added to sections settings.
# if iExt.eid == 0:
# continue
# if iExt.enabled:
# onoff = 'on'
# else:
# onoff = 'off'
# species = '/'.join(iExt.species)
# fldstr = ' '.join([str(iExt.eid), str(iExt.name), ' :', onoff, species])
# outfile.write(fldstr + '\n')
# for k, v in iExt.settings.items():
# outfile.write(
# ' ' + _cfg_extsetting_prefix + str(k) + ': ' + str(v) + '\n')
# add_footer(outfile, 'EXTENSION SWITCHES')
#
# # Extension data
# add_header(outfile, 'EXTENSION DATA')
# outfile.write(
# '# ExtNr Name sourceFile sourceVar sourceTime SrcDim SrcUnit Species ScalIDs Cat Hier\n')
# for iExt in emis_setup.extensions:
# # skip core extension: all fields already added to section base emissions.
# if iExt.eid == 0:
# continue
# for iField in iExt.base_emission_fields:
# field2file(iField, outfile, emis_setup, style, iExt)
# add_footer(outfile, 'EXTENSION DATA')
#
#
# def strlist_to_fields(raw_vals, fields_spec, none_val=None):
# """
# Get fields from a list of strings, given fields specification.
#
# Parameters
# ----------
# raw_vals : [string, string, ...]
# Values of fields that have to be convert to the correct type.
# fields_spec : ((string, callable), (string, callable), ...)
# Sequence of 2-length tuples (name, type) defining the name
# and type - or any callable that return the expected type - for each
# field.
# none_val : string or other
# Identifies a None value in raw_vals.
#
# Returns
# -------
# tuple of 2-length tuples
# (field name, field value).
#
# """
# fields = []
#     for val, spec in zip(raw_vals, fields_spec):
# fname, ftype = spec
# if val == none_val:
# fields.append((fname, None))
# else:
# fields.append((fname, ftype(val)))
# return tuple(fields)
#
#
# def fields_to_strlist(fields, none_str=''):
# """
# Set a list of strings, given fields specification.
#
# Parameters
# ----------
# fields : ((string, val, callable), (string, val, callable), ...)
# (value, formatter) for each field. Formatter is a callable for
# converting the field value to a string.
# none_str : string
# None value format.
#
# Returns
# -------
# list of fields values as strings.
#
# """
# return [ffmt(fval) if fval is not None else none_str
# for fval, ffmt in fields]
|
benbovy/PyGChem
|
pygchem/io/hemco.py
|
Python
|
gpl-3.0
| 22,916
|
class Solution(object):
def countPrimes(self, n):
"""
:type n: int
:rtype: int
"""
"""
A naive way to solve the problem is to judge every number from
the range[m,n] isprime, isprime can be optimal take n^(1/2)
so the total is n^(1.5).
The better way is using space to mark the possible solution
"""
isprime=[True]*n
i=2
while i*i<n:
if not isprime[i]:
i+=1
continue
j=i*i
while j<n:
isprime[j]=False
j+=i
i+=1
cnt=0
for k in xrange(2,n):
if isprime[k]: cnt+=1
return cnt
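# Illustrative check (not part of the original file): the primes below 10
# are 2, 3, 5 and 7, so Solution().countPrimes(10) should return 4.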
|
Tanych/CodeTracking
|
204-Count-Primes/solution.py
|
Python
|
mit
| 751
|
"""Stores configuration in the database."""
import logging
from typing import Sequence
from plumeria.core.storage import migrations
from plumeria.transport import Server
from plumeria.core.scoped_config.manager import ScopedConfigProvider, ScopedValue
logger = logging.getLogger(__name__)
class DatabaseConfig(ScopedConfigProvider):
def __init__(self, pool):
self.pool = pool
async def init(self):
await migrations.migrate("config", __name__)
async def get_all(self, server: Server) -> Sequence[ScopedValue]:
results = []
async with self.pool.acquire() as conn:
async with conn.cursor() as cur:
await cur.execute(
"SELECT transport, server, channel, section, `key`, value "
"FROM config_values "
"WHERE transport = %s AND server = %s",
(server.transport.id, server.id))
for row in await cur.fetchall():
row = list(row)
if not len(row[2]):
row[2] = None
results.append(ScopedValue(*row))
return results
async def save(self, sv: ScopedValue):
async with self.pool.acquire() as conn:
async with conn.cursor() as cur:
await cur.execute(
"REPLACE INTO config_values "
"(transport, server, channel, section, `key`, value) "
"VALUES "
"(%s, %s, %s, %s, %s, %s)",
(sv.transport, sv.server, sv.channel if sv.channel is not None else "", sv.section, sv.key,
sv.value))
async def delete(self, sv: ScopedValue):
async with self.pool.acquire() as conn:
async with conn.cursor() as cur:
await cur.execute(
"DELETE FROM config_values "
"WHERE transport = %s AND server = %s AND channel = %s AND section = %s AND `key` = %s",
(sv.transport, sv.server, sv.channel if sv.channel is not None else "", sv.section, sv.key))
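# Illustrative usage sketch (not part of the original module). It assumes an
# aiomysql-style async connection pool and that ScopedValue accepts the six
# columns in the order used above (transport, server, channel, section, key,
# value); the literal values are hypothetical examples.
#
# async def example(pool):
#     config = DatabaseConfig(pool)
#     await config.init()
#     await config.save(
#         ScopedValue("discord", "1234567890", None, "greeting", "message", "hello"))
#     values = await config.get_all(server)  # server: a plumeria Server object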
|
sk89q/Plumeria
|
plumeria/core/scoped_config/storage.py
|
Python
|
mit
| 2,133
|
#!/usr/bin/env python
'''
test_frontier.py - fairly narrow tests of frontier management, requires
rethinkdb running on localhost
Copyright (C) 2017-2018 Internet Archive
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import argparse
import datetime
import logging
import time
import doublethink
import pytest
import brozzler
args = argparse.Namespace()
args.log_level = logging.INFO
brozzler.cli.configure_logging(args)
def test_rethinkdb_up():
'''Checks that rethinkdb is listening and looks sane.'''
rr = doublethink.Rethinker(db='rethinkdb') # built-in db
tbls = rr.table_list().run()
assert len(tbls) > 10
def test_basics():
rr = doublethink.Rethinker(db='ignoreme')
frontier = brozzler.RethinkDbFrontier(rr)
job_conf = {'seeds': [
{'url': 'http://example.com'}, {'url': 'https://example.org/'}]}
job = brozzler.new_job(frontier, job_conf)
assert job.id
assert job.starts_and_stops
assert job.starts_and_stops[0]['start']
assert job == {
'id': job.id,
'conf': {
'seeds': [
{'url': 'http://example.com'},
{'url': 'https://example.org/'}
]
},
'status': 'ACTIVE',
'starts_and_stops': [
{
'start': job.starts_and_stops[0]['start'],
'stop': None
}
]
}
sites = sorted(list(frontier.job_sites(job.id)), key=lambda x: x.seed)
assert len(sites) == 2
assert sites[0].starts_and_stops[0]['start']
assert sites[1].starts_and_stops[0]['start']
assert sites[0] == {
'claimed': False,
'id': sites[0].id,
'job_id': job.id,
'last_claimed': brozzler.EPOCH_UTC,
'last_disclaimed': brozzler.EPOCH_UTC,
'scope': {'accepts': [{'ssurt': 'com,example,//http:/'}]},
'seed': 'http://example.com',
'starts_and_stops': [
{
'start': sites[0].starts_and_stops[0]['start'],
'stop': None
}
],
'status': 'ACTIVE'
}
assert sites[1] == {
'claimed': False,
'id': sites[1].id,
'job_id': job.id,
'last_claimed': brozzler.EPOCH_UTC,
'last_disclaimed': brozzler.EPOCH_UTC,
'scope': {'accepts': [{'ssurt': 'org,example,//https:/'}]},
'seed': 'https://example.org/',
'starts_and_stops': [
{
'start': sites[1].starts_and_stops[0]['start'],
'stop': None,
},
],
'status': 'ACTIVE',
}
pages = list(frontier.site_pages(sites[0].id))
assert len(pages) == 1
assert pages[0] == {
'brozzle_count': 0,
'claimed': False,
'hops_from_seed': 0,
'hops_off': 0,
'id': brozzler.Page.compute_id(sites[0].id, 'http://example.com'),
'job_id': job.id,
'needs_robots_check': True,
'priority': 1000,
'site_id': sites[0].id,
'url': 'http://example.com',
}
pages = list(frontier.site_pages(sites[1].id))
assert len(pages) == 1
assert pages[0] == {
'brozzle_count': 0,
'claimed': False,
'hops_from_seed': 0,
'hops_off': 0,
'id': brozzler.Page.compute_id(sites[1].id, 'https://example.org/'),
'job_id': job.id,
'needs_robots_check': True,
'priority': 1000,
'site_id': sites[1].id,
'url': 'https://example.org/',
}
# test "brozzled" parameter of frontier.site_pages
assert len(list(frontier.site_pages(sites[1].id))) == 1
assert len(list(frontier.site_pages(sites[1].id, brozzled=True))) == 0
assert len(list(frontier.site_pages(sites[1].id, brozzled=False))) == 1
pages[0].brozzle_count = 1
pages[0].save()
assert len(list(frontier.site_pages(sites[1].id))) == 1
assert len(list(frontier.site_pages(sites[1].id, brozzled=True))) == 1
assert len(list(frontier.site_pages(sites[1].id, brozzled=False))) == 0
pages[0].brozzle_count = 32819
pages[0].save()
assert len(list(frontier.site_pages(sites[1].id))) == 1
assert len(list(frontier.site_pages(sites[1].id, brozzled=True))) == 1
assert len(list(frontier.site_pages(sites[1].id, brozzled=False))) == 0
def test_resume_job():
'''
Tests that the right stuff gets twiddled in rethinkdb when we "start" and
"finish" crawling a job. Doesn't actually crawl anything.
'''
# vagrant brozzler-worker isn't configured to look at the "ignoreme" db
rr = doublethink.Rethinker(db='ignoreme')
frontier = brozzler.RethinkDbFrontier(rr)
job_conf = {'seeds': [{'url': 'http://example.com/'}]}
job = brozzler.new_job(frontier, job_conf)
assert len(list(frontier.job_sites(job.id))) == 1
site = list(frontier.job_sites(job.id))[0]
assert job.status == 'ACTIVE'
assert len(job.starts_and_stops) == 1
assert job.starts_and_stops[0]['start']
assert job.starts_and_stops[0]['stop'] is None
assert site.status == 'ACTIVE'
assert len(site.starts_and_stops) == 1
assert site.starts_and_stops[0]['start']
assert site.starts_and_stops[0]['stop'] is None
frontier.finished(site, 'FINISHED')
job.refresh()
assert job.status == 'FINISHED'
assert len(job.starts_and_stops) == 1
assert job.starts_and_stops[0]['start']
assert job.starts_and_stops[0]['stop']
assert job.starts_and_stops[0]['stop'] > job.starts_and_stops[0]['start']
assert site.status == 'FINISHED'
assert len(site.starts_and_stops) == 1
assert site.starts_and_stops[0]['start']
assert site.starts_and_stops[0]['stop']
assert site.starts_and_stops[0]['stop'] > site.starts_and_stops[0]['start']
frontier.resume_site(site)
job.refresh()
assert job.status == 'ACTIVE'
assert len(job.starts_and_stops) == 2
assert job.starts_and_stops[1]['start']
assert job.starts_and_stops[1]['stop'] is None
assert site.status == 'ACTIVE'
assert len(site.starts_and_stops) == 2
assert site.starts_and_stops[1]['start']
assert site.starts_and_stops[1]['stop'] is None
frontier.finished(site, 'FINISHED')
job.refresh()
assert job.status == 'FINISHED'
assert len(job.starts_and_stops) == 2
assert job.starts_and_stops[1]['start']
assert job.starts_and_stops[1]['stop']
assert job.starts_and_stops[1]['stop'] > job.starts_and_stops[1]['start']
assert site.status == 'FINISHED'
assert len(site.starts_and_stops) == 2
assert site.starts_and_stops[1]['start']
assert site.starts_and_stops[1]['stop']
assert site.starts_and_stops[1]['stop'] > site.starts_and_stops[1]['start']
# resuming a job == resuming all of its sites
frontier.resume_job(job)
site = list(frontier.job_sites(job.id))[0]
assert job.status == 'ACTIVE'
assert len(job.starts_and_stops) == 3
assert job.starts_and_stops[2]['start']
assert job.starts_and_stops[2]['stop'] is None
assert site.status == 'ACTIVE'
assert len(site.starts_and_stops) == 3
assert site.starts_and_stops[2]['start']
assert site.starts_and_stops[2]['stop'] is None
frontier.finished(site, 'FINISHED')
job.refresh()
assert job.status == 'FINISHED'
assert len(job.starts_and_stops) == 3
assert job.starts_and_stops[2]['start']
assert job.starts_and_stops[2]['stop']
assert job.starts_and_stops[2]['stop'] > job.starts_and_stops[2]['start']
assert site.status == 'FINISHED'
assert len(site.starts_and_stops) == 3
assert site.starts_and_stops[2]['start']
assert site.starts_and_stops[2]['stop']
assert site.starts_and_stops[2]['stop'] > site.starts_and_stops[2]['start']
frontier.resume_job(job)
site = list(frontier.job_sites(job.id))[0]
assert job.status == 'ACTIVE'
assert len(job.starts_and_stops) == 4
assert job.starts_and_stops[3]['start']
assert job.starts_and_stops[3]['stop'] is None
assert site.status == 'ACTIVE'
assert len(site.starts_and_stops) == 4
assert site.starts_and_stops[3]['start']
assert site.starts_and_stops[3]['stop'] is None
# simulate a job stop request
job_conf = {'seeds': [{'url': 'http://example.com/'}, {'url': 'http://example_2.com/'}]}
job = brozzler.new_job(frontier, job_conf)
assert len(list(frontier.job_sites(job.id))) == 2
site1 = list(frontier.job_sites(job.id))[0]
site2 = list(frontier.job_sites(job.id))[1]
job.stop_requested = datetime.datetime.utcnow().replace(tzinfo=doublethink.UTC)
job.save()
# should raise a CrawlStopped
with pytest.raises(brozzler.CrawlStopped):
frontier.honor_stop_request(site1)
frontier.finished(site1, 'FINISHED_STOP_REQUESTED')
frontier.finished(site2, 'FINISHED_STOP_REQUESTED')
job.refresh()
assert job.status == 'FINISHED'
assert job.stop_requested
assert len(job.starts_and_stops) == 1
assert job.starts_and_stops[0]['start']
assert job.starts_and_stops[0]['stop']
assert job.starts_and_stops[0]['stop'] > job.starts_and_stops[0]['start']
assert site1.status == 'FINISHED_STOP_REQUESTED'
assert site2.status == 'FINISHED_STOP_REQUESTED'
assert len(site1.starts_and_stops) == 1
assert len(site2.starts_and_stops) == 1
assert site1.starts_and_stops[0]['start']
assert site1.starts_and_stops[0]['stop']
    assert site1.starts_and_stops[0]['stop'] > site1.starts_and_stops[0]['start']
assert site2.starts_and_stops[0]['start']
assert site2.starts_and_stops[0]['stop']
    assert site2.starts_and_stops[0]['stop'] > site2.starts_and_stops[0]['start']
# simulate job resume after a stop request
frontier.resume_job(job)
site1 = list(frontier.job_sites(job.id))[0]
site2 = list(frontier.job_sites(job.id))[1]
assert job.status == 'ACTIVE'
assert job.stop_requested is None
assert len(job.starts_and_stops) == 2
assert job.starts_and_stops[1]['start']
assert job.starts_and_stops[1]['stop'] is None
assert site1.status == 'ACTIVE'
assert len(site1.starts_and_stops) == 2
assert site1.starts_and_stops[1]['start']
assert site1.starts_and_stops[1]['stop'] is None
assert site2.status == 'ACTIVE'
assert len(site2.starts_and_stops) == 2
assert site2.starts_and_stops[1]['start']
assert site2.starts_and_stops[1]['stop'] is None
# simulate a site stop request
site1.stop_requested = datetime.datetime.utcnow().replace(tzinfo=doublethink.UTC)
site1.save()
# should not raise a CrawlStopped
frontier.honor_stop_request(site2)
frontier.finished(site1, 'FINISHED_STOP_REQUESTED')
job.refresh()
assert job.status == 'ACTIVE'
assert job.stop_requested is None
assert len(job.starts_and_stops) == 2
assert job.starts_and_stops[1]['start']
assert job.starts_and_stops[1]['stop'] is None
assert site1.status == 'FINISHED_STOP_REQUESTED'
assert len(site1.starts_and_stops) == 2
assert site1.starts_and_stops[1]['start']
assert site1.starts_and_stops[1]['stop']
    assert site1.starts_and_stops[1]['stop'] > site1.starts_and_stops[1]['start']
assert site2.status == 'ACTIVE'
assert len(site2.starts_and_stops) == 2
assert site2.starts_and_stops[1]['start']
assert site2.starts_and_stops[1]['stop'] is None
# simulate site resume after a stop request
frontier.resume_site(site1)
site1 = list(frontier.job_sites(job.id))[0]
site2 = list(frontier.job_sites(job.id))[1]
assert job.status == 'ACTIVE'
assert job.stop_requested is None
assert len(job.starts_and_stops) == 2
assert job.starts_and_stops[1]['start']
assert job.starts_and_stops[1]['stop'] is None
assert site1.status == 'ACTIVE'
assert site1.stop_requested is None
assert len(site1.starts_and_stops) == 3
assert site1.starts_and_stops[2]['start']
assert site1.starts_and_stops[2]['stop'] is None
assert site2.status == 'ACTIVE'
assert len(site2.starts_and_stops) == 2
assert site2.starts_and_stops[1]['start']
assert site2.starts_and_stops[1]['stop'] is None
def test_time_limit():
# XXX test not thoroughly adapted to change in time accounting, since
# starts_and_stops is no longer used to enforce time limits
# vagrant brozzler-worker isn't configured to look at the "ignoreme" db
rr = doublethink.Rethinker('localhost', db='ignoreme')
frontier = brozzler.RethinkDbFrontier(rr)
site = brozzler.Site(rr, {'seed':'http://example.com/', 'time_limit':99999})
brozzler.new_site(frontier, site)
site.refresh() # get it back from the db
assert site.status == 'ACTIVE'
assert len(site.starts_and_stops) == 1
assert site.starts_and_stops[0]['start']
assert site.starts_and_stops[0]['stop'] is None
frontier.finished(site, 'FINISHED')
assert site.status == 'FINISHED'
assert len(site.starts_and_stops) == 1
assert site.starts_and_stops[0]['start']
assert site.starts_and_stops[0]['stop']
assert site.starts_and_stops[0]['stop'] > site.starts_and_stops[0]['start']
frontier.resume_site(site)
assert site.status == 'ACTIVE'
assert len(site.starts_and_stops) == 2
assert site.starts_and_stops[1]['start']
assert site.starts_and_stops[1]['stop'] is None
# no time limit set
frontier.enforce_time_limit(site)
site.time_limit = 10
site.claimed = True
site.save()
# time limit not reached yet
frontier.enforce_time_limit(site)
assert site.status == 'ACTIVE'
assert len(site.starts_and_stops) == 2
assert site.starts_and_stops[1]['start']
assert site.starts_and_stops[1]['stop'] is None
site.time_limit = 0.1
time.sleep(0.1)
with pytest.raises(brozzler.ReachedTimeLimit):
frontier.enforce_time_limit(site)
def test_field_defaults():
rr = doublethink.Rethinker('localhost', db='ignoreme')
# page
brozzler.Page.table_ensure(rr)
page = brozzler.Page(rr, {'hops_from_seed': 3})
assert page.hops_from_seed == 3
assert page.id
assert page.brozzle_count == 0
page.save()
assert page.hops_from_seed == 3
assert page.id
assert page.brozzle_count == 0
qage = brozzler.Page.load(rr, page.id)
assert qage.hops_from_seed == 3
assert qage.id == page.id
assert qage.brozzle_count == 0
qage.save()
assert qage.hops_from_seed == 3
assert qage.id == page.id
assert qage.brozzle_count == 0
qage.refresh()
assert qage.hops_from_seed == 3
assert qage.id == page.id
assert qage.brozzle_count == 0
# site
brozzler.Site.table_ensure(rr)
site = brozzler.Site(rr, {'seed': 'http://example.com/'})
assert site.id is None
assert site.scope == {'accepts': [{'ssurt': 'com,example,//http:/'}]}
site.save()
assert site.id
assert site.scope
tite = brozzler.Site.load(rr, site.id)
assert tite.id == site.id
assert tite.scope == site.scope
tite.save()
assert tite.id == site.id
assert tite.scope == site.scope
tite.refresh()
assert tite.id == site.id
assert tite.scope == site.scope
# job
brozzler.Job.table_ensure(rr)
job = brozzler.Job(rr, {'status': 'WHUUUT'})
assert job.status == 'WHUUUT'
assert job.id is None
assert job.starts_and_stops
job.save()
assert job.status == 'WHUUUT'
assert job.id
assert job.starts_and_stops
kob = brozzler.Job.load(rr, job.id)
assert kob.status == 'WHUUUT'
assert kob.id
assert kob.starts_and_stops
kob.save()
assert kob.status == 'WHUUUT'
assert kob.id
assert kob.starts_and_stops
kob.refresh()
assert kob.status == 'WHUUUT'
assert kob.id
assert kob.starts_and_stops
def test_scope_and_schedule_outlinks():
rr = doublethink.Rethinker('localhost', db='ignoreme')
frontier = brozzler.RethinkDbFrontier(rr)
site = brozzler.Site(rr, {'seed':'http://example.com/'})
parent_page = brozzler.Page(rr, {
'hops_from_seed': 1, 'url': 'http://example.com/whatever'})
outlinks = [
'https://example.com/',
'https://example.com/foo',
'http://example.com/bar',
'HTtp://exAMPle.COm/bar',
'HTtp://exAMPle.COm/BAr',
'HTtp://exAMPle.COm/BAZZZZ',]
orig_is_permitted_by_robots = brozzler.is_permitted_by_robots
brozzler.is_permitted_by_robots = lambda *args: True
try:
frontier.scope_and_schedule_outlinks(site, parent_page, outlinks)
finally:
brozzler.is_permitted_by_robots = orig_is_permitted_by_robots
assert sorted(parent_page.outlinks['rejected']) == [
'https://example.com/', 'https://example.com/foo']
assert sorted(parent_page.outlinks['accepted']) == [
'http://example.com/BAZZZZ', 'http://example.com/BAr',
'http://example.com/bar']
assert parent_page.outlinks['blocked'] == []
pp = brozzler.Page.load(rr, parent_page.id)
assert pp == parent_page
for url in parent_page.outlinks['rejected']:
id = brozzler.Page.compute_id(site.id, url)
assert brozzler.Page.load(rr, id) is None
for url in parent_page.outlinks['accepted']:
id = brozzler.Page.compute_id(site.id, url)
assert brozzler.Page.load(rr, id)
def test_parent_url_scoping():
rr = doublethink.Rethinker('localhost', db='ignoreme')
frontier = brozzler.RethinkDbFrontier(rr)
# scope rules that look at parent page url should consider both the
# original url and the redirect url, if any, of the parent page
site = brozzler.Site(rr, {
'seed': 'http://example.com/foo/',
'scope': {
'accepts': [{
'parent_url_regex': '^http://example.com/acceptme/.*$'}],
'blocks': [{
'parent_url_regex': '^http://example.com/blockme/.*$'}],
},
'remember_outlinks': True})
site.save()
# an outlink that would not otherwise be in scope
outlinks = ['https://some-random-url.com/']
# parent page does not match any parent_url_regex
parent_page = brozzler.Page(rr, {
'site_id': site.id,
'url': 'http://example.com/foo/spluh'})
orig_is_permitted_by_robots = brozzler.is_permitted_by_robots
brozzler.is_permitted_by_robots = lambda *args: True
try:
frontier.scope_and_schedule_outlinks(site, parent_page, outlinks)
finally:
brozzler.is_permitted_by_robots = orig_is_permitted_by_robots
assert parent_page.outlinks['rejected'] == outlinks
assert parent_page.outlinks['accepted'] == []
# parent page url matches accept parent_url_regex
parent_page = brozzler.Page(rr, {
'site_id': site.id,
'url': 'http://example.com/acceptme/futz'})
orig_is_permitted_by_robots = brozzler.is_permitted_by_robots
brozzler.is_permitted_by_robots = lambda *args: True
try:
frontier.scope_and_schedule_outlinks(site, parent_page, outlinks)
finally:
brozzler.is_permitted_by_robots = orig_is_permitted_by_robots
assert parent_page.outlinks['rejected'] == []
assert parent_page.outlinks['accepted'] == outlinks
# parent page redirect_url matches accept parent_url_regex
    parent_page = brozzler.Page(rr, {
'site_id': site.id,
'url': 'http://example.com/toot/blah',
'redirect_url':'http://example.com/acceptme/futz'})
orig_is_permitted_by_robots = brozzler.is_permitted_by_robots
brozzler.is_permitted_by_robots = lambda *args: True
try:
frontier.scope_and_schedule_outlinks(site, parent_page, outlinks)
finally:
brozzler.is_permitted_by_robots = orig_is_permitted_by_robots
assert parent_page.outlinks['rejected'] == []
assert parent_page.outlinks['accepted'] == outlinks
# an outlink that would normally be in scope
outlinks = ['http://example.com/foo/whatever/']
# parent page does not match any parent_url_regex
parent_page = brozzler.Page(rr, {
'site_id': site.id,
'url': 'http://example.com/foo/spluh'})
orig_is_permitted_by_robots = brozzler.is_permitted_by_robots
brozzler.is_permitted_by_robots = lambda *args: True
try:
frontier.scope_and_schedule_outlinks(site, parent_page, outlinks)
finally:
brozzler.is_permitted_by_robots = orig_is_permitted_by_robots
assert parent_page.outlinks['rejected'] == []
assert parent_page.outlinks['accepted'] == outlinks
# parent page url matches block parent_url_regex
parent_page = brozzler.Page(rr, {
'site_id': site.id,
'url': 'http://example.com/blockme/futz'})
orig_is_permitted_by_robots = brozzler.is_permitted_by_robots
brozzler.is_permitted_by_robots = lambda *args: True
try:
frontier.scope_and_schedule_outlinks(site, parent_page, outlinks)
finally:
brozzler.is_permitted_by_robots = orig_is_permitted_by_robots
assert parent_page.outlinks['rejected'] == outlinks
assert parent_page.outlinks['accepted'] == []
# parent page redirect_url matches block parent_url_regex
parent_page_c = brozzler.Page(rr, {
'site_id': site.id,
'url': 'http://example.com/toot/blah',
'redirect_url':'http://example.com/blockme/futz'})
orig_is_permitted_by_robots = brozzler.is_permitted_by_robots
brozzler.is_permitted_by_robots = lambda *args: True
try:
frontier.scope_and_schedule_outlinks(site, parent_page, outlinks)
finally:
brozzler.is_permitted_by_robots = orig_is_permitted_by_robots
assert parent_page.outlinks['rejected'] == outlinks
assert parent_page.outlinks['accepted'] == []
def test_completed_page():
rr = doublethink.Rethinker('localhost', db='ignoreme')
frontier = brozzler.RethinkDbFrontier(rr)
# redirect that changes scope surt
site = brozzler.Site(rr, {'seed':'http://example.com/a/'})
site.save()
page = brozzler.Page(rr, {
'site_id': site.id,
'url': 'http://example.com/a/',
'claimed': True,
'brozzle_count': 0,
'hops_from_seed': 0,
'redirect_url':'http://example.com/b/', })
page.save()
assert site.scope == {'accepts': [{'ssurt': 'com,example,//http:/a/'}]}
frontier.completed_page(site, page)
assert site.scope == {'accepts': [
{'ssurt': 'com,example,//http:/a/'},
{'ssurt': 'com,example,//http:/b/'}]}
site.refresh()
assert site.scope == {'accepts': [
{'ssurt': 'com,example,//http:/a/'},
{'ssurt': 'com,example,//http:/b/'}]}
assert page.brozzle_count == 1
assert page.claimed == False
page.refresh()
assert page.brozzle_count == 1
assert page.claimed == False
# redirect that doesn't change scope surt because destination is covered by
# the original surt
site = brozzler.Site(rr, {'seed':'http://example.com/a/'})
site.save()
page = brozzler.Page(rr, {
'site_id': site.id,
'url': 'http://example.com/a/',
'claimed': True,
'brozzle_count': 0,
'hops_from_seed': 0,
'redirect_url':'http://example.com/a/x/', })
page.save()
assert site.scope == {'accepts': [{'ssurt': 'com,example,//http:/a/'}]}
frontier.completed_page(site, page)
assert site.scope == {'accepts': [{'ssurt': 'com,example,//http:/a/'}]}
site.refresh()
assert site.scope == {'accepts': [{'ssurt': 'com,example,//http:/a/'}]}
assert page.brozzle_count == 1
assert page.claimed == False
page.refresh()
assert page.brozzle_count == 1
assert page.claimed == False
# redirect that doesn't change scope surt because page is not the seed page
site = brozzler.Site(rr, {'seed':'http://example.com/a/'})
site.save()
page = brozzler.Page(rr, {
'site_id': site.id,
'url': 'http://example.com/c/',
'claimed': True,
'brozzle_count': 0,
'hops_from_seed': 1,
'redirect_url':'http://example.com/d/', })
page.save()
assert site.scope == {'accepts': [{'ssurt': 'com,example,//http:/a/'}]}
frontier.completed_page(site, page)
assert site.scope == {'accepts': [{'ssurt': 'com,example,//http:/a/'}]}
site.refresh()
assert site.scope == {'accepts': [{'ssurt': 'com,example,//http:/a/'}]}
assert page.brozzle_count == 1
assert page.claimed == False
page.refresh()
assert page.brozzle_count == 1
assert page.claimed == False
def test_seed_page():
rr = doublethink.Rethinker('localhost', db='ignoreme')
frontier = brozzler.RethinkDbFrontier(rr)
site = brozzler.Site(rr, {'seed':'http://example.com/a/'})
site.save()
assert frontier.seed_page(site.id) is None
page1 = brozzler.Page(rr, {
'site_id': site.id,
'url': 'http://example.com/a/b/',
'hops_from_seed': 1})
page1.save()
assert frontier.seed_page(site.id) is None
page0 = brozzler.Page(rr, {
'site_id': site.id,
'url': 'http://example.com/a/',
'hops_from_seed': 0})
page0.save()
assert frontier.seed_page(site.id) == page0
def test_hashtag_seed():
rr = doublethink.Rethinker('localhost', db='ignoreme')
frontier = brozzler.RethinkDbFrontier(rr)
# no hash tag
site = brozzler.Site(rr, {'seed': 'http://example.org/'})
brozzler.new_site(frontier, site)
assert site.scope == {'accepts': [{'ssurt': 'org,example,//http:/'}]}
pages = list(frontier.site_pages(site.id))
assert len(pages) == 1
assert pages[0].url == 'http://example.org/'
assert not pages[0].hashtags
# yes hash tag
site = brozzler.Site(rr, {'seed': 'http://example.org/#hash'})
brozzler.new_site(frontier, site)
assert site.scope == {'accepts': [{'ssurt': 'org,example,//http:/'}]}
pages = list(frontier.site_pages(site.id))
assert len(pages) == 1
assert pages[0].url == 'http://example.org/'
assert pages[0].hashtags == ['#hash',]
def test_hashtag_links():
rr = doublethink.Rethinker('localhost', db='test_hashtag_links')
frontier = brozzler.RethinkDbFrontier(rr)
site = brozzler.Site(rr, {'seed': 'http://example.org/'})
brozzler.new_site(frontier, site)
parent_page = frontier.seed_page(site.id)
assert not parent_page.hashtags
outlinks = [
'http://example.org/#foo',
'http://example.org/bar',
'http://example.org/bar#baz',
'http://example.org/bar#quux',
'http://example.org/zuh#buh',
]
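    # the hashtag fragments should be stripped from the page urls and
    # accumulated in each resulting page's hashtags list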
frontier.scope_and_schedule_outlinks(site, parent_page, outlinks)
pages = sorted(list(frontier.site_pages(site.id)), key=lambda p: p.url)
assert len(pages) == 3
assert pages[0].url == 'http://example.org/'
assert sorted(pages[0].outlinks['accepted']) == [
'http://example.org/', 'http://example.org/bar',
'http://example.org/zuh']
assert not pages[0].outlinks['blocked']
assert not pages[0].outlinks['rejected']
assert pages[0].hashtags == ['#foo',]
assert pages[0].hops_from_seed == 0
assert pages[1].url == 'http://example.org/bar'
assert sorted(pages[1].hashtags) == ['#baz','#quux']
assert pages[1].priority == 36
assert pages[1].hops_from_seed == 1
assert pages[2].url == 'http://example.org/zuh'
assert pages[2].hashtags == ['#buh']
assert pages[2].priority == 12
def test_honor_stop_request():
rr = doublethink.Rethinker('localhost', db='ignoreme')
frontier = brozzler.RethinkDbFrontier(rr)
# 1. test stop request on job
job_conf = {'seeds': [{'url': 'http://example.com'}]}
job = brozzler.new_job(frontier, job_conf)
assert job.id
sites = list(frontier.job_sites(job.id))
assert len(sites) == 1
site = sites[0]
assert site.job_id == job.id
# does not raise exception
frontier.honor_stop_request(site)
# set job.stop_requested
job.stop_requested = datetime.datetime.utcnow().replace(
tzinfo=doublethink.UTC)
job.save()
with pytest.raises(brozzler.CrawlStopped):
frontier.honor_stop_request(site)
# 2. test stop request on site
job_conf = {'seeds': [{'url': 'http://example.com'}]}
job = brozzler.new_job(frontier, job_conf)
assert job.id
sites = list(frontier.job_sites(job.id))
assert len(sites) == 1
site = sites[0]
assert site.job_id == job.id
# does not raise exception
frontier.honor_stop_request(site)
# set site.stop_requested
site.stop_requested = doublethink.utcnow()
site.save()
with pytest.raises(brozzler.CrawlStopped):
frontier.honor_stop_request(site)
def test_claim_site():
rr = doublethink.Rethinker('localhost', db='ignoreme')
frontier = brozzler.RethinkDbFrontier(rr)
rr.table('sites').delete().run() # clean slate
with pytest.raises(brozzler.NothingToClaim):
claimed_site = frontier.claim_sites()
site = brozzler.Site(rr, {'seed': 'http://example.org/'})
brozzler.new_site(frontier, site)
claimed_sites = frontier.claim_sites()
assert len(claimed_sites) == 1
claimed_site = claimed_sites[0]
assert claimed_site.id == site.id
assert claimed_site.claimed
assert claimed_site.last_claimed >= doublethink.utcnow() - datetime.timedelta(minutes=1)
with pytest.raises(brozzler.NothingToClaim):
claimed_site = frontier.claim_sites()
    # site last_claimed less than 1 hour ago should not be reclaimed yet
claimed_site.last_claimed = doublethink.utcnow() - datetime.timedelta(minutes=55)
claimed_site.save()
with pytest.raises(brozzler.NothingToClaim):
claimed_site = frontier.claim_sites()
# site last_claimed more than 1 hour ago can be reclaimed
site = claimed_site
claimed_site = None
site.last_claimed = doublethink.utcnow() - datetime.timedelta(minutes=65)
site.save()
claimed_sites = frontier.claim_sites()
assert len(claimed_sites) == 1
claimed_site = claimed_sites[0]
assert claimed_site.id == site.id
# clean up
rr.table('sites').get(claimed_site.id).delete().run()
def test_max_claimed_sites():
# max_claimed_sites is a brozzler job setting that puts a cap on the number
# of the job's sites that can be brozzled simultaneously across the cluster
rr = doublethink.Rethinker('localhost', db='ignoreme')
frontier = brozzler.RethinkDbFrontier(rr)
# clean slate
rr.table('jobs').delete().run()
rr.table('sites').delete().run()
job_conf = {
'seeds': [
{'url': 'http://example.com/1'},
{'url': 'http://example.com/2'},
{'url': 'http://example.com/3'},
{'url': 'http://example.com/4'},
{'url': 'http://example.com/5'},
],
'max_claimed_sites': 3,
}
job = brozzler.new_job(frontier, job_conf)
assert job.id
assert job.max_claimed_sites == 3
sites = list(frontier.job_sites(job.id))
assert len(sites) == 5
claimed_sites = frontier.claim_sites(1)
assert len(claimed_sites) == 1
claimed_sites = frontier.claim_sites(3)
assert len(claimed_sites) == 2
with pytest.raises(brozzler.NothingToClaim):
claimed_site = frontier.claim_sites(3)
# clean slate for the next one
rr.table('jobs').delete().run()
rr.table('sites').delete().run()
def test_choose_warcprox():
rr = doublethink.Rethinker('localhost', db='ignoreme')
svcreg = doublethink.ServiceRegistry(rr)
frontier = brozzler.RethinkDbFrontier(rr)
# avoid this error: https://travis-ci.org/internetarchive/brozzler/jobs/330991786#L1021
rr.table('sites').wait().run()
rr.table('services').wait().run()
rr.table('sites').index_wait().run()
rr.table('services').index_wait().run()
# clean slate
rr.table('sites').delete().run()
rr.table('services').delete().run()
worker = brozzler.BrozzlerWorker(frontier, svcreg)
assert worker._choose_warcprox() is None
rr.table('services').insert({
'role': 'warcprox',
'first_heartbeat': doublethink.utcnow(),
'last_heartbeat': doublethink.utcnow(),
'host': 'host1', 'port': 8000,
'load': 0, 'ttl': 60}).run()
rr.table('services').insert({
'role': 'warcprox',
'first_heartbeat': doublethink.utcnow(),
'last_heartbeat': doublethink.utcnow(),
'host': 'host2', 'port': 8000,
'load': 0, 'ttl': 60}).run()
rr.table('services').insert({
'role': 'warcprox',
'first_heartbeat': doublethink.utcnow(),
'last_heartbeat': doublethink.utcnow(),
'host': 'host2', 'port': 8001,
'load': 0, 'ttl': 60}).run()
rr.table('services').insert({
'role': 'warcprox',
'first_heartbeat': doublethink.utcnow(),
'last_heartbeat': doublethink.utcnow(),
'host': 'host3', 'port': 8000,
'load': 0, 'ttl': 60}).run()
rr.table('services').insert({
'role': 'warcprox',
'first_heartbeat': doublethink.utcnow(),
'last_heartbeat': doublethink.utcnow(),
'host': 'host4', 'port': 8000,
'load': 1, 'ttl': 60}).run()
rr.table('sites').insert({
'proxy': 'host1:8000', 'status': 'ACTIVE',
'last_disclaimed': doublethink.utcnow()}).run()
rr.table('sites').insert({
'proxy': 'host1:8000', 'status': 'ACTIVE',
'last_disclaimed': doublethink.utcnow()}).run()
rr.table('sites').insert({
'proxy': 'host2:8000', 'status': 'ACTIVE',
'last_disclaimed': doublethink.utcnow()}).run()
rr.table('sites').insert({
'proxy': 'host2:8001', 'status': 'ACTIVE',
'last_disclaimed': doublethink.utcnow()}).run()
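    # host1 and host2 instances already have sites assigned; of the remaining
    # candidates, host3 reports load 0 vs host4's load 1, so expect host3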
instance = worker._choose_warcprox()
assert instance['host'] == 'host3'
assert instance['port'] == 8000
rr.table('sites').insert({
'proxy': 'host3:8000', 'status': 'ACTIVE',
'last_disclaimed': doublethink.utcnow()}).run()
instance = worker._choose_warcprox()
assert instance['host'] == 'host4'
assert instance['port'] == 8000
# clean up
rr.table('sites').delete().run()
rr.table('services').delete().run()
def test_max_hops_off():
rr = doublethink.Rethinker('localhost', db='ignoreme')
frontier = brozzler.RethinkDbFrontier(rr)
site = brozzler.Site(rr, {
'seed': 'http://example.com/',
'scope': {
'max_hops_off_surt': 1,
'blocks': [{'ssurt': 'domain,bad,'}]}})
brozzler.new_site(frontier, site)
site.refresh() # get it back from the db
    # the scope setting 'max_hops_off_surt' has been renamed to 'max_hops_off'
assert not 'max_hops_off_surt' in site.scope
assert site.scope['max_hops_off'] == 1
seed_page = frontier.seed_page(site.id)
assert site.accept_reject_or_neither('http://foo.org/', seed_page) is None
assert site.accept_reject_or_neither('https://example.com/toot', seed_page) is None
assert site.accept_reject_or_neither('http://example.com/toot', seed_page) is True
assert site.accept_reject_or_neither('https://some.bad.domain/something', seed_page) is False
orig_is_permitted_by_robots = brozzler.is_permitted_by_robots
brozzler.is_permitted_by_robots = lambda *args: True
try:
# two of these are in scope because of max_hops_off
frontier.scope_and_schedule_outlinks(site, seed_page, [
'http://foo.org/', 'https://example.com/toot',
'http://example.com/toot', 'https://some.bad.domain/something'])
finally:
brozzler.is_permitted_by_robots = orig_is_permitted_by_robots
pages = sorted(list(frontier.site_pages(site.id)), key=lambda p: p.url)
assert len(pages) == 4
assert pages[0].url == 'http://example.com/'
assert pages[0].hops_off == 0
assert not 'hops_off_surt' in pages[0]
assert set(pages[0].outlinks['accepted']) == {
'https://example.com/toot', 'http://foo.org/',
'http://example.com/toot'}
assert pages[0].outlinks['blocked'] == []
assert pages[0].outlinks['rejected'] == [
'https://some.bad.domain/something']
assert {
'brozzle_count': 0,
'claimed': False,
'hashtags': [],
'hops_from_seed': 1,
'hops_off': 0,
'id': brozzler.Page.compute_id(site.id, 'http://example.com/toot'),
'job_id': None,
'needs_robots_check': False,
'priority': 12,
'site_id': site.id,
'url': 'http://example.com/toot',
'via_page_id': seed_page.id
} in pages
assert {
'brozzle_count': 0,
'claimed': False,
'hashtags': [],
'hops_from_seed': 1,
'hops_off': 1,
'id': brozzler.Page.compute_id(site.id, 'http://foo.org/'),
'job_id': None,
'needs_robots_check': False,
'priority': 12,
'site_id': site.id,
'url': 'http://foo.org/',
'via_page_id': seed_page.id
} in pages
assert {
'brozzle_count': 0,
'claimed': False,
'hashtags': [],
'hops_from_seed': 1,
'hops_off': 1,
'id': brozzler.Page.compute_id(site.id, 'https://example.com/toot'),
'job_id': None,
'needs_robots_check': False,
'priority': 12,
'site_id': site.id,
'url': 'https://example.com/toot',
'via_page_id': seed_page.id
} in pages
# next hop is past max_hops_off, but normal in scope url is in scope
foo_page = [pg for pg in pages if pg.url == 'http://foo.org/'][0]
orig_is_permitted_by_robots = brozzler.is_permitted_by_robots
brozzler.is_permitted_by_robots = lambda *args: True
try:
frontier.scope_and_schedule_outlinks(site, foo_page, [
'http://foo.org/bar', 'http://example.com/blah'])
finally:
brozzler.is_permitted_by_robots = orig_is_permitted_by_robots
assert foo_page == {
'brozzle_count': 0,
'claimed': False,
'hashtags': [],
'hops_from_seed': 1,
'hops_off': 1,
'id': brozzler.Page.compute_id(site.id, 'http://foo.org/'),
'job_id': None,
'needs_robots_check': False,
'priority': 12,
'site_id': site.id,
'url': 'http://foo.org/',
'via_page_id': seed_page.id,
'outlinks': {
'accepted': ['http://example.com/blah'],
'blocked': [],
'rejected': ['http://foo.org/bar'],
}
}
pages = sorted(list(frontier.site_pages(site.id)), key=lambda p: p.url)
assert len(pages) == 5
assert {
'brozzle_count': 0,
'claimed': False,
'hashtags': [],
'hops_from_seed': 2,
'hops_off': 0,
'id': brozzler.Page.compute_id(site.id, 'http://example.com/blah'),
'job_id': None,
'needs_robots_check': False,
'priority': 11,
'site_id': site.id,
'url': 'http://example.com/blah',
'via_page_id': foo_page.id
} in pages
|
internetarchive/brozzler
|
tests/test_frontier.py
|
Python
|
apache-2.0
| 39,298
|
# Example of working with non-blocking "wrapper" sockets, SSL sockets in this
# case. Working with complex wrapper sockets involves handling the special
# uio.WANT_READ return from .write(), and uio.WANT_WRITE from .read(). This
# is in addition to handling the special None return from both of these.
# For comparison, CPython uses exceptions instead of special return values:
# SSLWantReadError and SSLWantWriteError, which are ad hoc to its "ssl"
# module (whereas Pycopy's mechanism is both efficient and fully generic;
# other wrapper streams, e.g. websockets, would work the same way).
import utime
import uio
import usocket
import ussl
import uselect
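# Block on uselect.poll() until the raw socket is ready for the requested
# I/O direction (uselect.POLLIN or uselect.POLLOUT).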
def wait_sock(s, poll_flag):
poll = uselect.poll()
poll.register(s, poll_flag)
t = utime.time()
res = poll.poll()
print(res)
print("Became readable in:", utime.time() - t, "s")
def write_all(io_sock, poll_sock, data):
while data:
res = io_sock.write(data)
print("write_all:", res)
if res is None:
wait_sock(poll_sock, uselect.POLLOUT)
elif res == uio.WANT_READ:
wait_sock(poll_sock, uselect.POLLIN)
elif res > 0:
data = data[res:]
else:
assert 0, "Unexpected return from ussl write: %s" % res
print("write_data done")
def read_all(io_sock, poll_sock, size):
buf = b""
while size:
res = io_sock.read(size)
print("read_all:", res)
if res is None:
wait_sock(poll_sock, uselect.POLLIN)
elif res == uio.WANT_WRITE:
wait_sock(poll_sock, uselect.POLLOUT)
elif res == b"":
break
elif isinstance(res, bytes):
buf += res
if size != -1:
size -= len(res)
else:
assert 0, "Unexpected return from ussl read: %s" % res
print("read_data done")
return buf
def main():
underlying_socket = usocket.socket()
ai = usocket.getaddrinfo("google.com", 443)
print("Address infos:", ai)
addr = ai[0][-1]
print("Connect address:", addr)
underlying_socket.connect(addr)
wrapper_socket = ussl.wrap_socket(underlying_socket, do_handshake=False)
wrapper_socket.setblocking(False)
print(wrapper_socket)
write_all(wrapper_socket, underlying_socket, b"GET / HTTP/1.0\r\n\r\n")
print(read_all(wrapper_socket, underlying_socket, -1))
wrapper_socket.close()
underlying_socket.close()
main()
|
pfalcon/micropython
|
examples/network/http_client_ssl_nonblock.py
|
Python
|
mit
| 2,463
|
#
# -*- coding: utf-8 -*-
# gui_skeleton.py
# Author: d10n
# No copyright
# Public domain
from __future__ import unicode_literals
import wx
from i18n import _
class MainFrameBase(wx.Frame):
def __init__(self, parent):
wx.Frame.__init__(
self,
parent,
title=_('SOS PiP Tool'),
style=(wx.DEFAULT_FRAME_STYLE
& ~wx.MAXIMIZE_BOX
& ~wx.RESIZE_BORDER)
)
self.sizer_main = wx.BoxSizer()
self.SetSizer(self.sizer_main)
self.panel_main = MainPanel(self)
self.panel_firstrun = NoticePanel(
self, _('Initial setup: installing background templates...'))
self.panel_adding = NoticePanel(
self, _('Adding Clip...'))
self.panel_loading = NoticePanel(
self, _('Loading...')
)
self.SetSize(self.panel_main.GetBestSize())
self.SetSize((400, -1))
self.panels = []
def add_panel(panel):
self.panels.append(panel)
self.sizer_main.Add(panel, 1, wx.EXPAND)
self.sizer_main.Show(panel, False)
add_panel(self.panel_main)
add_panel(self.panel_firstrun)
add_panel(self.panel_adding)
add_panel(self.panel_loading)
self.load_panel(self.panel_loading)
self.panel_main.btn_add.Bind(wx.EVT_BUTTON, self.btn_add_act)
def load_panel(self, panel):
for item in self.panels:
item.Show(False)
panel.Show(True)
self.Layout()
# self.Update()
wx.Yield()
# Virtual event handlers. Override them in your derived class
def btn_add_act(self, event):
event.Skip()
class NoticePanel(wx.Panel):
def __init__(self, parent, message):
wx.Panel.__init__(self, parent)
sizer_h = wx.BoxSizer(wx.VERTICAL)
sizer_v = wx.BoxSizer(wx.HORIZONTAL)
self.lbl = wx.StaticText(self, label=message)
border = 5
label_max_width = self.GetParent().GetSize()[0] - (10 * border)
self.lbl.Wrap(label_max_width)
sizer_h.Add(sizer_v, 1, flag=wx.CENTER)
sizer_v.Add(self.lbl, 0, flag=wx.CENTER, border=border)
self.SetSizer(sizer_h)
class MainPanel(wx.Panel):
def __init__(self, parent):
wx.Panel.__init__(self, parent)
sizer = wx.FlexGridSizer(rows=0, cols=2)
sizer.AddGrowableCol(1)
self.lbl_clip = wx.StaticText(
self, label=_('Clip Name'))
self.lbl_bg = wx.StaticText(
self, label=_('Background Image'))
self.lbl_pip = wx.StaticText(
self, label=_('PiP Image'))
self.lbl_scale = wx.StaticText(
self, label=_('PiP Scale'))
self.lbl_category = wx.StaticText(
self, label=_('Category'))
self.txt_clip_name = wx.TextCtrl(self, wx.ID_ANY)
# Bug: wxPython doesn't do tab traversal over FPC (at least on GTK)
# Bug: wxPython clears the filename but keeps the value on cancel
# TODO: work around these bugs
self.file_picker_bg = wx.FilePickerCtrl(
self,
message=_('Select a file'),
wildcard=(_('Supported Image Types') +
'(*.png;*.jpg;*.jpeg;*.gif;*.bmp)' +
'|*.png;*.jpg;*.jpeg;*.gif;*.bmp')
)
self.file_picker_pip = wx.FilePickerCtrl(
self,
message=_('Select a file'),
wildcard=(_('Supported Image Types') +
'(*.png;*.jpg;*.jpeg;*.gif;*.bmp)' +
'|*.png;*.jpg;*.jpeg;*.gif;*.bmp')
)
self.slider_scale = wx.Slider(
self,
value=60, minValue=45, maxValue=90,
style=wx.SL_HORIZONTAL | wx.SL_LABELS
)
# TRANSLATORS: "extra" is the standard SOS extra category
combo_category_choices = [_('extra'), _('sponsors')]
self.combo_category = wx.ComboBox(self, choices=combo_category_choices)
self.combo_category.SetSelection(0)
self.btn_add = wx.Button(self, label=_('&Add Clip'))
margin = 5
def add_label(item):
flag_label = wx.ALL | wx.ALIGN_CENTER_VERTICAL
sizer.Add(item, flag=flag_label, border=margin)
def add_input(item):
flag_input = wx.ALL | wx.ALIGN_CENTER_VERTICAL | wx.EXPAND
sizer.Add(item, flag=flag_input, border=margin)
add_label(self.lbl_clip)
add_input(self.txt_clip_name)
add_label(self.lbl_bg)
add_input(self.file_picker_bg)
add_label(self.lbl_pip)
add_input(self.file_picker_pip)
add_label(self.lbl_scale)
add_input(self.slider_scale)
add_label(self.lbl_category)
add_input(self.combo_category)
sizer.AddSpacer(0)
add_label(self.btn_add)
self.SetSizer(sizer)
# self.Fit()
if __name__ == '__main__':
app = wx.App(False)
frame = MainFrameBase(None)
frame.Show()
frame.load_panel(frame.panel_main)
app.MainLoop()
|
d10n/sos-pip-tool
|
sos_pip_tool/gui_skeleton.py
|
Python
|
unlicense
| 5,091
|
import sys
from PyQt5.QtWidgets import QApplication, QWidget, QInputDialog, QLineEdit, QFileDialog
from PyQt5.QtGui import QIcon
class App(QWidget):
def __init__(self):
super().__init__()
self.title = 'PyQt5 file dialogs - pythonspot.com'
self.left = 10
self.top = 10
self.width = 640
self.height = 480
self.initUI()
def initUI(self):
self.setWindowTitle(self.title)
self.setGeometry(self.left, self.top, self.width, self.height)
self.openFileNameDialog()
self.openFileNamesDialog()
self.saveFileDialog()
self.show()
def openFileNameDialog(self):
options = QFileDialog.Options()
options |= QFileDialog.DontUseNativeDialog
fileName, _ = QFileDialog.getOpenFileName(self,"QFileDialog.getOpenFileName()", "","All Files (*);;Python Files (*.py)", options=options)
if fileName:
print(fileName)
def openFileNamesDialog(self):
options = QFileDialog.Options()
options |= QFileDialog.DontUseNativeDialog
files, _ = QFileDialog.getOpenFileNames(self,"QFileDialog.getOpenFileNames()", "","All Files (*);;Python Files (*.py)", options=options)
if files:
print(files)
def saveFileDialog(self):
options = QFileDialog.Options()
options |= QFileDialog.DontUseNativeDialog
fileName, _ = QFileDialog.getSaveFileName(self,"QFileDialog.getSaveFileName()","","All Files (*);;Text Files (*.txt)", options=options)
if fileName:
print(fileName)
if __name__ == '__main__':
app = QApplication(sys.argv)
ex = App()
sys.exit(app.exec_())
|
steinnymir/RegAscope2017
|
test_scripts/GUI_test/PyQt5 Examples/filedialogs.py
|
Python
|
mit
| 1,770
|
import contextlib
import mmap
import os
import unittest
import aiy.vision.proto.protocol_pb2 as pb2
from aiy.vision._spicomm import SPICOMM_DEV
from aiy.vision._spicomm import SPICOMM_IOCTL_TRANSACT
from aiy.vision._spicomm import SPICOMM_IOCTL_TRANSACT_MMAP
from aiy.vision._spicomm import AsyncSpicomm
from aiy.vision._spicomm import SyncSpicomm
from aiy.vision._spicomm import SyncSpicommMmap
@contextlib.contextmanager
def SpicommDev():
dev = os.open(SPICOMM_DEV, os.O_RDWR)
try:
yield dev
finally:
os.close(dev)
def num_pages(length):
return (length + mmap.PAGESIZE - 1) // mmap.PAGESIZE
def get_camera_state(spicomm, timeout=None):
request = pb2.Request(get_camera_state=pb2.Request.GetCameraState())
response = pb2.Response()
response.ParseFromString(spicomm.transact(request.SerializeToString(), timeout))
return response
def get_invalid(spicomm, size, timeout=None):
response = pb2.Response()
response.ParseFromString(spicomm.transact(b'A' * size, timeout))
return response
class SpicommTestMixin:
def test_empty_request(self):
with self.Spicomm() as spicomm:
with self.assertRaises(OSError):
spicomm.transact(b'')
def test_valid_request(self):
with self.Spicomm() as spicomm:
response = get_camera_state(spicomm)
self.assertEqual(pb2.Response.Status.OK, response.status.code)
def test_valid_request_force_allocate(self):
with self.Spicomm(default_payload_size=8) as spicomm:
response = get_camera_state(spicomm)
self.assertEqual(pb2.Response.Status.OK, response.status.code)
def test_invalid_request(self):
with self.Spicomm() as spicomm:
response = get_invalid(spicomm, 32)
self.assertEqual(pb2.Response.Status.ERROR, response.status.code)
def test_invalid_request_timeout(self):
with self.Spicomm() as spicomm:
with self.assertRaises(OSError):
response = get_invalid(spicomm, size=1024 * 1024, timeout=0.001)
self.assertEqual(pb2.Response.Status.OK, response.status.code)
def test_invalid_request_force_allocate(self):
with self.Spicomm(default_payload_size=8) as spicomm:
response = get_invalid(spicomm, 1024 * 1024)
self.assertEqual(pb2.Response.Status.ERROR, response.status.code)
def test_huge_invalid_request(self):
with self.Spicomm(default_payload_size=8) as spicomm:
response = get_invalid(spicomm, 10 * 1024 * 1024)
self.assertEqual(pb2.Response.Status.ERROR, response.status.code)
class AsyncSpicommTest(SpicommTestMixin, unittest.TestCase):
Spicomm = AsyncSpicomm
class SyncSpicommTest(SpicommTestMixin, unittest.TestCase):
Spicomm = SyncSpicomm
class SyncSpicommMmapTest(SpicommTestMixin, unittest.TestCase):
Spicomm = SyncSpicommMmap
def test_multiple_dev(self):
with SpicommDev() as dev1, SpicommDev() as dev2:
with mmap.mmap(dev1, length=63, offset = 5 * mmap.PAGESIZE) as mm1, \
mmap.mmap(dev2, length=63, offset = 5 * mmap.PAGESIZE) as mm2:
self.assertEqual(len(mm1), 63)
self.assertEqual(len(mm2), 63)
def test_mappings(self):
with SpicommDev() as dev:
with mmap.mmap(dev, length=47, offset=0 * mmap.PAGESIZE) as mm1, \
mmap.mmap(dev, length=53, offset=1 * mmap.PAGESIZE) as mm2, \
mmap.mmap(dev, length=100 * mmap.PAGESIZE + 1, offset=2 * mmap.PAGESIZE) as mm3:
self.assertEqual(len(mm1), 47)
self.assertEqual(len(mm2), 53)
self.assertEqual(len(mm3), 100 * mmap.PAGESIZE + 1)
def test_big_mappings(self):
with SpicommDev() as dev:
with mmap.mmap(dev, length=7 * 1024 * 1024 + 1, offset=0 * mmap.PAGESIZE) as mm1, \
mmap.mmap(dev, length=12 * 1024 * 1024 + 2 ,
offset=num_pages(len(mm1)) * mmap.PAGESIZE) as mm2:
self.assertEqual(len(mm1), 7 * 1024 * 1024 + 1)
self.assertEqual(len(mm2), 12 * 1024 * 1024 + 2)
def test_multiple_map_unmap(self):
with SpicommDev() as dev:
for i in range(1, 100):
with mmap.mmap(dev, length=i * 40, offset=50 * mmap.PAGESIZE) as mm:
self.assertEqual(len(mm), i * 40)
def test_zero_length_mapping(self):
with SpicommDev() as dev:
with self.assertRaises(OSError):
with mmap.mmap(dev, length=0, offset=0):
pass
def test_max_mappings(self):
with SpicommDev() as dev, \
contextlib.ExitStack() as stack:
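            # eight concurrent mappings succeed; a ninth attempt should be rejected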
for i in range(8):
stack.enter_context(mmap.mmap(dev, length=mmap.PAGESIZE, offset=i * mmap.PAGESIZE))
with self.assertRaises(OSError):
with mmap.mmap(dev, length=mmap.PAGESIZE, offset=100 * mmap.PAGESIZE):
pass
def test_same_offset_mappings(self):
with SpicommDev() as dev:
offset = 5 * mmap.PAGESIZE
with mmap.mmap(dev, length=47, offset=offset):
with self.assertRaises(OSError):
with mmap.mmap(dev, length=54, offset=offset):
pass
with self.assertRaises(OSError):
with mmap.mmap(dev, length=1, offset=offset):
pass
def test_overlapping_mappings(self):
with SpicommDev() as dev:
with mmap.mmap(dev, length=47, offset=5 * mmap.PAGESIZE) as mm:
self.assertEqual(len(mm), 47)
# Right before with overlap
with self.assertRaises(OSError):
with mmap.mmap(dev, length=mmap.PAGESIZE + 1, offset=4 * mmap.PAGESIZE):
pass
# Right before
with mmap.mmap(dev, length=mmap.PAGESIZE, offset=4 * mmap.PAGESIZE):
pass
# Right after
with mmap.mmap(dev, length=mmap.PAGESIZE, offset=(1 + 5) * mmap.PAGESIZE):
pass
if __name__ == '__main__':
unittest.main()
|
google/aiyprojects-raspbian
|
src/tests/spicomm_test.py
|
Python
|
apache-2.0
| 6,254
|
'''
Description: This is the 3DS file parser, it produces a 3ds file object
with the File3Ds.open method
Status: Nearly complete, some bone data missing
License: AGPLv3, see LICENSE for more details
Copyright: 2011 Florian Boesch <pyalot@gmail.com>
Helpful Links:
http://en.wikipedia.org/wiki/.3ds
http://www.spacesimulator.net/wiki/index.php?title=Tutorials:3ds_Loader
http://www.martinreddy.net/gfx/3d/3DS.spec
http://faydoc.tripod.com/formats/3ds.htm
'''
from struct import unpack
from vector import Vec2, Vec3
class Data:
size = 0
def __init__(self, parent, data):
self.parent = parent
def __repr__(self):
return self.__class__.__name__
class Main(Data): pass
class Editor(Data): pass
class Object(Data):
def __init__(self, parent, data):
self.parent = parent
zero_index = data.find('\0')
self.name = data[:zero_index]
self.size = zero_index+1
def __repr__(self):
return '%s %s' % (self.__class__.__name__, self.name)
class Mesh(Data): pass
class Vertices(Data):
def __init__(self, parent, data):
self.parent = parent
count = unpack('H', data[:2])[0]
data = data[2:]
self.vertices = []
for i in range(count):
x, y, z = unpack('fff', data[:3*4])
data = data[3*4:]
self.vertices.append(Vec3(x,z,-y))
self.size = 2 + count*3*4
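        # collapse duplicate vertices: a later vertex with identical coordinates
        # is replaced by a reference to its first occurrence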
for i, v1 in enumerate(self.vertices):
for j, v2 in list(enumerate(self.vertices))[i+1:]:
if v1.x == v2.x and v1.y == v2.y and v1.z == v2.z:
self.vertices[j] = v1
class Faces(Data):
def __init__(self, parent, data):
self.parent = parent
count = unpack('H', data[:2])[0]
data = data[2:]
self.faces = []
for i in range(count):
j = i+1
v1, v2, v3, flags = unpack('HHHH', data[i*4*2:j*4*2])
self.faces.append((v1, v2, v3, flags))
self.size = 2 + count*4*2
class FaceMaterial:
def __init__(self, parent, data):
self.parent = parent
zero_index = data.find('\0')
self.name = data[:zero_index]
data = data[zero_index+1:]
count = unpack('H', data[:2])[0]
data = data[2:]
#todo get indices
self.faces = []
for i in range(count):
face_index = unpack('H', data[:2])[0]
data = data[2:]
self.faces.append(face_index)
self.size = zero_index+1 + 2 + count*2
def __repr__(self):
return '%s %s' % (self.__class__.__name__, self.name)
class Texcoords(Data):
def __init__(self, parent, data):
self.parent = parent
count = unpack('H', data[:2])[0]
data = data[2:]
self.texcoords = []
for i in range(count):
x, y = unpack('ff', data[:8])
data = data[8:]
self.texcoords.append(Vec2(x,1.0-y))
self.size = 2 + count*2*4
class Matrix(Data):
def __init__(self, parent, data):
self.parent = parent
self.size = 12*4
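        # nine floats for the 3x3 mesh matrix, then the mesh origin, which gets
        # the same axis swap as the vertices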
        r11, r12, r13, r21, r22, r23, r31, r32, r33, x, y, z = unpack('f'*12, data)
        self.rot = [r11, r12, r13, r21, r22, r23, r31, r32, r33]
self.center = Vec3(x, z, -y)
class SmoothGroup(Data):
def __init__(self, parent, data):
self.size = len(data)
self.parent = parent
self.groups = []
for i in range(len(parent.parent.data.faces)):
group_id = unpack('i', data[:4])[0]
self.groups.append(group_id)
data = data[4:]
class Keyframer(Data): pass
class ObjectDescription(Data): pass
class ObjectHirarchy(Data):
def __init__(self, parent, data):
        self.parent = parent
zero_index = data.find('\0')
self.name = data[:zero_index]
data = data[zero_index+1:]
self.size = zero_index+1 + 3*4
self.hirarchy = unpack('H', data[4:6])[0]
def __repr__(self):
return '%s %s %i' % (self.__class__.__name__, self.name, self.hirarchy)
names = {
0x4d4d: Main,
0x3d3d: Editor,
0x4000: Object,
0x4100: Mesh,
0x4110: Vertices,
0x4120: Faces,
0x4140: Texcoords,
0x4160: Matrix,
0x4130: FaceMaterial,
0x4150: SmoothGroup,
0xb000: Keyframer,
0xb002: ObjectDescription,
0xb010: ObjectHirarchy,
}
def print_chunk(chunk, indent=0):
print '%s%04X: %s' % (' '*indent, chunk.id, chunk.name)
for child in chunk.children:
print_chunk(child, indent+1)
class Children(object):
def __init__(self):
self.list = []
self.map = {}
def add(self, child):
name = child.name.lower()
map = self.map
self.list.append(child)
if name in map:
if isinstance(map[name], list):
map[name].append(child)
else:
map[name] = [map[name], child]
else:
map[name] = child
def __iter__(self):
return iter(self.list)
def __getitem__(self, key):
if isinstance(key, str):
return self.map[key]
else:
return self.list[key]
def __getattr__(self, name):
return self.map[name]
class Chunk:
def __init__(self, parent, id, data):
self.parent = parent
self.id = id
self.name = 'unknown'
self.data = None
self.children = Children()
if id in names:
self.data = names[id](self, data)
#self.name = '%s' % self.data
self.name = self.data.__class__.__name__
self.parse_chunks(data[self.data.size:])
def parse_chunks(self, data):
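        # each chunk is a 2-byte id, a 4-byte total length (header included),
        # then the payload, which may itself contain nested chunks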
while data:
id = unpack('H', data[:2])[0]
length = unpack('i', data[2:6])[0]
self.children.add(Chunk(self, id, data[6:length]))
data = data[length:]
class File3Ds:
@staticmethod
def open(filename):
data = open(filename, 'rb').read()
return File3Ds(data)
def __init__(self, data):
self.data = data
id = unpack('H', data[:2])[0]
length = unpack('i', data[2:6])[0]
data = data[6:]
self.main = Chunk(self, id, data)
if __name__ == '__main__':
import sys
filename = sys.argv[1]
infile = File3Ds.open(filename)
for obj in infile.main.children.editor.children.object:
mesh = obj.children.mesh
faces = mesh.children.faces
vertices = mesh.children.vertices
texcoords = mesh.children.texcoords
center = mesh.children.matrix.data.center
groups = faces.children.smoothgroup.data.groups
for i, (v1, v2, v3, flags) in enumerate(faces.data.faces):
group = groups[i]
vert1 = vertices.data.vertices[v1]
vert2 = vertices.data.vertices[v2]
vert3 = vertices.data.vertices[v3]
uv1 = texcoords.data.texcoords[v1]
uv2 = texcoords.data.texcoords[v2]
uv3 = texcoords.data.texcoords[v3]
print_chunk(infile.main)
|
pyalot/parse-3d-files
|
3ds/parse.py
|
Python
|
agpl-3.0
| 7,134
|
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name' : 'Claim on Deliveries',
'version' : '1.0',
'author' : 'OpenERP SA',
'category' : 'Warehouse Management',
'depends' : ['base', 'crm_claim', 'stock'],
'demo' : [],
'description': """
Create a claim from a delivery order.
=====================================
Adds a Claim link to the delivery order.
""",
'data' : [
'claim_delivery_view.xml',
'claim_delivery_data.xml',],
'auto_install': False,
'installable': True,
'images': ['images/1_claim_link_delivery_order.jpeg'],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
diogocs1/comps
|
web/addons/claim_from_delivery/__openerp__.py
|
Python
|
apache-2.0
| 1,576
|
#from stock_scraper.spiders.SpotValueSpider import SpotValueSpider
#from stock_scraper.spiders.StockOptionSpider import StockOptionSpider
|
puchchi/stock_scraper_latest
|
scraper/spiders/__init__.py
|
Python
|
mit
| 137
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=R0904, C0111, C0302
"""
This module contains various unit tests for
functions in CloudDLPHook
"""
import unittest
from typing import Any, Dict
from unittest import mock
from unittest.mock import PropertyMock
import pytest
from google.cloud.dlp_v2.types import DlpJob
from airflow.exceptions import AirflowException
from airflow.providers.google.cloud.hooks.dlp import CloudDLPHook
from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id
API_RESPONSE = {} # type: Dict[Any, Any]
ORGANIZATION_ID = "test-org"
ORGANIZATION_PATH = f"organizations/{ORGANIZATION_ID}"
PROJECT_ID = "test-project"
PROJECT_PATH = f"projects/{PROJECT_ID}"
DLP_JOB_ID = "job123"
DLP_JOB_PATH = f"projects/{PROJECT_ID}/dlpJobs/{DLP_JOB_ID}"
TEMPLATE_ID = "template123"
STORED_INFO_TYPE_ID = "type123"
TRIGGER_ID = "trigger123"
DEIDENTIFY_TEMPLATE_ORGANIZATION_PATH = "organizations/{}/deidentifyTemplates/{}".format(
ORGANIZATION_ID, TEMPLATE_ID
)
INSPECT_TEMPLATE_ORGANIZATION_PATH = "organizations/{}/inspectTemplates/{}".format(
ORGANIZATION_ID, TEMPLATE_ID
)
STORED_INFO_TYPE_ORGANIZATION_PATH = "organizations/{}/storedInfoTypes/{}".format(
ORGANIZATION_ID, STORED_INFO_TYPE_ID
)
DEIDENTIFY_TEMPLATE_PROJECT_PATH = f"projects/{PROJECT_ID}/deidentifyTemplates/{TEMPLATE_ID}"
INSPECT_TEMPLATE_PROJECT_PATH = f"projects/{PROJECT_ID}/inspectTemplates/{TEMPLATE_ID}"
STORED_INFO_TYPE_PROJECT_PATH = f"projects/{PROJECT_ID}/storedInfoTypes/{STORED_INFO_TYPE_ID}"
JOB_TRIGGER_PATH = f"projects/{PROJECT_ID}/jobTriggers/{TRIGGER_ID}"
class TestCloudDLPHook(unittest.TestCase):
def setUp(self):
with mock.patch(
"airflow.providers.google.common.hooks.base_google.GoogleBaseHook.__init__",
new=mock_base_gcp_hook_no_default_project_id,
):
self.hook = CloudDLPHook(gcp_conn_id="test")
@mock.patch(
"airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.client_info", new_callable=mock.PropertyMock
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook._get_credentials")
@mock.patch("airflow.providers.google.cloud.hooks.dlp.DlpServiceClient")
def test_dlp_service_client_creation(self, mock_client, mock_get_creds, mock_client_info):
result = self.hook.get_conn()
mock_client.assert_called_once_with(
credentials=mock_get_creds.return_value, client_info=mock_client_info.return_value
)
assert mock_client.return_value == result
assert self.hook._client == result
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_cancel_dlp_job(self, get_conn):
self.hook.cancel_dlp_job(dlp_job_id=DLP_JOB_ID, project_id=PROJECT_ID)
get_conn.return_value.cancel_dlp_job.assert_called_once_with(
name=DLP_JOB_PATH, retry=None, timeout=None, metadata=None
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_cancel_dlp_job_without_dlp_job_id(self, _):
with pytest.raises(AirflowException):
self.hook.cancel_dlp_job(dlp_job_id=None, project_id=PROJECT_ID)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_cancel_dlp_job_without_parent(self, _, mock_project_id):
with pytest.raises(AirflowException):
self.hook.cancel_dlp_job(dlp_job_id=DLP_JOB_ID) # pylint: disable=no-value-for-parameter
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_create_deidentify_template_with_org_id(self, get_conn, mock_project_id):
get_conn.return_value.create_deidentify_template.return_value = API_RESPONSE
result = self.hook.create_deidentify_template(organization_id=ORGANIZATION_ID)
assert result is API_RESPONSE
get_conn.return_value.create_deidentify_template.assert_called_once_with(
parent=ORGANIZATION_PATH,
deidentify_template=None,
template_id=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_create_deidentify_template_with_project_id(self, get_conn):
get_conn.return_value.create_deidentify_template.return_value = API_RESPONSE
result = self.hook.create_deidentify_template(project_id=PROJECT_ID)
assert result is API_RESPONSE
get_conn.return_value.create_deidentify_template.assert_called_once_with(
parent=PROJECT_PATH,
deidentify_template=None,
template_id=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_create_deidentify_template_without_parent(self, _, mock_project_id):
with pytest.raises(AirflowException):
self.hook.create_deidentify_template()
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_create_dlp_job(self, get_conn):
get_conn.return_value.create_dlp_job.return_value = API_RESPONSE
result = self.hook.create_dlp_job(project_id=PROJECT_ID, wait_until_finished=False)
assert result is API_RESPONSE
get_conn.return_value.create_dlp_job.assert_called_once_with(
parent=PROJECT_PATH,
inspect_job=None,
risk_job=None,
job_id=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_create_dlp_job_without_project_id(self, mock_get_conn, mock_project_id):
with pytest.raises(AirflowException):
self.hook.create_dlp_job() # pylint: disable=no-value-for-parameter
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_create_dlp_job_with_wait_until_finished(self, get_conn):
job_for_create = DlpJob(name=DLP_JOB_PATH, state=DlpJob.JobState.PENDING)
get_conn.return_value.create_dlp_job.return_value = job_for_create
job_for_get = DlpJob(name=DLP_JOB_PATH, state=DlpJob.JobState.DONE)
get_conn.return_value.get_dlp_job.return_value = job_for_get
self.hook.create_dlp_job(project_id=PROJECT_ID)
get_conn.return_value.get_dlp_job.assert_called_once_with(
name=DLP_JOB_PATH, retry=None, timeout=None, metadata=None
)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_create_inspect_template_with_org_id(self, get_conn, mock_project_id):
get_conn.return_value.create_inspect_template.return_value = API_RESPONSE
result = self.hook.create_inspect_template(organization_id=ORGANIZATION_ID)
assert result is API_RESPONSE
get_conn.return_value.create_inspect_template.assert_called_once_with(
parent=ORGANIZATION_PATH,
inspect_template=None,
template_id=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_create_inspect_template_with_project_id(self, get_conn):
get_conn.return_value.create_inspect_template.return_value = API_RESPONSE
result = self.hook.create_inspect_template(project_id=PROJECT_ID)
assert result is API_RESPONSE
get_conn.return_value.create_inspect_template.assert_called_once_with(
parent=PROJECT_PATH,
inspect_template=None,
template_id=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_create_inspect_template_without_parent(self, _, mock_project_id):
with pytest.raises(AirflowException):
self.hook.create_inspect_template()
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_create_job_trigger(self, get_conn):
get_conn.return_value.create_job_trigger.return_value = API_RESPONSE
result = self.hook.create_job_trigger(project_id=PROJECT_ID)
assert result is API_RESPONSE
get_conn.return_value.create_job_trigger.assert_called_once_with(
parent=PROJECT_PATH,
job_trigger=None,
trigger_id=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_create_job_trigger_without_parent(self, mock_get_conn, mock_project_id):
with pytest.raises(AirflowException):
self.hook.create_job_trigger() # pylint: disable=no-value-for-parameter
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_create_stored_info_type_with_org_id(self, get_conn, mock_project_id):
get_conn.return_value.create_stored_info_type.return_value = API_RESPONSE
result = self.hook.create_stored_info_type(organization_id=ORGANIZATION_ID)
assert result is API_RESPONSE
get_conn.return_value.create_stored_info_type.assert_called_once_with(
parent=ORGANIZATION_PATH,
config=None,
stored_info_type_id=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_create_stored_info_type_with_project_id(self, get_conn):
get_conn.return_value.create_stored_info_type.return_value = API_RESPONSE
result = self.hook.create_stored_info_type(project_id=PROJECT_ID)
assert result is API_RESPONSE
get_conn.return_value.create_stored_info_type.assert_called_once_with(
parent=PROJECT_PATH,
config=None,
stored_info_type_id=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_create_stored_info_type_without_parent(self, mock_get_conn, mock_project_id):
with pytest.raises(AirflowException):
self.hook.create_stored_info_type()
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_deidentify_content(self, get_conn):
get_conn.return_value.deidentify_content.return_value = API_RESPONSE
result = self.hook.deidentify_content(project_id=PROJECT_ID)
assert result is API_RESPONSE
get_conn.return_value.deidentify_content.assert_called_once_with(
parent=PROJECT_PATH,
deidentify_config=None,
inspect_config=None,
item=None,
inspect_template_name=None,
deidentify_template_name=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_deidentify_content_without_parent(self, mock_get_conn, mock_project_id):
with pytest.raises(AirflowException):
self.hook.deidentify_content() # pylint: disable=no-value-for-parameter
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_delete_deidentify_template_with_org_id(self, get_conn, mock_project_id):
self.hook.delete_deidentify_template(template_id=TEMPLATE_ID, organization_id=ORGANIZATION_ID)
get_conn.return_value.delete_deidentify_template.assert_called_once_with(
name=DEIDENTIFY_TEMPLATE_ORGANIZATION_PATH,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_delete_deidentify_template_with_project_id(self, get_conn):
self.hook.delete_deidentify_template(template_id=TEMPLATE_ID, project_id=PROJECT_ID)
get_conn.return_value.delete_deidentify_template.assert_called_once_with(
name=DEIDENTIFY_TEMPLATE_PROJECT_PATH,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_delete_deidentify_template_without_template_id(self, _):
with pytest.raises(AirflowException):
self.hook.delete_deidentify_template(template_id=None)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_delete_deidentify_template_without_parent(self, mock_get_conn, mock_project_id):
with pytest.raises(AirflowException):
self.hook.delete_deidentify_template(template_id=TEMPLATE_ID)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_delete_dlp_job(self, get_conn):
self.hook.delete_dlp_job(dlp_job_id=DLP_JOB_ID, project_id=PROJECT_ID)
get_conn.return_value.delete_dlp_job.assert_called_once_with(
name=DLP_JOB_PATH, retry=None, timeout=None, metadata=None
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_delete_dlp_job_without_dlp_job_id(self, _):
with pytest.raises(AirflowException):
self.hook.delete_dlp_job(dlp_job_id=None, project_id=PROJECT_ID)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_delete_dlp_job_without_parent(self, mock_get_conn, mock_project_id):
with pytest.raises(AirflowException):
self.hook.delete_dlp_job(dlp_job_id=DLP_JOB_ID) # pylint: disable=no-value-for-parameter
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_delete_inspect_template_with_org_id(self, get_conn, mock_project_id):
self.hook.delete_inspect_template(template_id=TEMPLATE_ID, organization_id=ORGANIZATION_ID)
get_conn.return_value.delete_inspect_template.assert_called_once_with(
name=INSPECT_TEMPLATE_ORGANIZATION_PATH,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_delete_inspect_template_with_project_id(self, get_conn):
self.hook.delete_inspect_template(template_id=TEMPLATE_ID, project_id=PROJECT_ID)
get_conn.return_value.delete_inspect_template.assert_called_once_with(
name=INSPECT_TEMPLATE_PROJECT_PATH,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_delete_inspect_template_without_template_id(self, _):
with pytest.raises(AirflowException):
self.hook.delete_inspect_template(template_id=None)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_delete_inspect_template_without_parent(self, mock_get_conn, mock_project_id):
with pytest.raises(AirflowException):
self.hook.delete_inspect_template(template_id=TEMPLATE_ID)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_delete_job_trigger(self, get_conn):
self.hook.delete_job_trigger(job_trigger_id=TRIGGER_ID, project_id=PROJECT_ID)
get_conn.return_value.delete_job_trigger.assert_called_once_with(
name=JOB_TRIGGER_PATH, retry=None, timeout=None, metadata=None
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_delete_job_trigger_without_trigger_id(self, _):
with pytest.raises(AirflowException):
self.hook.delete_job_trigger(job_trigger_id=None, project_id=PROJECT_ID)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_delete_job_trigger_without_parent(self, mock_get_conn, mock_project_id):
with pytest.raises(AirflowException):
self.hook.delete_job_trigger(job_trigger_id=TRIGGER_ID) # pylint: disable=no-value-for-parameter
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_delete_stored_info_type_with_org_id(self, get_conn, mock_project_id):
self.hook.delete_stored_info_type(
stored_info_type_id=STORED_INFO_TYPE_ID, organization_id=ORGANIZATION_ID
)
get_conn.return_value.delete_stored_info_type.assert_called_once_with(
name=STORED_INFO_TYPE_ORGANIZATION_PATH,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_delete_stored_info_type_with_project_id(self, get_conn):
self.hook.delete_stored_info_type(stored_info_type_id=STORED_INFO_TYPE_ID, project_id=PROJECT_ID)
get_conn.return_value.delete_stored_info_type.assert_called_once_with(
name=STORED_INFO_TYPE_PROJECT_PATH,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_delete_stored_info_type_without_stored_info_type_id(self, _):
with pytest.raises(AirflowException):
self.hook.delete_stored_info_type(stored_info_type_id=None)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_delete_stored_info_type_without_parent(self, mock_get_conn, mock_project_id):
with pytest.raises(AirflowException):
self.hook.delete_stored_info_type(stored_info_type_id=STORED_INFO_TYPE_ID)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_get_deidentify_template_with_org_id(self, get_conn, mock_project_id):
get_conn.return_value.get_deidentify_template.return_value = API_RESPONSE
result = self.hook.get_deidentify_template(template_id=TEMPLATE_ID, organization_id=ORGANIZATION_ID)
assert result is API_RESPONSE
get_conn.return_value.get_deidentify_template.assert_called_once_with(
name=DEIDENTIFY_TEMPLATE_ORGANIZATION_PATH,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_get_deidentify_template_with_project_id(self, get_conn):
get_conn.return_value.get_deidentify_template.return_value = API_RESPONSE
result = self.hook.get_deidentify_template(template_id=TEMPLATE_ID, project_id=PROJECT_ID)
assert result is API_RESPONSE
get_conn.return_value.get_deidentify_template.assert_called_once_with(
name=DEIDENTIFY_TEMPLATE_PROJECT_PATH,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_get_deidentify_template_without_template_id(self, _):
with pytest.raises(AirflowException):
self.hook.get_deidentify_template(template_id=None)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_get_deidentify_template_without_parent(self, mock_get_conn, mock_project_id):
with pytest.raises(AirflowException):
self.hook.get_deidentify_template(template_id=TEMPLATE_ID)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_get_dlp_job(self, get_conn):
get_conn.return_value.get_dlp_job.return_value = API_RESPONSE
result = self.hook.get_dlp_job(dlp_job_id=DLP_JOB_ID, project_id=PROJECT_ID)
assert result is API_RESPONSE
get_conn.return_value.get_dlp_job.assert_called_once_with(
name=DLP_JOB_PATH, retry=None, timeout=None, metadata=None
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_get_dlp_job_without_dlp_job_id(self, _):
with pytest.raises(AirflowException):
self.hook.get_dlp_job(dlp_job_id=None, project_id=PROJECT_ID)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_get_dlp_job_without_parent(self, mock_get_conn, mock_project_id):
with pytest.raises(AirflowException):
self.hook.get_dlp_job(dlp_job_id=DLP_JOB_ID) # pylint: disable=no-value-for-parameter
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_get_inspect_template_with_org_id(self, get_conn, mock_project_id):
get_conn.return_value.get_inspect_template.return_value = API_RESPONSE
result = self.hook.get_inspect_template(template_id=TEMPLATE_ID, organization_id=ORGANIZATION_ID)
assert result is API_RESPONSE
get_conn.return_value.get_inspect_template.assert_called_once_with(
name=INSPECT_TEMPLATE_ORGANIZATION_PATH,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_get_inspect_template_with_project_id(self, get_conn):
get_conn.return_value.get_inspect_template.return_value = API_RESPONSE
result = self.hook.get_inspect_template(template_id=TEMPLATE_ID, project_id=PROJECT_ID)
assert result is API_RESPONSE
get_conn.return_value.get_inspect_template.assert_called_once_with(
name=INSPECT_TEMPLATE_PROJECT_PATH,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_get_inspect_template_without_template_id(self, _):
with pytest.raises(AirflowException):
self.hook.get_inspect_template(template_id=None)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_get_inspect_template_without_parent(self, mock_get_conn, mock_project_id):
with pytest.raises(AirflowException):
self.hook.get_inspect_template(template_id=TEMPLATE_ID)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_get_job_trigger(self, get_conn):
get_conn.return_value.get_job_trigger.return_value = API_RESPONSE
result = self.hook.get_job_trigger(job_trigger_id=TRIGGER_ID, project_id=PROJECT_ID)
assert result is API_RESPONSE
get_conn.return_value.get_job_trigger.assert_called_once_with(
name=JOB_TRIGGER_PATH, retry=None, timeout=None, metadata=None
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_get_job_trigger_without_trigger_id(self, _):
with pytest.raises(AirflowException):
self.hook.get_job_trigger(job_trigger_id=None, project_id=PROJECT_ID)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_get_job_trigger_without_parent(self, mock_get_conn, mock_project_id):
with pytest.raises(AirflowException):
self.hook.get_job_trigger(job_trigger_id=TRIGGER_ID) # pylint: disable=no-value-for-parameter
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_get_stored_info_type_with_org_id(self, get_conn, mock_project_id):
get_conn.return_value.get_stored_info_type.return_value = API_RESPONSE
result = self.hook.get_stored_info_type(
stored_info_type_id=STORED_INFO_TYPE_ID, organization_id=ORGANIZATION_ID
)
assert result is API_RESPONSE
get_conn.return_value.get_stored_info_type.assert_called_once_with(
name=STORED_INFO_TYPE_ORGANIZATION_PATH,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_get_stored_info_type_with_project_id(self, get_conn):
get_conn.return_value.get_stored_info_type.return_value = API_RESPONSE
result = self.hook.get_stored_info_type(
stored_info_type_id=STORED_INFO_TYPE_ID, project_id=PROJECT_ID
)
assert result is API_RESPONSE
get_conn.return_value.get_stored_info_type.assert_called_once_with(
name=STORED_INFO_TYPE_PROJECT_PATH,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_get_stored_info_type_without_stored_info_type_id(self, _):
with pytest.raises(AirflowException):
self.hook.get_stored_info_type(stored_info_type_id=None)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
    def test_get_stored_info_type_without_parent(self, mock_get_conn, mock_project_id):
with pytest.raises(AirflowException):
self.hook.get_stored_info_type(stored_info_type_id=STORED_INFO_TYPE_ID)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_inspect_content(self, get_conn):
get_conn.return_value.inspect_content.return_value = API_RESPONSE
result = self.hook.inspect_content(project_id=PROJECT_ID)
assert result is API_RESPONSE
get_conn.return_value.inspect_content.assert_called_once_with(
parent=PROJECT_PATH,
inspect_config=None,
item=None,
inspect_template_name=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_inspect_content_without_parent(self, mock_get_conn, mock_project_id):
with pytest.raises(AirflowException):
self.hook.inspect_content() # pylint: disable=no-value-for-parameter
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_list_deidentify_templates_with_org_id(self, get_conn, mock_project_id):
result = self.hook.list_deidentify_templates(organization_id=ORGANIZATION_ID)
assert isinstance(result, list)
get_conn.return_value.list_deidentify_templates.assert_called_once_with(
parent=ORGANIZATION_PATH,
page_size=None,
order_by=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_list_deidentify_templates_with_project_id(self, get_conn):
result = self.hook.list_deidentify_templates(project_id=PROJECT_ID)
assert isinstance(result, list)
get_conn.return_value.list_deidentify_templates.assert_called_once_with(
parent=PROJECT_PATH,
page_size=None,
order_by=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_list_deidentify_templates_without_parent(self, mock_get_conn, mock_project_id):
with pytest.raises(AirflowException):
self.hook.list_deidentify_templates()
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_list_dlp_jobs(self, get_conn):
result = self.hook.list_dlp_jobs(project_id=PROJECT_ID)
assert isinstance(result, list)
get_conn.return_value.list_dlp_jobs.assert_called_once_with(
parent=PROJECT_PATH,
filter_=None,
page_size=None,
type_=None,
order_by=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_list_dlp_jobs_without_parent(self, mock_get_conn, mock_project_id):
with pytest.raises(AirflowException):
self.hook.list_dlp_jobs() # pylint: disable=no-value-for-parameter
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_list_info_types(self, get_conn):
get_conn.return_value.list_info_types.return_value = API_RESPONSE
result = self.hook.list_info_types()
assert result is API_RESPONSE
get_conn.return_value.list_info_types.assert_called_once_with(
language_code=None, filter_=None, retry=None, timeout=None, metadata=None
)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_list_inspect_templates_with_org_id(self, get_conn, mock_project_id):
result = self.hook.list_inspect_templates(organization_id=ORGANIZATION_ID)
assert isinstance(result, list)
get_conn.return_value.list_inspect_templates.assert_called_once_with(
parent=ORGANIZATION_PATH,
page_size=None,
order_by=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_list_inspect_templates_with_project_id(self, get_conn):
result = self.hook.list_inspect_templates(project_id=PROJECT_ID)
assert isinstance(result, list)
get_conn.return_value.list_inspect_templates.assert_called_once_with(
parent=PROJECT_PATH,
page_size=None,
order_by=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_list_inspect_templates_without_parent(self, mock_get_conn, mock_project_id):
with pytest.raises(AirflowException):
self.hook.list_inspect_templates()
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_list_job_triggers(self, get_conn):
result = self.hook.list_job_triggers(project_id=PROJECT_ID)
assert isinstance(result, list)
get_conn.return_value.list_job_triggers.assert_called_once_with(
parent=PROJECT_PATH,
page_size=None,
order_by=None,
filter_=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_list_job_triggers_without_parent(self, mock_get_conn, mock_project_id):
with pytest.raises(AirflowException):
self.hook.list_job_triggers() # pylint: disable=no-value-for-parameter
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_list_stored_info_types_with_org_id(self, get_conn, mock_project_id):
result = self.hook.list_stored_info_types(organization_id=ORGANIZATION_ID)
assert isinstance(result, list)
get_conn.return_value.list_stored_info_types.assert_called_once_with(
parent=ORGANIZATION_PATH,
page_size=None,
order_by=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_list_stored_info_types_with_project_id(self, get_conn):
result = self.hook.list_stored_info_types(project_id=PROJECT_ID)
assert isinstance(result, list)
get_conn.return_value.list_stored_info_types.assert_called_once_with(
parent=PROJECT_PATH,
page_size=None,
order_by=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_list_stored_info_types_without_parent(self, mock_get_conn, mock_project_id):
with pytest.raises(AirflowException):
self.hook.list_stored_info_types()
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_redact_image(self, get_conn):
get_conn.return_value.redact_image.return_value = API_RESPONSE
result = self.hook.redact_image(project_id=PROJECT_ID)
assert result is API_RESPONSE
get_conn.return_value.redact_image.assert_called_once_with(
parent=PROJECT_PATH,
inspect_config=None,
image_redaction_configs=None,
include_findings=None,
byte_item=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_redact_image_without_parent(self, mock_get_conn, mock_project_id):
with pytest.raises(AirflowException):
self.hook.redact_image() # pylint: disable=no-value-for-parameter
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_reidentify_content(self, get_conn):
get_conn.return_value.reidentify_content.return_value = API_RESPONSE
result = self.hook.reidentify_content(project_id=PROJECT_ID)
assert result is API_RESPONSE
get_conn.return_value.reidentify_content.assert_called_once_with(
parent=PROJECT_PATH,
reidentify_config=None,
inspect_config=None,
item=None,
inspect_template_name=None,
reidentify_template_name=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_reidentify_content_without_parent(self, mock_get_conn, mock_project_id):
with pytest.raises(AirflowException):
self.hook.reidentify_content() # pylint: disable=no-value-for-parameter
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_update_deidentify_template_with_org_id(self, get_conn, mock_project_id):
get_conn.return_value.update_deidentify_template.return_value = API_RESPONSE
result = self.hook.update_deidentify_template(
template_id=TEMPLATE_ID, organization_id=ORGANIZATION_ID
)
assert result is API_RESPONSE
get_conn.return_value.update_deidentify_template.assert_called_once_with(
name=DEIDENTIFY_TEMPLATE_ORGANIZATION_PATH,
deidentify_template=None,
update_mask=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_update_deidentify_template_with_project_id(self, get_conn):
get_conn.return_value.update_deidentify_template.return_value = API_RESPONSE
result = self.hook.update_deidentify_template(template_id=TEMPLATE_ID, project_id=PROJECT_ID)
assert result is API_RESPONSE
get_conn.return_value.update_deidentify_template.assert_called_once_with(
name=DEIDENTIFY_TEMPLATE_PROJECT_PATH,
deidentify_template=None,
update_mask=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_update_deidentify_template_without_template_id(self, _):
with pytest.raises(AirflowException):
self.hook.update_deidentify_template(template_id=None, organization_id=ORGANIZATION_ID)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_update_deidentify_template_without_parent(self, mock_get_conn, mock_project_id):
with pytest.raises(AirflowException):
self.hook.update_deidentify_template(template_id=TEMPLATE_ID)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_update_inspect_template_with_org_id(self, get_conn, mock_project_id):
get_conn.return_value.update_inspect_template.return_value = API_RESPONSE
result = self.hook.update_inspect_template(template_id=TEMPLATE_ID, organization_id=ORGANIZATION_ID)
assert result is API_RESPONSE
get_conn.return_value.update_inspect_template.assert_called_once_with(
name=INSPECT_TEMPLATE_ORGANIZATION_PATH,
inspect_template=None,
update_mask=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_update_inspect_template_with_project_id(self, get_conn):
get_conn.return_value.update_inspect_template.return_value = API_RESPONSE
result = self.hook.update_inspect_template(template_id=TEMPLATE_ID, project_id=PROJECT_ID)
assert result is API_RESPONSE
get_conn.return_value.update_inspect_template.assert_called_once_with(
name=INSPECT_TEMPLATE_PROJECT_PATH,
inspect_template=None,
update_mask=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_update_inspect_template_without_template_id(self, _):
with pytest.raises(AirflowException):
self.hook.update_inspect_template(template_id=None, organization_id=ORGANIZATION_ID)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_update_inspect_template_without_parent(self, mock_get_conn, mock_project_id):
with pytest.raises(AirflowException):
self.hook.update_inspect_template(template_id=TEMPLATE_ID)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_update_job_trigger(self, get_conn):
get_conn.return_value.update_job_trigger.return_value = API_RESPONSE
result = self.hook.update_job_trigger(job_trigger_id=TRIGGER_ID, project_id=PROJECT_ID)
assert result is API_RESPONSE
get_conn.return_value.update_job_trigger.assert_called_once_with(
name=JOB_TRIGGER_PATH,
job_trigger=None,
update_mask=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_update_job_trigger_without_job_trigger_id(self, _):
with pytest.raises(AirflowException):
self.hook.update_job_trigger(job_trigger_id=None, project_id=PROJECT_ID)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_update_job_trigger_without_parent(self, mock_get_conn, mock_project_id):
with pytest.raises(AirflowException):
self.hook.update_job_trigger(job_trigger_id=TRIGGER_ID) # pylint: disable=no-value-for-parameter
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_update_stored_info_type_with_org_id(self, get_conn, mock_project_id):
get_conn.return_value.update_stored_info_type.return_value = API_RESPONSE
result = self.hook.update_stored_info_type(
stored_info_type_id=STORED_INFO_TYPE_ID, organization_id=ORGANIZATION_ID
)
assert result is API_RESPONSE
get_conn.return_value.update_stored_info_type.assert_called_once_with(
name=STORED_INFO_TYPE_ORGANIZATION_PATH,
config=None,
update_mask=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_update_stored_info_type_with_project_id(self, get_conn):
get_conn.return_value.update_stored_info_type.return_value = API_RESPONSE
result = self.hook.update_stored_info_type(
stored_info_type_id=STORED_INFO_TYPE_ID, project_id=PROJECT_ID
)
assert result is API_RESPONSE
get_conn.return_value.update_stored_info_type.assert_called_once_with(
name=STORED_INFO_TYPE_PROJECT_PATH,
config=None,
update_mask=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_update_stored_info_type_without_stored_info_type_id(self, _):
with pytest.raises(AirflowException):
self.hook.update_stored_info_type(stored_info_type_id=None, organization_id=ORGANIZATION_ID)
@mock.patch(
'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id',
new_callable=PropertyMock,
return_value=None,
)
@mock.patch("airflow.providers.google.cloud.hooks.dlp.CloudDLPHook.get_conn")
def test_update_stored_info_type_without_parent(self, mock_get_conn, mock_project_id):
with pytest.raises(AirflowException):
self.hook.update_stored_info_type(stored_info_type_id=STORED_INFO_TYPE_ID)
|
nathanielvarona/airflow
|
tests/providers/google/cloud/hooks/test_dlp.py
|
Python
|
apache-2.0
| 49,765
|
#!/usr/bin/python
"""tests for panel button function
:author: `Patrick Kanzler <patrick.kanzler@fablab.fau.de>`_
:organization: `python-escpos <https://github.com/python-escpos>`_
:copyright: Copyright (c) 2016 `python-escpos <https://github.com/python-escpos>`_
:license: MIT
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import escpos.printer as printer
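# printer.Dummy() is an in-memory printer that only records the raw ESC/POS
# bytes it would have sent, exposed via its .output property, which makes
# byte-for-byte assertions straightforward.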
def test_function_panel_button_on():
"""test the panel button function (enabling) by comparing output"""
instance = printer.Dummy()
instance.panel_buttons()
assert(instance.output == b'\x1B\x63\x35\x00')
def test_function_panel_button_off():
"""test the panel button function (disabling) by comparing output"""
instance = printer.Dummy()
instance.panel_buttons(False)
assert(instance.output == b'\x1B\x63\x35\x01')
|
belono/python-escpos
|
test/test_function_panel_button.py
|
Python
|
mit
| 917
|
# Eloipool - Python Bitcoin pool server
# Copyright (C) 2011-2013 Luke Dashjr <luke-jr+eloipool@utopios.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from binascii import b2a_hex
import collections
from copy import deepcopy
import json
import logging
import networkserver
import socket
import struct
import sys
from time import time
import traceback
from util import RejectedShare, swap32, target2bdiff, UniqueSessionIdManager
import config
class StratumError(BaseException):
def __init__(self, errno, msg, tb = True):
self.StratumErrNo = errno
self.StratumErrMsg = msg
self.StratumTB = tb
StratumCodes = {
'stale-prevblk': 21,
'stale-work': 21,
'duplicate': 22,
'H-not-zero': 23,
'high-hash': 23,
}
class StratumHandler(networkserver.SocketHandler):
logger = logging.getLogger('StratumHandler')
def __init__(self, *a, **ka):
super().__init__(*a, **ka)
self.remoteHost = self.addr[0]
self.changeTask(None)
self.set_terminator(b"\n")
self.Usernames = {}
self.lastBDiff = None
self.JobTargets = collections.OrderedDict()
self.UA = None
def sendReply(self, ob):
return self.push(json.dumps(ob).encode('ascii') + b"\n")
def found_terminator(self):
inbuf = b"".join(self.incoming).decode('ascii')
self.incoming = []
if not inbuf:
return
try:
rpc = json.loads(inbuf)
except ValueError:
self.boot()
return
if 'method' not in rpc:
# Assume this is a reply to our request
funcname = '_stratumreply_%s' % (rpc['id'],)
if not hasattr(self, funcname):
return
try:
getattr(self, funcname)(rpc)
except BaseException as e:
self.logger.debug(traceback.format_exc())
return
funcname = '_stratum_%s' % (rpc['method'].replace('.', '_'),)
if not hasattr(self, funcname):
self.sendReply({
'error': [-3, "Method '%s' not found" % (rpc['method'],), None],
'id': rpc['id'],
'result': None,
})
return
try:
rv = getattr(self, funcname)(*rpc['params'])
except StratumError as e:
self.sendReply({
'error': (e.StratumErrNo, e.StratumErrMsg, traceback.format_exc() if e.StratumTB else None),
'id': rpc['id'],
'result': None,
})
return
except BaseException as e:
fexc = traceback.format_exc()
self.sendReply({
'error': (20, str(e), fexc),
'id': rpc['id'],
'result': None,
})
if not hasattr(e, 'StratumQuiet'):
self.logger.debug(fexc)
return
if rpc['id'] is None:
return
self.sendReply({
'error': None,
'id': rpc['id'],
'result': rv,
})
def sendJob(self):
target = self.server.defaultTarget / 16
if len(self.Usernames) == 1:
dtarget = self.server.getTarget(next(iter(self.Usernames)), time())
if not dtarget is None:
target = dtarget
bdiff = target2bdiff(target)
if self.lastBDiff != bdiff:
self.sendReply({
'id': None,
'method': 'mining.set_difficulty',
'params': [
bdiff
],
})
self.lastBDiff = bdiff
self.push(self.server.JobBytes)
if len(self.JobTargets) > 4:
self.JobTargets.popitem(False)
self.JobTargets[self.server.JobId] = target
def requestStratumUA(self):
self.sendReply({
'id': 7,
'method': 'client.get_version',
'params': (),
})
def _stratumreply_7(self, rpc):
self.UA = rpc.get('result') or rpc
def _stratum_mining_subscribe(self, UA = None, xid = None):
if not UA is None:
self.UA = UA
if not hasattr(self, '_sid'):
self._sid = UniqueSessionIdManager.get()
if self.server._Clients.get(self._sid) not in (self, None):
del self._sid
raise self.server.RaiseRedFlags(RuntimeError('issuing duplicate sessionid'))
xid = struct.pack('=I', self._sid) # NOTE: Assumes sessionids are 4 bytes
self.extranonce1 = xid
xid = b2a_hex(xid).decode('ascii')
self.server._Clients[id(self)] = self
self.changeTask(self.sendJob, 0)
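        # Per the Stratum protocol, the mining.subscribe result is
        # [[subscription ids for mining.notify / mining.set_difficulty],
        #  extranonce1 as hex, extranonce2 size] -- 4 bytes of extranonce2 here.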
return [
[
['mining.notify', '%s1' % (xid,)],
['mining.set_difficulty', '%s2' % (xid,)],
],
xid,
4,
]
def close(self):
if hasattr(self, '_sid'):
UniqueSessionIdManager.put(self._sid)
delattr(self, '_sid')
try:
del self.server._Clients[id(self)]
except:
pass
super().close()
def _stratum_mining_submit(self, username, jobid, extranonce2, ntime, nonce):
# print("HERE")
if username not in self.Usernames:
raise StratumError(24, 'unauthorized-user', False)
share = {
'username': username,
'remoteHost': self.remoteHost,
'jobid': jobid,
'extranonce1': self.extranonce1,
'extranonce2': bytes.fromhex(extranonce2),
'ntime': bytes.fromhex(ntime),
'nonce': bytes.fromhex(nonce),
'userAgent': self.UA,
'submitProtocol': 'stratum',
}
if jobid in self.JobTargets:
share['target'] = self.JobTargets[jobid]
try:
self.server.receiveShare(share)
except RejectedShare as rej:
rej = str(rej)
errno = StratumCodes.get(rej, 20)
# print(rej)
raise StratumError(errno, rej, False)
return True
def _stratum_mining_authorize(self, username, password = None):
self.Usernames[username] = None
self.changeTask(self.requestStratumUA, 0)
return True
def _stratum_mining_get_transactions(self, jobid):
try:
(MC, wld) = self.server.getExistingStratumJob(jobid)
except KeyError as e:
e.StratumQuiet = True
raise
(height, merkleTree, cb, prevBlock, bits) = MC[:5]
return list(b2a_hex(txn.data).decode('ascii') for txn in merkleTree.data[1:])
class StratumServer(networkserver.AsyncSocketServer):
logger = logging.getLogger('StratumServer')
waker = True
schMT = True
extranonce1null = struct.pack('=I', 0) # NOTE: Assumes sessionids are 4 bytes
def __init__(self, *a, **ka):
ka.setdefault('RequestHandlerClass', StratumHandler)
super().__init__(*a, **ka)
self._Clients = {}
self._JobId = 0
self.JobId = '%d' % (time(),)
self.WakeRequest = None
self.UpdateTask = None
def updateJob(self, wantClear = False):
if self.UpdateTask:
try:
self.rmSchedule(self.UpdateTask)
except:
pass
self._JobId += 1
JobId = '%d %d' % (time(), self._JobId)
(MC, wld) = self.getStratumJob(JobId, wantClear=wantClear)
(height, merkleTree, cb, prevBlock, bits) = MC[:5]
if len(cb) > 96 - len(self.extranonce1null) - 4:
if not self.rejecting:
self.logger.warning('Coinbase too big for stratum: disabling')
self.rejecting = True
self.boot_all()
self.UpdateTask = self.schedule(self.updateJob, time() + 10)
return
elif self.rejecting:
self.rejecting = False
self.logger.info('Coinbase small enough for stratum again: reenabling')
txn = deepcopy(merkleTree.data[0])
cb += self.extranonce1null + b'Eloi'
txn.setCoinbase(cb)
txn.assemble()
pos = txn.data.index(cb) + len(cb)
steps = list(b2a_hex(h).decode('ascii') for h in merkleTree._steps)
self.JobBytes = json.dumps({
'id': None,
'method': 'mining.notify',
'params': [
JobId,
b2a_hex(swap32(prevBlock)).decode('ascii'),
b2a_hex(txn.data[:pos - len(self.extranonce1null) - 4]).decode('ascii'),
b2a_hex(txn.data[pos:]).decode('ascii'),
steps,
'00000002',
b2a_hex(bits[::-1]).decode('ascii'),
b2a_hex(struct.pack('>L', int(time()))).decode('ascii'),
not self.IsJobValid(self.JobId)
],
}).encode('ascii') + b"\n"
self.JobId = JobId
self.WakeRequest = 1
self.wakeup()
self.UpdateTask = self.schedule(self.updateJob, time() + 98)
def pre_schedule(self):
if self.WakeRequest:
self._wakeNodes()
def _wakeNodes(self):
self.WakeRequest = None
C = self._Clients
if not C:
self.logger.debug('Nobody to wake up')
return
OC = len(C)
self.logger.info("%d clients to wake up..." % (OC,))
now = time()
for ic in list(C.values()):
try:
ic.sendJob()
except socket.error:
OC -= 1
# Ignore socket errors; let the main event loop take care of them later
except:
OC -= 1
self.logger.debug('Error sending new job:\n' + traceback.format_exc())
self.logger.info('New job sent to %d clients in %.3f seconds' % (OC, time() - now))
def getTarget(*a, **ka):
return None
|
darrenturn90/eloipool-merged-vtc
|
stratumserver.py
|
Python
|
agpl-3.0
| 8,661
|
# codicefiscale.py - library for Italian fiscal code
#
# This file is based on code from pycodicefiscale, a Python library for
# working with Italian fiscal code numbers officially known as Italy's
# Codice Fiscale.
# https://github.com/baxeico/pycodicefiscale
#
# Copyright (C) 2009-2013 Emanuele Rocca
# Copyright (C) 2014 Augusto Destrero
# Copyright (C) 2014 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""Codice Fiscale (Italian tax code for individuals).
The Codice Fiscale is an alphanumeric code of 16 characters used to
identify individuals residing in Italy. The number consists of three
characters derived from the person's last name, three from the person's
first name, five that hold information on the person's gender and birth
date, four that represent the person's place of birth and one check digit.
>>> validate('RCCMNL83S18D969H')
'RCCMNL83S18D969H'
>>> validate('RCCMNL83S18D969')
Traceback (most recent call last):
...
InvalidLength: ...
>>> calc_check_digit('RCCMNL83S18D969')
'H'
"""
import re
import datetime
from stdnum.exceptions import *
from stdnum.util import clean
# regular expression for matching fiscal codes
_code_re = re.compile(
'^[A-Z]{6}'
'[0-9LMNPQRSTUV]{2}[ABCDEHLMPRST]{1}[0-9LMNPQRSTUV]{2}'
'[A-Z]{1}[0-9LMNPQRSTUV]{3}[A-Z]{1}$')
# encoding of birth day and year values (usually numeric but some letters
# may be substituted on clashes)
_date_digits = dict((x, n) for n, x in enumerate('0123456789'))
_date_digits.update(dict((x, n) for n, x in enumerate('LMNPQRSTUV')))
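# e.g. 'L' decodes to 0 and 'V' to 9, so a code whose digits were replaced by
# letters to avoid a clash with another person still yields the same birth date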
# encoding of month values (A = January, etc.)
_month_digits = dict((x, n) for n, x in enumerate('ABCDEHLMPRST'))
# values of characters in even positions for checksum calculation
_even_values = dict((x, n) for n, x in enumerate('0123456789'))
_even_values.update(
dict((x, n) for n, x in enumerate('ABCDEFGHIJKLMNOPQRSTUVWXYZ')))
# values of characters in odd positions for checksum calculation
values = [1, 0, 5, 7, 9, 13, 15, 17, 19, 21, 2, 4, 18, 20, 11, 3, 6, 8,
12, 14, 16, 10, 22, 25, 24, 23]
_odd_values = dict((x, values[n]) for n, x in enumerate('0123456789'))
_odd_values.update(
dict((x, values[n]) for n, x in enumerate('ABCDEFGHIJKLMNOPQRSTUVWXYZ')))
del values
def compact(number):
"""Convert the number to the minimal representation. This strips the
number of any valid separators and removes surrounding whitespace."""
return clean(number, ' ').strip().upper()
def calc_check_digit(number):
"""Compute the control code for the given number. The passed number
should be the first 15 characters of a fiscal code."""
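    # enumerate() is zero-based while the algorithm counts positions from 1, so
    # n % 2 == 0 selects the table for odd (1st, 3rd, ...) positions; the sum
    # modulo 26 then indexes the alphabet (e.g. 7 -> 'H', as in the doctest above)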
code = sum(_odd_values[x] if n % 2 == 0 else _even_values[x]
for n, x in enumerate(number))
return 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'[code % 26]
def get_birth_date(number, minyear=1920):
"""Get the birth date from the person's fiscal code.
    Only the last two digits of the year are stored in the number. The
dates will be returned in the range from minyear to minyear + 100.
>>> get_birth_date('RCCMNL83S18D969H')
datetime.date(1983, 11, 18)
>>> get_birth_date('RCCMNL83S18D969H', minyear=1990)
datetime.date(2083, 11, 18)
"""
number = compact(number)
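    # day values above 40 encode women (40 is added to the day), so % 40
    # recovers the calendar day regardless of gender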
day = (_date_digits[number[9]] * 10 + _date_digits[number[10]]) % 40
month = _month_digits[number[8]] + 1
year = _date_digits[number[6]] * 10 + _date_digits[number[7]]
# find four-digit year
year += (minyear // 100) * 100
if year < minyear:
year += 100
try:
return datetime.date(year, month, day)
except ValueError:
raise InvalidComponent()
def get_gender(number):
"""Get the gender of the person's fiscal code.
>>> get_gender('RCCMNL83S18D969H')
'M'
>>> get_gender('CNTCHR83T41D969D')
'F'
"""
number = compact(number)
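    # the birth day is stored as 1-31 for men and 41-71 (day + 40) for women,
    # hence the < 32 threshold below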
return 'M' if int(number[9:11]) < 32 else 'F'
def validate(number):
"""Checks to see if the given fiscal code is valid. This checks the
length and whether the check digit is correct."""
number = compact(number)
if len(number) != 16:
raise InvalidLength()
if not _code_re.match(number):
raise InvalidFormat()
if calc_check_digit(number[:-1]) != number[-1]:
raise InvalidChecksum()
# check if birth date is valid
birth_date = get_birth_date(number)
return number
def is_valid(number):
"""Checks to see if the given fiscal code is valid. This checks the
length and whether the check digit is correct."""
try:
return bool(validate(number))
except ValidationError:
return False
|
dchoruzy/python-stdnum
|
stdnum/it/codicefiscale.py
|
Python
|
lgpl-2.1
| 5,301
|
# -*- coding: utf-8 -*-
# © 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo.tests.common import TransactionCase
from dateutil.rrule import MONTHLY
class DateRangeGeneratorTest(TransactionCase):
def setUp(self):
super(DateRangeGeneratorTest, self).setUp()
self.type = self.env['date.range.type'].create(
{'name': 'Fiscal year',
'company_id': False,
'allow_overlap': False})
def test_generate(self):
generator = self.env['date.range.generator']
generator = generator.create({
'date_start': '1943-01-01',
'name_prefix': '1943-',
'type_id': self.type.id,
'duration_count': 3,
'unit_of_time': MONTHLY,
'count': 4})
generator.action_apply()
ranges = self.env['date.range'].search(
[('type_id', '=', self.type.id)])
        self.assertEqual(len(ranges), 4)
range4 = ranges[3]
self.assertEqual(range4.date_start, '1943-10-01')
self.assertEqual(range4.date_end, '1943-12-31')
self.assertEqual(range4.type_id, self.type)
|
thinkopensolutions/server-tools
|
date_range/tests/test_date_range_generator.py
|
Python
|
agpl-3.0
| 1,203
|
# -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pygame
from UserList import UserList
from collections import namedtuple
Point = namedtuple('Point', ['x', 'y'])
Size = namedtuple('Size', ['width', 'height'])
class Color(UserList):
def __init__(self, rgbavalue=(0, 0, 0, 255)):
if isinstance(rgbavalue, str):
self._pygameColor = pygame.Color(rgbavalue)
else:
self._pygameColor = pygame.Color(*rgbavalue)
super(Color, self).__init__(self._pygameColor)
def __repr__(self):
return repr(self._pygameColor)
def __str__(self):
return repr(self)
class RenderQueue(object):
def __init__(self, objs=[]):
self._objs = sorted(objs, self.__sort)
def __sort(self, a, b):
return a.z_index - b.z_index
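    # Note: passing a comparison function positionally to sorted() is Python 2
    # only (consistent with the UserList import above); Python 3 would need
    # key=functools.cmp_to_key(self.__sort) instead.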
def add(self, o):
# assert isinstance(o, Widget)
self._objs.append(o)
self._objs = sorted(self._objs, self.__sort)
def __iter__(self):
for i in self._objs:
yield i
|
yensa/Nalfein
|
utils.py
|
Python
|
mpl-2.0
| 1,199
|
# Copyright 2020 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Module for the inverse function transformation."""
from oryx.core.interpreters.inverse import core
from oryx.core.interpreters.inverse import custom_inverse
from oryx.core.interpreters.inverse import rules
from oryx.core.interpreters.inverse import slice # pylint: disable=redefined-builtin
|
tensorflow/probability
|
spinoffs/oryx/oryx/core/interpreters/inverse/__init__.py
|
Python
|
apache-2.0
| 973
|
# Copyright (c) 2013 Hitachi Data Systems, Inc.
# Copyright (c) 2013 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
Self test for Hitachi Unified Storage (HUS) platform.
"""
import mox
import os
import tempfile
from cinder import test
from cinder.volume import configuration as conf
from cinder.volume.drivers.hds import hds
CONF = """<?xml version="1.0" encoding="UTF-8" ?>
<config>
<mgmt_ip0>172.17.44.16</mgmt_ip0>
<mgmt_ip1>172.17.44.17</mgmt_ip1>
<username>system</username>
<password>manager</password>
<svc_0>
<volume_type>default</volume_type>
<iscsi_ip>172.17.39.132</iscsi_ip>
<hdp>9</hdp>
</svc_0>
<svc_1>
<volume_type>silver</volume_type>
<iscsi_ip>172.17.39.133</iscsi_ip>
<hdp>9</hdp>
</svc_1>
<svc_2>
<volume_type>gold</volume_type>
<iscsi_ip>172.17.39.134</iscsi_ip>
<hdp>9</hdp>
</svc_2>
<svc_3>
<volume_type>platinum</volume_type>
<iscsi_ip>172.17.39.135</iscsi_ip>
<hdp>9</hdp>
</svc_3>
<snapshot>
<hdp>9</hdp>
</snapshot>
<lun_start>
3300
</lun_start>
</config>
"""
class SimulatedHusBackend:
"""Simulation Back end. Talks to HUS."""
alloc_lun = [] # allocated LUs
connections = [] # iSCSI connections
init_index = 0 # initiator index
target_index = 0 # target index
hlun = 0 # hlun index
out = ''
def __init__(self):
self.start_lun = 0
def get_version(self, cmd, ver, ip0, ip1, user, pw):
out = ("Array_ID: 92210013 (HUS130) version: 0920/B-S LU: 4096"
" RG: 75 RG_LU: 1024 Utility_version: 1.0.0")
return out
def get_iscsi_info(self, cmd, ver, ip0, ip1, user, pw):
out = """CTL: 0 Port: 4 IP: 172.17.39.132 Port: 3260 Link: Up
CTL: 0 Port: 5 IP: 172.17.39.133 Port: 3260 Link: Up
CTL: 1 Port: 4 IP: 172.17.39.134 Port: 3260 Link: Up
CTL: 1 Port: 5 IP: 172.17.39.135 Port: 3260 Link: Up"""
return out
def get_hdp_info(self, cmd, ver, ip0, ip1, user, pw):
out = """HDP: 2 272384 MB 33792 MB 12 % LUs: 70 Normal Normal
HDP: 9 546816 MB 73728 MB 13 % LUs: 194 Normal Normal"""
return out
def create_lu(self, cmd, ver, ip0, ip1, user, pw, id, hdp, start,
end, size):
if self.start_lun < int(start): # initialize first time
self.start_lun = int(start)
out = ("LUN: %d HDP: 9 size: %s MB, is successfully created" %
(self.start_lun, size))
self.alloc_lun.append(str(self.start_lun))
self.start_lun += 1
return out
def extend_vol(self, cmd, ver, ip0, ip1, user, pw, id, lu, size):
out = ("LUN: %s successfully extended to %s MB" % (lu, size))
SimulatedHusBackend.out = out
return out
def delete_lu(self, cmd, ver, ip0, ip1, user, pw, id, lun):
out = ""
if lun in self.alloc_lun:
out = "LUN: %s is successfully deleted" % (lun)
self.alloc_lun.remove(lun)
return out
def create_dup(self, cmd, ver, ip0, ip1, user, pw, id, src_lun,
hdp, start, end, size):
out = ("LUN: %s HDP: 9 size: %s MB, is successfully created" %
(self.start_lun, size))
self.alloc_lun.append(str(self.start_lun))
self.start_lun += 1
return out
def add_iscsi_conn(self, cmd, ver, ip0, ip1, user, pw, id, lun, ctl, port,
iqn, initiator):
conn = (self.hlun, lun, initiator, self.init_index, iqn,
self.target_index, ctl, port)
out = ("H-LUN: %d mapped. LUN: %s, iSCSI Initiator: %s @ index: %d, \
and Target: %s @ index %d is successfully paired @ CTL: %s, \
Port: %s" % conn)
self.init_index += 1
self.target_index += 1
self.hlun += 1
SimulatedHusBackend.connections.append(conn)
return out
def del_iscsi_conn(self, cmd, ver, ip0, ip1, user, pw, id, lun, ctl, port,
iqn, initiator):
conn = ()
for connection in SimulatedHusBackend.connections:
if (connection[1] == lun):
conn = connection
SimulatedHusBackend.connections.remove(connection)
        if not conn:  # no connection matched the given LUN; nothing to delete
return
(hlun, lun, initiator, init_index, iqn, target_index, ctl, port) = conn
detail = (hlun, iqn)
out = ("H-LUN: %d successfully deleted from target %s" % detail)
return out
# The following information is passed on to tests, when creating a volume
_VOLUME = {'volume_id': '1234567890', 'size': 128,
'volume_type': None, 'provider_location': None, 'id': 'abcdefg'}
class HUSiSCSIDriverTest(test.TestCase):
"""Test HUS iSCSI volume driver."""
def __init__(self, *args, **kwargs):
super(HUSiSCSIDriverTest, self).__init__(*args, **kwargs)
def setUp(self):
super(HUSiSCSIDriverTest, self).setUp()
(handle, self.config_file) = tempfile.mkstemp('.xml')
os.write(handle, CONF)
os.close(handle)
SimulatedHusBackend.alloc_lun = []
SimulatedHusBackend.connections = []
SimulatedHusBackend.out = ''
self.mox = mox.Mox()
self.mox.StubOutWithMock(hds, 'factory_bend')
hds.factory_bend().AndReturn(SimulatedHusBackend())
self.mox.ReplayAll()
self.configuration = mox.MockObject(conf.Configuration)
self.configuration.hds_cinder_config_file = self.config_file
self.driver = hds.HUSDriver(configuration=self.configuration)
def tearDown(self):
os.remove(self.config_file)
self.mox.UnsetStubs()
super(HUSiSCSIDriverTest, self).tearDown()
def test_get_volume_stats(self):
stats = self.driver.get_volume_stats(True)
self.assertEqual(stats["vendor_name"], "HDS")
self.assertEqual(stats["storage_protocol"], "iSCSI")
self.assertTrue(stats["total_capacity_gb"] > 0)
def test_create_volume(self):
loc = self.driver.create_volume(_VOLUME)
self.assertNotEqual(loc, None)
vol = _VOLUME.copy()
vol['provider_location'] = loc['provider_location']
self.assertNotEqual(loc['provider_location'], None)
return vol
def test_delete_volume(self):
"""Delete a volume (test).
Note: this API call should not expect any exception:
This driver will silently accept a delete request, because
the DB can be out of sync, and Cinder manager will keep trying
to delete, even though the volume has been wiped out of the
Array. We don't want to have a dangling volume entry in the
customer dashboard.
"""
vol = self.test_create_volume()
self.assertTrue(SimulatedHusBackend.alloc_lun)
num_luns_before = len(SimulatedHusBackend.alloc_lun)
self.driver.delete_volume(vol)
num_luns_after = len(SimulatedHusBackend.alloc_lun)
self.assertTrue(num_luns_before > num_luns_after)
def test_extend_volume(self):
vol = self.test_create_volume()
new_size = _VOLUME['size'] * 2
self.driver.extend_volume(vol, new_size)
self.assertTrue(str(new_size * 1024) in
SimulatedHusBackend.out)
def test_create_snapshot(self):
vol = self.test_create_volume()
self.mox.StubOutWithMock(self.driver, '_id_to_vol')
self.driver._id_to_vol(vol['volume_id']).AndReturn(vol)
self.mox.ReplayAll()
svol = vol.copy()
svol['volume_size'] = svol['size']
loc = self.driver.create_snapshot(svol)
self.assertNotEqual(loc, None)
svol['provider_location'] = loc['provider_location']
return svol
def test_create_clone(self):
vol = self.test_create_volume()
self.mox.StubOutWithMock(self.driver, '_id_to_vol')
self.driver._id_to_vol(vol['volume_id']).AndReturn(vol)
self.mox.ReplayAll()
svol = vol.copy()
svol['volume_size'] = svol['size']
loc = self.driver.create_snapshot(svol)
self.assertNotEqual(loc, None)
svol['provider_location'] = loc['provider_location']
return svol
def test_delete_snapshot(self):
"""Delete a snapshot (test).
Note: this API call should not expect any exception:
This driver will silently accept a delete request, because
the DB can be out of sync, and Cinder manager will keep trying
to delete, even though the snapshot has been wiped out of the
Array. We don't want to have a dangling snapshot entry in the
customer dashboard.
"""
svol = self.test_create_snapshot()
num_luns_before = len(SimulatedHusBackend.alloc_lun)
self.driver.delete_snapshot(svol)
num_luns_after = len(SimulatedHusBackend.alloc_lun)
self.assertTrue(num_luns_before > num_luns_after)
def test_create_volume_from_snapshot(self):
svol = self.test_create_snapshot()
vol = self.driver.create_volume_from_snapshot(_VOLUME, svol)
self.assertNotEqual(vol, None)
return vol
def test_initialize_connection(self):
connector = {}
connector['initiator'] = 'iqn.1993-08.org.debian:01:11f90746eb2'
connector['host'] = 'dut_1.lab.hds.com'
vol = self.test_create_volume()
self.mox.StubOutWithMock(self.driver, '_update_vol_location')
conn = self.driver.initialize_connection(vol, connector)
self.assertTrue('hitachi' in conn['data']['target_iqn'])
self.assertTrue('3260' in conn['data']['target_portal'])
vol['provider_location'] = conn['data']['provider_location']
return (vol, connector)
def test_terminate_connection(self):
"""Terminate a connection (test).
Note: this API call should not expect any exception:
This driver will silently accept a terminate_connection request
because an error/exception return will only jeopardize the
connection tear down at a host.
"""
(vol, conn) = self.test_initialize_connection()
num_conn_before = len(SimulatedHusBackend.connections)
self.driver.terminate_connection(vol, conn)
num_conn_after = len(SimulatedHusBackend.connections)
self.assertTrue(num_conn_before > num_conn_after)
|
inkerra/cinder
|
cinder/tests/test_hds.py
|
Python
|
apache-2.0
| 11,056
|
from __future__ import print_function
import sys
import numpy as np
import numba.unittest_support as unittest
from numba.compiler import compile_isolated, Flags
from numba import jit, types
from .support import TestCase, MemoryLeakMixin, tag
from numba import testing
enable_pyobj_flags = Flags()
enable_pyobj_flags.set("enable_pyobject")
forceobj_flags = Flags()
forceobj_flags.set("force_pyobject")
no_pyobj_flags = Flags()
def make_consumer(gen_func):
def consumer(x):
res = 0.0
for y in gen_func(x):
res += y
return res
return consumer
def gen1(x):
for i in range(x):
yield i
def gen2(x):
for i in range(x):
yield i
for j in range(1, 3):
yield i + j
def gen3(x):
# Polymorphic yield types must be unified
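    # (with an integer argument, the int/float/complex yields below are expected
    # to unify to a complex yield type)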
yield x
yield x + 1.5
yield x + 1j
def gen4(x, y, z):
for i in range(3):
yield z
yield y + z
return
yield x
def gen5():
# The bytecode for this generator doesn't contain any YIELD_VALUE
# (it's optimized away). We fail typing it, since the yield type
# is entirely undefined.
if 0:
yield 1
def gen6(a, b):
# Infinite loop: exercise computation of state variables
x = a + 1
while True:
y = b + 2
yield x + y
def gen7(arr):
# Array variable in generator state
for i in range(arr.size):
yield arr[i]
# Optional arguments and boolean state members
def gen8(x=1, y=2, b=False):
bb = not b
yield x
if bb:
yield y
if b:
yield x + y
def genobj(x):
object()
yield x
def return_generator_expr(x):
return (i * 2 for i in x)
def gen_ndindex(shape):
for ind in np.ndindex(shape):
yield ind
def gen_flat(arr):
for val in arr.flat:
yield val
def gen_ndenumerate(arr):
for tup in np.ndenumerate(arr):
yield tup
class TestGenerators(MemoryLeakMixin, TestCase):
def check_generator(self, pygen, cgen):
self.assertEqual(next(cgen), next(pygen))
# Use list comprehensions to make sure we trash the generator's
# former C stack.
expected = [x for x in pygen]
got = [x for x in cgen]
self.assertEqual(expected, got)
with self.assertRaises(StopIteration):
next(cgen)
def check_gen1(self, flags=no_pyobj_flags):
pyfunc = gen1
cr = compile_isolated(pyfunc, (types.int32,), flags=flags)
pygen = pyfunc(8)
cgen = cr.entry_point(8)
self.check_generator(pygen, cgen)
@tag('important')
def test_gen1(self):
self.check_gen1()
def test_gen1_objmode(self):
self.check_gen1(flags=forceobj_flags)
def check_gen2(self, flags=no_pyobj_flags):
pyfunc = gen2
cr = compile_isolated(pyfunc, (types.int32,), flags=flags)
pygen = pyfunc(8)
cgen = cr.entry_point(8)
self.check_generator(pygen, cgen)
@tag('important')
def test_gen2(self):
self.check_gen2()
def test_gen2_objmode(self):
self.check_gen2(flags=forceobj_flags)
def check_gen3(self, flags=no_pyobj_flags):
pyfunc = gen3
cr = compile_isolated(pyfunc, (types.int32,), flags=flags)
pygen = pyfunc(8)
cgen = cr.entry_point(8)
self.check_generator(pygen, cgen)
@tag('important')
def test_gen3(self):
self.check_gen3()
def test_gen3_objmode(self):
self.check_gen3(flags=forceobj_flags)
def check_gen4(self, flags=no_pyobj_flags):
pyfunc = gen4
cr = compile_isolated(pyfunc, (types.int32,) * 3, flags=flags)
pygen = pyfunc(5, 6, 7)
cgen = cr.entry_point(5, 6, 7)
self.check_generator(pygen, cgen)
@tag('important')
def test_gen4(self):
self.check_gen4()
def test_gen4_objmode(self):
self.check_gen4(flags=forceobj_flags)
def test_gen5(self):
with self.assertTypingError() as cm:
cr = compile_isolated(gen5, ())
self.assertIn("Cannot type generator: it does not yield any value",
str(cm.exception))
def test_gen5_objmode(self):
cr = compile_isolated(gen5, (), flags=forceobj_flags)
cgen = cr.entry_point()
self.assertEqual(list(cgen), [])
with self.assertRaises(StopIteration):
next(cgen)
def check_gen6(self, flags=no_pyobj_flags):
pyfunc = gen6
cr = compile_isolated(pyfunc, (types.int32,) * 2, flags=flags)
cgen = cr.entry_point(5, 6)
l = []
for i in range(3):
l.append(next(cgen))
self.assertEqual(l, [14] * 3)
@tag('important')
def test_gen6(self):
self.check_gen6()
def test_gen6_objmode(self):
self.check_gen6(flags=forceobj_flags)
def check_gen7(self, flags=no_pyobj_flags):
pyfunc = gen7
cr = compile_isolated(pyfunc, (types.Array(types.float64, 1, 'C'),),
flags=flags)
arr = np.linspace(1, 10, 7)
pygen = pyfunc(arr.copy())
cgen = cr.entry_point(arr)
self.check_generator(pygen, cgen)
@tag('important')
def test_gen7(self):
self.check_gen7()
def test_gen7_objmode(self):
self.check_gen7(flags=forceobj_flags)
def check_gen8(self, **jit_args):
pyfunc = gen8
cfunc = jit(**jit_args)(pyfunc)
def check(*args, **kwargs):
self.check_generator(pyfunc(*args, **kwargs),
cfunc(*args, **kwargs))
check(2, 3)
check(4)
check(y=5)
check(x=6, b=True)
@tag('important')
def test_gen8(self):
self.check_gen8(nopython=True)
def test_gen8_objmode(self):
self.check_gen8(forceobj=True)
def check_consume_generator(self, gen_func):
cgen = jit(nopython=True)(gen_func)
cfunc = jit(nopython=True)(make_consumer(cgen))
pyfunc = make_consumer(gen_func)
expected = pyfunc(5)
got = cfunc(5)
self.assertPreciseEqual(got, expected)
def test_consume_gen1(self):
self.check_consume_generator(gen1)
def test_consume_gen2(self):
self.check_consume_generator(gen2)
@tag('important')
def test_consume_gen3(self):
self.check_consume_generator(gen3)
# Check generator storage of some types
def check_ndindex(self, flags=no_pyobj_flags):
pyfunc = gen_ndindex
cr = compile_isolated(pyfunc, (types.UniTuple(types.intp, 2),),
flags=flags)
shape = (2, 3)
pygen = pyfunc(shape)
cgen = cr.entry_point(shape)
self.check_generator(pygen, cgen)
def test_ndindex(self):
self.check_ndindex()
def test_ndindex_objmode(self):
self.check_ndindex(flags=forceobj_flags)
def check_np_flat(self, pyfunc, flags=no_pyobj_flags):
cr = compile_isolated(pyfunc, (types.Array(types.int32, 2, "C"),),
flags=flags)
arr = np.arange(6, dtype=np.int32).reshape((2, 3))
self.check_generator(pyfunc(arr), cr.entry_point(arr))
cr = compile_isolated(pyfunc, (types.Array(types.int32, 2, "A"),),
flags=flags)
arr = arr.T
self.check_generator(pyfunc(arr), cr.entry_point(arr))
def test_np_flat(self):
self.check_np_flat(gen_flat)
def test_np_flat_objmode(self):
self.check_np_flat(gen_flat, flags=forceobj_flags)
def test_ndenumerate(self):
self.check_np_flat(gen_ndenumerate)
def test_ndenumerate_objmode(self):
self.check_np_flat(gen_ndenumerate, flags=forceobj_flags)
class TestGenExprs(MemoryLeakMixin, TestCase):
@testing.allow_interpreter_mode
def test_return_generator_expr(self):
pyfunc = return_generator_expr
cr = compile_isolated(pyfunc, ())
cfunc = cr.entry_point
self.assertEqual(sum(cfunc([1, 2, 3])), sum(pyfunc([1, 2, 3])))
def nrt_gen0(ary):
for elem in ary:
yield elem
def nrt_gen1(ary1, ary2):
for e1, e2 in zip(ary1, ary2):
yield e1
yield e2
class TestNrtArrayGen(MemoryLeakMixin, TestCase):
def test_nrt_gen0(self):
pygen = nrt_gen0
cgen = jit(nopython=True)(pygen)
py_ary = np.arange(10)
c_ary = py_ary.copy()
py_res = list(pygen(py_ary))
c_res = list(cgen(c_ary))
np.testing.assert_equal(py_ary, c_ary)
self.assertEqual(py_res, c_res)
# Check reference count
self.assertEqual(sys.getrefcount(py_ary),
sys.getrefcount(c_ary))
def test_nrt_gen1(self):
pygen = nrt_gen1
cgen = jit(nopython=True)(pygen)
py_ary1 = np.arange(10)
py_ary2 = py_ary1 + 100
c_ary1 = py_ary1.copy()
c_ary2 = py_ary2.copy()
py_res = list(pygen(py_ary1, py_ary2))
c_res = list(cgen(c_ary1, c_ary2))
np.testing.assert_equal(py_ary1, c_ary1)
np.testing.assert_equal(py_ary2, c_ary2)
self.assertEqual(py_res, c_res)
# Check reference count
self.assertEqual(sys.getrefcount(py_ary1),
sys.getrefcount(c_ary1))
self.assertEqual(sys.getrefcount(py_ary2),
sys.getrefcount(c_ary2))
def test_combine_gen0_gen1(self):
"""
        Issue #1163 is observed when two generators with NRT object arguments
        are run in sequence. The first one performs an invalid free and corrupts
        the NRT memory subsystem. The second generator is then likely to segfault
        due to the corrupted NRT data structure (an invalid MemInfo).
"""
self.test_nrt_gen0()
self.test_nrt_gen1()
def test_nrt_gen0_stop_iteration(self):
"""
Test cleanup on StopIteration
"""
pygen = nrt_gen0
cgen = jit(nopython=True)(pygen)
py_ary = np.arange(1)
c_ary = py_ary.copy()
py_iter = pygen(py_ary)
c_iter = cgen(c_ary)
py_res = next(py_iter)
c_res = next(c_iter)
with self.assertRaises(StopIteration):
py_res = next(py_iter)
with self.assertRaises(StopIteration):
c_res = next(c_iter)
del py_iter
del c_iter
np.testing.assert_equal(py_ary, c_ary)
self.assertEqual(py_res, c_res)
# Check reference count
self.assertEqual(sys.getrefcount(py_ary),
sys.getrefcount(c_ary))
def test_nrt_gen0_no_iter(self):
"""
        Test cleanup for an initialized generator that is never iterated
        (next() is never called).
"""
pygen = nrt_gen0
cgen = jit(nopython=True)(pygen)
py_ary = np.arange(1)
c_ary = py_ary.copy()
py_iter = pygen(py_ary)
c_iter = cgen(c_ary)
del py_iter
del c_iter
np.testing.assert_equal(py_ary, c_ary)
# Check reference count
self.assertEqual(sys.getrefcount(py_ary),
sys.getrefcount(c_ary))
# TODO: fix nested generator and MemoryLeakMixin
class TestNrtNestedGen(TestCase):
def test_nrt_nested_gen(self):
def gen0(arr):
for i in range(arr.size):
yield arr
def factory(gen0):
def gen1(arr):
out = np.zeros_like(arr)
for x in gen0(arr):
out = out + x
return out, arr
return gen1
py_arr = np.arange(10)
c_arr = py_arr.copy()
py_res, py_old = factory(gen0)(py_arr)
c_gen = jit(nopython=True)(factory(jit(nopython=True)(gen0)))
c_res, c_old = c_gen(c_arr)
self.assertIsNot(py_arr, c_arr)
self.assertIs(py_old, py_arr)
self.assertIs(c_old, c_arr)
np.testing.assert_equal(py_res, c_res)
self.assertEqual(sys.getrefcount(py_res),
sys.getrefcount(c_res))
# The test below will fail because the generator finalizer is not invoked,
# which keeps an extra reference to c_old alive.
#
# self.assertEqual(sys.getrefcount(py_old),
# sys.getrefcount(c_old))
@unittest.expectedFailure
def test_nrt_nested_gen_refct(self):
def gen0(arr):
yield arr
def factory(gen0):
def gen1(arr):
for out in gen0(arr):
return out
return gen1
py_arr = np.arange(10)
c_arr = py_arr.copy()
py_old = factory(gen0)(py_arr)
c_gen = jit(nopython=True)(factory(jit(nopython=True)(gen0)))
c_old = c_gen(c_arr)
self.assertIsNot(py_arr, c_arr)
self.assertIs(py_old, py_arr)
self.assertIs(c_old, c_arr)
self.assertEqual(sys.getrefcount(py_old),
sys.getrefcount(c_old))
def test_nrt_nested_nopython_gen(self):
"""
Test nesting three generators
"""
def factory(decor=lambda x: x):
@decor
def foo(a, n):
for i in range(n):
yield a[i]
a[i] += i
@decor
def bar(n):
a = np.arange(n)
for i in foo(a, n):
yield i * 2
for i in range(a.size):
yield a[i]
@decor
def cat(n):
for i in bar(n):
yield i + i
return cat
py_gen = factory()
c_gen = factory(jit(nopython=True))
py_res = list(py_gen(10))
c_res = list(c_gen(10))
self.assertEqual(py_res, c_res)
class TestGeneratorWithNRT(MemoryLeakMixin, TestCase):
def test_issue_1254(self):
"""
Missing environment for returning array
"""
@jit(nopython=True)
def random_directions(n):
for i in range(n):
vec = np.empty(3)
vec[:] = 12
yield vec
outputs = list(random_directions(5))
self.assertEqual(len(outputs), 5)
expect = np.empty(3)
expect[:] = 12
for got in outputs:
np.testing.assert_equal(expect, got)
def test_issue_1265(self):
"""
Double-free for locally allocated, non-escaping NRT objects
"""
def py_gen(rmin, rmax, nr):
a = np.linspace(rmin, rmax, nr)
yield a[0]
yield a[1]
c_gen = jit(nopython=True)(py_gen)
py_res = list(py_gen(-2, 2, 100))
c_res = list(c_gen(-2, 2, 100))
self.assertEqual(py_res, c_res)
def py_driver(args):
rmin, rmax, nr = args
points = np.empty(nr, dtype=np.complex128)
for i, c in enumerate(py_gen(rmin, rmax, nr)):
points[i] = c
return points
@jit(nopython=True)
def c_driver(args):
rmin, rmax, nr = args
points = np.empty(nr, dtype=np.complex128)
for i, c in enumerate(c_gen(rmin, rmax, nr)):
points[i] = c
return points
n = 2
patches = (-2, -1, n)
py_res = py_driver(patches)
# The error will cause a segfault here
c_res = c_driver(patches)
np.testing.assert_equal(py_res, c_res)
if __name__ == '__main__':
unittest.main()
|
stefanseefeld/numba
|
numba/tests/test_generators.py
|
Python
|
bsd-2-clause
| 15,547
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-10 04:31
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('coffee', '0002_auto_20170310_0429'),
]
operations = [
migrations.AlterField(
model_name='bean',
name='bean_type',
field=models.IntegerField(choices=[(0, 'Blend'), (1, 'Single Origin'), (2, 'Espresso'), (3, 'Seasonal')], default=0, null=True),
),
migrations.AlterField(
model_name='bean',
name='roast_level',
field=models.IntegerField(choices=[(0, 'Light Roast'), (1, 'Medium Roast'), (2, 'Medium/Dark Roast'), (3, 'Dark Roast')], default=1, null=True),
),
]
|
greg-ruane/coffee-catalog
|
src/coffee/migrations/0003_auto_20170310_0431.py
|
Python
|
mit
| 803
|
def can_build(env, platform):
return True
def configure(env):
pass
|
NateWardawg/godot
|
modules/webp/config.py
|
Python
|
mit
| 76
|
from datetime import time
from http.client import OK, BAD_REQUEST
from io import BytesIO
from unittest.mock import patch
from django.contrib.auth import authenticate
from django.core.urlresolvers import reverse
from django.test import TestCase
import xlwt
from email_user.tests.factories import EmailUserFactory
from services.import_export import validate_and_import_data, get_export_workbook, PROVIDER_HEADINGS, \
PROVIDER_SHEET_NAME, SERVICES_SHEET_NAME, SELECTION_CRITERIA_SHEET_NAME, add_models_to_sheet, \
SERVICE_HEADINGS, SELECTION_CRITERIA_HEADINGS
from services.models import Provider, Service, SelectionCriterion
from services.tests.factories import ProviderFactory, ProviderTypeFactory, ServiceFactory, \
ServiceTypeFactory, ServiceAreaFactory, SelectionCriterionFactory
from services.tests.test_api import APITestMixin
from services.tests.test_export import get_book_bits
VERY_LONG_STRING = 'x' * 1024
def make_empty_book():
"""
Return an xlwt Workbook object with our sheets & column
headings, but no data.
:return: an xlwt Workbook object
"""
return get_export_workbook([])
class ValidateImportTest(TestCase):
def setUp(self):
self.user = EmailUserFactory()
def test_not_spreadsheet(self):
errs = validate_and_import_data(self.user, b'I am not a spreadsheet')
self.assertTrue(errs)
def test_too_few_sheets(self):
xlwt_book = xlwt.Workbook(encoding='utf-8')
xlwt_book.add_sheet(PROVIDER_SHEET_NAME)
xlwt_book.add_sheet(SERVICES_SHEET_NAME)
errs = validate_and_import_data(self.user, get_book_bits(xlwt_book))
self.assertTrue(errs)
def test_too_many_sheets(self):
xlwt_book = xlwt.Workbook(encoding='utf-8')
xlwt_book.add_sheet(PROVIDER_SHEET_NAME)
xlwt_book.add_sheet(SERVICES_SHEET_NAME)
xlwt_book.add_sheet(PROVIDER_SHEET_NAME + 'b')
xlwt_book.add_sheet(SERVICES_SHEET_NAME + 'b')
errs = validate_and_import_data(self.user, get_book_bits(xlwt_book))
self.assertTrue(errs)
def test_empty_book(self):
# A book with just 3 empty sheets should not validate
xlwt_book = xlwt.Workbook(encoding='utf-8')
xlwt_book.add_sheet('x' + PROVIDER_SHEET_NAME)
xlwt_book.add_sheet(SERVICES_SHEET_NAME)
xlwt_book.add_sheet(SELECTION_CRITERIA_SHEET_NAME)
errs = validate_and_import_data(self.user, get_book_bits(xlwt_book))
self.assertTrue(errs)
def test_headers_only_book(self):
# A book with only headers should validate
xlwt_book = make_empty_book()
errs = validate_and_import_data(self.user, get_book_bits(xlwt_book))
self.assertFalse(errs)
def test_bad_provider_headers(self):
with patch('services.import_export.PROVIDER_HEADINGS') as headings:
headings[:] = ['foo', 'bar']
xlwt_book = make_empty_book()
errs = validate_and_import_data(self.user, get_book_bits(xlwt_book))
self.assertTrue(errs)
def test_bad_service_headers(self):
with patch('services.import_export.SERVICE_HEADINGS') as headings:
headings[:] = ['foo', 'bar']
xlwt_book = make_empty_book()
errs = validate_and_import_data(self.user, get_book_bits(xlwt_book))
self.assertTrue(errs)
def test_bad_criteria_headers(self):
with patch('services.import_export.SELECTION_CRITERIA_HEADINGS') as headings:
headings[:] = ['foo', 'bar']
xlwt_book = make_empty_book()
errs = validate_and_import_data(self.user, get_book_bits(xlwt_book))
self.assertTrue(errs)
def test_bad_provider_sheet_name(self):
# Wrong sheet name should not validate
xlwt_book = xlwt.Workbook(encoding='utf-8')
provider_sheet = xlwt_book.add_sheet('x' + PROVIDER_SHEET_NAME)
add_models_to_sheet(provider_sheet, PROVIDER_HEADINGS, [])
service_sheet = xlwt_book.add_sheet(SERVICES_SHEET_NAME)
add_models_to_sheet(service_sheet, SERVICE_HEADINGS, [])
criteria_sheet = xlwt_book.add_sheet(SELECTION_CRITERIA_SHEET_NAME)
add_models_to_sheet(criteria_sheet, SELECTION_CRITERIA_HEADINGS, [])
errs = validate_and_import_data(self.user, get_book_bits(xlwt_book))
self.assertTrue(errs)
def test_bad_services_sheet_name(self):
# Wrong sheet name should not validate
xlwt_book = xlwt.Workbook(encoding='utf-8')
provider_sheet = xlwt_book.add_sheet(PROVIDER_SHEET_NAME)
add_models_to_sheet(provider_sheet, PROVIDER_HEADINGS, [])
service_sheet = xlwt_book.add_sheet('x' + SERVICES_SHEET_NAME)
add_models_to_sheet(service_sheet, SERVICE_HEADINGS, [])
criteria_sheet = xlwt_book.add_sheet(SELECTION_CRITERIA_SHEET_NAME)
add_models_to_sheet(criteria_sheet, SELECTION_CRITERIA_HEADINGS, [])
errs = validate_and_import_data(self.user, get_book_bits(xlwt_book))
self.assertTrue(errs)
def test_bad_criteria_sheet_name(self):
# Wrong sheet name should not validate
xlwt_book = xlwt.Workbook(encoding='utf-8')
provider_sheet = xlwt_book.add_sheet(PROVIDER_SHEET_NAME)
add_models_to_sheet(provider_sheet, PROVIDER_HEADINGS, [])
service_sheet = xlwt_book.add_sheet(SERVICES_SHEET_NAME)
add_models_to_sheet(service_sheet, SERVICE_HEADINGS, [])
criteria_sheet = xlwt_book.add_sheet('x' + SELECTION_CRITERIA_SHEET_NAME)
add_models_to_sheet(criteria_sheet, SELECTION_CRITERIA_HEADINGS, [])
errs = validate_and_import_data(self.user, get_book_bits(xlwt_book))
self.assertTrue(errs)
def set_cell_value(book, sheet_num, row_num, col_num, value):
sheet = book.get_sheet(sheet_num)
sheet.write(r=row_num, c=col_num, label=value)
def blank_out_row_for_testing(book, sheet_num, row_num):
sheet = book.get_sheet(sheet_num)
num_cols = sheet.rows[row_num].get_cells_count()
# We always put the id in the first column, so skip that
for col in range(1, num_cols):
sheet.write(r=row_num, c=col, label='')
class ImportWorkbookAPITest(APITestMixin, TestCase):
def import_book(self, book):
"""
Given an xlwt Workbook object, call the import API
and return the response object.
"""
bits = get_book_bits(book)
url = reverse('import')
with BytesIO(bits) as fp:
fp.name = 'book.xls'
rsp = self.post_with_token(
url,
data={'file': fp},
format='multipart',
)
return rsp
def test_import_empty_book(self):
xlwt_book = make_empty_book()
rsp = self.import_book(xlwt_book)
self.assertEqual(OK, rsp.status_code, msg=rsp.content.decode('utf-8'))
def test_provider_add_provider(self):
type = ProviderTypeFactory()
provider = ProviderFactory.build(type=type, user=self.user) # Doesn't save
self.assertFalse(provider.id)
book = get_export_workbook([provider])
rsp = self.import_book(book)
# self.fail(rsp.content.decode('utf-8'))
self.assertContains(rsp, "Non-staff users may not create new providers",
status_code=BAD_REQUEST)
def test_staff_add_provider(self):
type = ProviderTypeFactory()
self.user.is_staff = True
self.user.save()
provider = ProviderFactory.build(type=type, user=self.user) # Doesn't save
self.assertFalse(provider.id)
book = get_export_workbook([provider])
rsp = self.import_book(book)
self.assertEqual(OK, rsp.status_code, msg=rsp.content.decode('utf-8'))
self.assertTrue(Provider.objects.filter(name_en=provider.name_en).exists())
def test_staff_add_bad_provider(self):
type = ProviderTypeFactory()
self.user.is_staff = True
self.user.save()
provider = ProviderFactory.build(type=type, user=self.user,
number_of_monthly_beneficiaries=-1) # Doesn't save
self.assertFalse(provider.id)
book = get_export_workbook([provider])
rsp = self.import_book(book)
self.assertContains(rsp,
"Row 2: number_of_monthly_beneficiaries: Ensure this value is "
"greater than or equal to 0.",
status_code=BAD_REQUEST)
def test_staff_add_providers(self):
# Remember, only one provider per user
self.user.is_staff = True
self.user.save()
type1 = ProviderTypeFactory()
provider1 = ProviderFactory.build(type=type1, user=self.user) # Doesn't save
user2 = EmailUserFactory()
type2 = ProviderTypeFactory()
provider2 = ProviderFactory.build(type=type2, user=user2) # Doesn't save
book = get_export_workbook([provider1, provider2])
rsp = self.import_book(book)
self.assertEqual(OK, rsp.status_code, msg=rsp.content.decode('utf-8'))
self.assertTrue(Provider.objects.filter(name_en=provider1.name_en).exists())
self.assertTrue(Provider.objects.filter(name_en=provider2.name_en).exists())
def test_provider_change_own_data(self):
# Non-staff can change their own provider
provider = ProviderFactory(user=self.user)
# Tweak some data
provider.name_en = 'Jim-Bob'
provider.name_ar = 'Ahmed-Bob'
provider.name_fr = 'Pierre-Bob'
book = get_export_workbook([provider])
rsp = self.import_book(book)
self.assertEqual(OK, rsp.status_code, msg=rsp.content.decode('utf-8'))
new_provider = Provider.objects.get(id=provider.id)
self.assertEqual(provider.name_en, new_provider.name_en)
self.assertEqual(provider.name_ar, new_provider.name_ar)
self.assertEqual(provider.name_fr, new_provider.name_fr)
def test_staff_change_provider(self):
# Staff can change another user's provider
self.user.is_staff = True
self.user.save()
provider = ProviderFactory()
# Tweak some data
provider.name_en = 'Jim-Bob'
provider.name_ar = 'Ahmed-Bob'
provider.name_fr = 'Pierre-Bob'
book = get_export_workbook([provider])
rsp = self.import_book(book)
self.assertEqual(OK, rsp.status_code, msg=rsp.content.decode('utf-8'))
new_provider = Provider.objects.get(id=provider.id)
self.assertEqual(provider.name_en, new_provider.name_en)
self.assertEqual(provider.name_ar, new_provider.name_ar)
self.assertEqual(provider.name_fr, new_provider.name_fr)
def test_staff_change_provider_invalid_id(self):
self.user.is_staff = True
self.user.save()
provider = ProviderFactory()
# Tweak some data
provider.name_en = 'Jim-Bob'
provider.name_ar = 'Ahmed-Bob'
provider.name_fr = 'Pierre-Bob'
book = get_export_workbook([provider], cell_overwrite_ok=True)
sheet = book.get_sheet(0)
sheet.write(r=1, c=0, label='xyz')
rsp = self.import_book(book)
self.assertContains(rsp,
"id: xyz is not a valid ID",
status_code=BAD_REQUEST,
msg_prefix=rsp.content.decode('utf-8'))
def test_staff_change_nonexistent_provider(self):
# Staff can change another user's provider
self.user.is_staff = True
self.user.save()
provider = ProviderFactory()
# Tweak some data
provider.name_en = 'Jim-Bob'
provider.name_ar = 'Ahmed-Bob'
provider.name_fr = 'Pierre-Bob'
book = get_export_workbook([provider])
provider_id = provider.id
provider.delete()
rsp = self.import_book(book)
self.assertContains(rsp,
"There is no provider with id=%d" % provider_id,
status_code=BAD_REQUEST,
msg_prefix=rsp.content.decode('utf-8'))
def test_staff_change_providers(self):
# Staff can change multiple providers
self.user.is_staff = True
self.user.save()
provider1 = ProviderFactory()
provider2 = ProviderFactory()
# Tweak some data
provider1.name_en = 'Jim-Bob'
provider1.name_ar = 'Ahmed-Bob'
provider1.name_fr = 'Pierre-Bob'
provider2.number_of_monthly_beneficiaries = 1024
provider2.type = ProviderTypeFactory()
book = get_export_workbook([provider1, provider2])
rsp = self.import_book(book)
self.assertEqual(OK, rsp.status_code, msg=rsp.content.decode('utf-8'))
new_provider1 = Provider.objects.get(id=provider1.id)
self.assertEqual(provider1.name_en, new_provider1.name_en)
self.assertEqual(provider1.name_ar, new_provider1.name_ar)
self.assertEqual(provider1.name_fr, new_provider1.name_fr)
new_provider2 = Provider.objects.get(id=provider2.id)
self.assertEqual(provider2.number_of_monthly_beneficiaries,
new_provider2.number_of_monthly_beneficiaries)
def test_provider_add_service(self):
# A provider can create a new service for themselves
provider = ProviderFactory(user=self.user)
type = ServiceTypeFactory()
area = ServiceAreaFactory()
service = ServiceFactory.build(provider=provider, type=type, area_of_service=area,
tuesday_open=time(6, 59),
tuesday_close=time(21, 2))
self.assertIsNotNone(service.location)
criterion = SelectionCriterionFactory.build(
service=service
)
book = get_export_workbook([provider], [service], [criterion])
rsp = self.import_book(book)
self.assertEqual(OK, rsp.status_code, msg=rsp.content.decode('utf-8'))
new_service = Service.objects.get(name_en=service.name_en)
self.assertEqual(new_service.name_en, service.name_en)
self.assertTrue(SelectionCriterion.objects.filter(service=new_service,
text_en=criterion.text_en
).exists())
self.assertIsNotNone(new_service.location)
self.assertEqual(service.location, new_service.location)
self.assertEqual(service.tuesday_open, new_service.tuesday_open)
self.assertEqual(service.tuesday_close, new_service.tuesday_close)
def test_provider_add_bad_service(self):
provider = ProviderFactory(user=self.user)
type = ServiceTypeFactory()
area = ServiceAreaFactory()
service = ServiceFactory.build(provider=provider, type=type, area_of_service=area,
name_en=VERY_LONG_STRING,
tuesday_open=time(6, 59),
tuesday_close=time(21, 2))
self.assertIsNotNone(service.location)
criterion = SelectionCriterionFactory.build(
service=service
)
book = get_export_workbook([provider], [service], [criterion])
rsp = self.import_book(book)
self.assertEqual(BAD_REQUEST, rsp.status_code, msg=rsp.content.decode('utf-8'))
def test_provider_add_anothers_service(self):
# A provider can't add a service to another provider
provider = ProviderFactory()
type = ServiceTypeFactory()
area = ServiceAreaFactory()
service = ServiceFactory.build(provider=provider, type=type, area_of_service=area)
book = get_export_workbook([provider], [service])
rsp = self.import_book(book)
self.assertEqual(BAD_REQUEST, rsp.status_code, msg=rsp.content.decode('utf-8'))
self.assertContains(rsp, "%d is not a provider this user may import" % provider.id,
status_code=BAD_REQUEST)
self.assertContains(rsp, "Non-staff users may not create services for other providers",
status_code=BAD_REQUEST)
def test_provider_change_service(self):
# A provider can change their existing service
provider = ProviderFactory(user=self.user)
type = ServiceTypeFactory()
area = ServiceAreaFactory()
service = ServiceFactory(provider=provider, type=type, area_of_service=area)
service.name_en = 'Radiator Repair'
service.name_fr = 'Le Marseilles'
book = get_export_workbook([provider], [service])
rsp = self.import_book(book)
self.assertEqual(OK, rsp.status_code, msg=rsp.content.decode('utf-8'))
new_service = Service.objects.get(id=service.id)
self.assertEqual(service.name_en, new_service.name_en)
self.assertEqual(service.name_fr, new_service.name_fr)
def test_provider_change_nonexistent_service(self):
provider = ProviderFactory(user=self.user)
type = ServiceTypeFactory()
area = ServiceAreaFactory()
service = ServiceFactory(provider=provider, type=type, area_of_service=area)
service.name_en = 'Radiator Repair'
service.name_fr = 'Le Marseilles'
book = get_export_workbook([provider], [service])
service_id = service.id
service.delete()
rsp = self.import_book(book)
self.assertContains(rsp, "%d is not a service this user may import" % service_id,
status_code=BAD_REQUEST)
def test_provider_change_anothers_service(self):
# A provider cannot change another provider's existing service
provider = ProviderFactory()
type = ServiceTypeFactory()
area = ServiceAreaFactory()
service = ServiceFactory(provider=provider, type=type, area_of_service=area)
service.name_en = 'Radiator Repair'
service.name_fr = 'Le Marseilles'
book = get_export_workbook([provider], [service])
rsp = self.import_book(book)
# self.fail(rsp.content.decode('utf-8'))
self.assertEqual(BAD_REQUEST, rsp.status_code, msg=rsp.content.decode('utf-8'))
self.assertContains(rsp, "%d is not a provider this user may import" % provider.id,
status_code=BAD_REQUEST)
self.assertContains(rsp, "%d is not a service this user may import" % service.id,
status_code=BAD_REQUEST)
def test_staff_add_services(self):
# Staff can add services to any provider
self.user.is_staff = True
self.user.save()
provider = ProviderFactory()
type = ServiceTypeFactory()
area = ServiceAreaFactory()
service = ServiceFactory.build(provider=provider, type=type, area_of_service=area)
book = get_export_workbook([provider], [service])
rsp = self.import_book(book)
self.assertEqual(OK, rsp.status_code, msg=rsp.content.decode('utf-8'))
new_service = Service.objects.get(name_en=service.name_en)
self.assertEqual(new_service.name_en, service.name_en)
def test_staff_change_services(self):
# Staff can change anyone's service
self.user.is_staff = True
self.user.save()
provider = ProviderFactory()
type = ServiceTypeFactory()
area = ServiceAreaFactory()
service = ServiceFactory(provider=provider, type=type, area_of_service=area)
service.name_en = 'Radiator Repair'
service.name_fr = 'Le Marseilles'
book = get_export_workbook([provider], [service])
rsp = self.import_book(book)
self.assertEqual(OK, rsp.status_code, msg=rsp.content.decode('utf-8'))
new_service = Service.objects.get(id=service.id)
self.assertEqual(service.name_en, new_service.name_en)
self.assertEqual(service.name_fr, new_service.name_fr)
def test_provider_add_criteria(self):
provider = ProviderFactory(user=self.user)
service = ServiceFactory(provider=provider, status=Service.STATUS_CURRENT)
criterion1 = SelectionCriterionFactory(service=service)
criterion2 = SelectionCriterionFactory.build(service=service, text_en="New Criterion!")
book = get_export_workbook([provider], None, [criterion1, criterion2])
rsp = self.import_book(book)
self.assertEqual(OK, rsp.status_code, msg=rsp.content.decode('utf-8'))
# Existing one still there
self.assertTrue(SelectionCriterion.objects.filter(
service=service,
text_en=criterion1.text_en,
id=criterion1.id
).exists())
# New one added
self.assertTrue(SelectionCriterion.objects.filter(
service=service,
text_en=criterion2.text_en
).exists())
def test_provider_remove_criteria(self):
provider = ProviderFactory(user=self.user)
service = ServiceFactory(provider=provider, status=Service.STATUS_CURRENT)
criterion1 = SelectionCriterionFactory(service=service)
criterion2 = SelectionCriterionFactory(service=service)
book = get_export_workbook([provider], None, [criterion1, criterion2],
cell_overwrite_ok=True)
# Blank out the 2nd one's data to indicate it should be deleted
blank_out_row_for_testing(book, sheet_num=2, row_num=2)
rsp = self.import_book(book)
self.assertEqual(OK, rsp.status_code, msg=rsp.content.decode('utf-8'))
# 1st one still there
self.assertTrue(SelectionCriterion.objects.filter(
service=service,
text_en=criterion1.text_en,
id=criterion1.id
).exists())
# 2nd one removed
self.assertFalse(SelectionCriterion.objects.filter(id=criterion2.id).exists())
def test_provider_change_criteria(self):
provider = ProviderFactory(user=self.user)
service = ServiceFactory(provider=provider, status=Service.STATUS_CURRENT)
criterion1 = SelectionCriterionFactory(service=service)
criterion2 = SelectionCriterionFactory(service=service)
# Change the 2nd one's text before exporting
criterion2.text_en = criterion2.text_ar = criterion2.text_fr = 'Oh dear me'
book = get_export_workbook([provider], None, [criterion1, criterion2])
rsp = self.import_book(book)
self.assertEqual(OK, rsp.status_code, msg=rsp.content.decode('utf-8'))
# 1st one still there
self.assertTrue(SelectionCriterion.objects.filter(
service=service,
text_en=criterion1.text_en,
id=criterion1.id
).exists())
# 2nd one changed
crit2 = SelectionCriterion.objects.get(id=criterion2.id)
self.assertEqual(crit2.text_en, criterion2.text_en)
self.assertEqual(crit2.text_ar, criterion2.text_ar)
self.assertEqual(crit2.text_fr, criterion2.text_fr)
def test_provider_change_nonexistent_criterion(self):
provider = ProviderFactory(user=self.user)
service = ServiceFactory(provider=provider, status=Service.STATUS_CURRENT)
criterion1 = SelectionCriterionFactory(service=service)
book = get_export_workbook([provider], None, [criterion1])
crit_id = criterion1.id
criterion1.delete()
rsp = self.import_book(book)
self.assertContains(rsp, "Row 2: id: No selection criterion with id = %s" % crit_id,
status_code=BAD_REQUEST,
msg_prefix=rsp.content.decode('utf-8'))
def test_provider_bad_criterion_id(self):
provider = ProviderFactory(user=self.user)
service = ServiceFactory(provider=provider, status=Service.STATUS_CURRENT)
criterion1 = SelectionCriterionFactory.build(service=service)
criterion1.id = 'abc'
book = get_export_workbook([provider], None, [criterion1])
rsp = self.import_book(book)
self.assertContains(rsp, "Row 2: id: %s is not a valid ID" % criterion1.id,
status_code=BAD_REQUEST,
msg_prefix=rsp.content.decode('utf-8'))
def test_provider_bad_criteria(self):
provider = ProviderFactory(user=self.user)
service = ServiceFactory(provider=provider, status=Service.STATUS_CURRENT)
criterion1 = SelectionCriterionFactory(service=service)
criterion2 = SelectionCriterionFactory(service=service)
# Change the 2nd one's text before exporting
criterion2.text_en = criterion2.text_ar = criterion2.text_fr = ''
book = get_export_workbook([provider], None, [criterion1, criterion2])
rsp = self.import_book(book)
self.assertContains(rsp, "Selection criterion must have text in at least one language",
status_code=BAD_REQUEST,
msg_prefix=rsp.content.decode('utf-8'))
def test_provider_add_criterion_bad_service(self):
provider = ProviderFactory(user=self.user)
criterion1 = SelectionCriterionFactory.build()
service = criterion1.service
book = get_export_workbook([provider], None, [criterion1])
rsp = self.import_book(book)
self.assertContains(rsp,
"Row 2: service__id: Selection criterion refers to service with ID "
"or name '%s' that is not in the 2nd sheet" % service.name_en,
status_code=BAD_REQUEST,
msg_prefix=rsp.content.decode('utf-8'))
def test_provider_delete_service(self):
# A provider can delete their existing service
# by blanking out all the fields except id
provider = ProviderFactory(user=self.user)
type = ServiceTypeFactory()
area = ServiceAreaFactory()
service = ServiceFactory(provider=provider, type=type, area_of_service=area)
self.assertTrue(Service.objects.filter(id=service.id).exists())
book = get_export_workbook([provider], [service], cell_overwrite_ok=True)
# Now blank out everything about the service except its 'id'
blank_out_row_for_testing(book, sheet_num=1, row_num=1)
rsp = self.import_book(book)
self.assertEqual(OK, rsp.status_code, msg=rsp.content.decode('utf-8'))
self.assertFalse(Service.objects.filter(id=service.id).exists())
def test_provider_delete_nonexistent_service(self):
provider = ProviderFactory(user=self.user)
type = ServiceTypeFactory()
area = ServiceAreaFactory()
service = ServiceFactory(provider=provider, type=type, area_of_service=area)
self.assertTrue(Service.objects.filter(id=service.id).exists())
book = get_export_workbook([provider], [service], cell_overwrite_ok=True)
service_id = service.id
service.delete()
# Now blank out everything about the service except its 'id'
blank_out_row_for_testing(book, sheet_num=1, row_num=1)
rsp = self.import_book(book)
self.assertContains(rsp,
"Row 2: service: %d is not a service this user may delete" % service_id,
status_code=BAD_REQUEST,
msg_prefix=rsp.content.decode('utf-8'))
def test_provider_delete_anothers_service(self):
# A provider cannot delete someone else's service
provider = ProviderFactory(user=self.user)
type = ServiceTypeFactory()
area = ServiceAreaFactory()
service = ServiceFactory(type=type, area_of_service=area)
self.assertTrue(Service.objects.filter(id=service.id).exists())
book = get_export_workbook([provider], [service], cell_overwrite_ok=True)
# Now blank out everything about the service except its 'id'
blank_out_row_for_testing(book, sheet_num=1, row_num=1)
rsp = self.import_book(book)
self.assertContains(rsp, "%d is not a service this user may delete" % service.id,
status_code=BAD_REQUEST,
msg_prefix=rsp.content.decode('utf-8'))
def test_staff_delete_service(self):
# A staffer can delete someone else's service
self.user.is_staff = True
self.user.save()
provider = ProviderFactory(user=self.user)
type = ServiceTypeFactory()
area = ServiceAreaFactory()
service = ServiceFactory(type=type, area_of_service=area)
self.assertTrue(Service.objects.filter(id=service.id).exists())
book = get_export_workbook([provider], [service], cell_overwrite_ok=True)
# Now blank out everything about the service except its 'id'
blank_out_row_for_testing(book, sheet_num=1, row_num=1)
rsp = self.import_book(book)
self.assertEqual(OK, rsp.status_code, msg=rsp.content.decode('utf-8'))
self.assertFalse(Service.objects.filter(id=service.id).exists())
def test_staff_delete_nonexistent_service(self):
self.user.is_staff = True
self.user.save()
provider = ProviderFactory(user=self.user)
type = ServiceTypeFactory()
area = ServiceAreaFactory()
service = ServiceFactory(type=type, area_of_service=area)
self.assertTrue(Service.objects.filter(id=service.id).exists())
book = get_export_workbook([provider], [service], cell_overwrite_ok=True)
service_id = service.id
service.delete()
# Now blank out everything about the service except its 'id'
blank_out_row_for_testing(book, sheet_num=1, row_num=1)
rsp = self.import_book(book)
self.assertContains(rsp,
"No service with id=%d" % service_id,
status_code=BAD_REQUEST,
msg_prefix=rsp.content.decode('utf-8'))
def test_provider_delete_provider(self):
# A provider cannot delete themselves
provider = ProviderFactory(user=self.user)
book = get_export_workbook([provider], cell_overwrite_ok=True)
blank_out_row_for_testing(book, sheet_num=0, row_num=1)
rsp = self.import_book(book)
self.assertContains(rsp, "Only staff may delete providers",
status_code=BAD_REQUEST,
msg_prefix=rsp.content.decode('utf-8'))
def test_provider_delete_another_provider(self):
# A provider cannot delete others
provider = ProviderFactory()
book = get_export_workbook([provider], cell_overwrite_ok=True)
blank_out_row_for_testing(book, sheet_num=0, row_num=1)
rsp = self.import_book(book)
self.assertContains(rsp,
"provider: %d is not a provider this user may delete" % provider.id,
status_code=BAD_REQUEST,
msg_prefix=rsp.content.decode('utf-8'))
def test_staff_delete_provider(self):
# Staff may delete providers
self.user.is_staff = True
self.user.save()
provider = ProviderFactory()
book = get_export_workbook([provider], cell_overwrite_ok=True)
blank_out_row_for_testing(book, sheet_num=0, row_num=1)
rsp = self.import_book(book)
self.assertEqual(OK, rsp.status_code, msg=rsp.content.decode('utf-8'))
self.assertFalse(Provider.objects.filter(id=provider.id).exists())
def test_provider_change_password(self):
# Providers can change their password
provider = ProviderFactory(user=self.user)
book = get_export_workbook([provider], cell_overwrite_ok=True)
password_column = PROVIDER_HEADINGS.index('password')
set_cell_value(book, 0, 1, password_column, 'new_password')
rsp = self.import_book(book)
self.assertEqual(OK, rsp.status_code, msg=rsp.content.decode('utf-8'))
user = authenticate(email=provider.user.email,
password='new_password')
self.assertEqual(user, self.user)
def test_provider_change_anothers_password(self):
# Providers cannot change another provider's password
provider = ProviderFactory()
book = get_export_workbook([provider], cell_overwrite_ok=True)
password_column = PROVIDER_HEADINGS.index('password')
set_cell_value(book, 0, 1, password_column, 'new_password')
rsp = self.import_book(book)
self.assertEqual(BAD_REQUEST, rsp.status_code, msg=rsp.content.decode('utf-8'))
user = authenticate(email=provider.user.email,
password='new_password')
self.assertIsNone(user)
def test_staff_change_provider_password(self):
# Staff can change anyone's password
self.user.is_staff = True
self.user.save()
provider = ProviderFactory()
book = get_export_workbook([provider], cell_overwrite_ok=True)
password_column = PROVIDER_HEADINGS.index('password')
set_cell_value(book, 0, 1, password_column, 'new_password')
rsp = self.import_book(book)
self.assertEqual(OK, rsp.status_code, msg=rsp.content.decode('utf-8'))
user = authenticate(email=provider.user.email,
password='new_password')
self.assertEqual(user, provider.user)
|
theirc/ServiceInfo
|
services/tests/test_import.py
|
Python
|
bsd-3-clause
| 33,398
|
from toee import *
from utilities import *
from Co8 import *
from familiar_protos import familiar_table # modularize for KotB
def OnBeginSpellCast( spell ):
print "Summon Familiar OnBeginSpellCast"
print("Removing caster from target list")
spell.target_list.remove_target(spell.caster) # added because OnBeginRound can fire before OnSpellEffect
print "spell.target_list=", spell.target_list
print "spell.caster=", spell.caster, " caster.level= ", spell.caster_level
#game.particles( "sp-conjuration-conjure", spell.caster )
def OnSpellEffect( spell ):
print "Summon Familiar OnSpellEffect"
spell.duration = 2147483647
master = spell.caster
# get familiar inventory object handle
inv_proto = FindFamiliarProto( spell.caster, 0 )
familiar = spell.caster.item_find_by_proto( inv_proto )
if ( get_ID( familiar ) != 0 ):
spell.spell_end( spell.id , 1)
return SKIP_DEFAULT
# get the proto_id for this familiar
familiar_proto_id = FindFamiliarProto( spell.caster, 1 )
if (familiar_proto_id == 0): # not a recognized familiar type
spell.spell_end( spell.id , 1)
return SKIP_DEFAULT
# creates random ID number
ID_number = game.random_range( 1,2147483647 )
ID_number = ID_number^game.random_range( 1,2147483647 )#xor with next "random" number in line, should be more random
# create familiar
spell.summon_monsters( 1, familiar_proto_id )
# get familiar's handle
familiar_obj = GetFamiliarHandle( spell, familiar_proto_id )
if ( familiar_obj == OBJ_HANDLE_NULL ): # no new familiar present
return SKIP_DEFAULT
# summoning effect
#game.particles( 'Orb-Summon-Air-Elemental', familiar_obj )
# assigns familiar ownership
set_ID( familiar_obj, ID_number )
set_ID( familiar, ID_number )
#game.particles( "sp-summon monster II", game.party[0] )
# sets familiar's stats and bonuses depending on its master's level
master_level = GetLevel( spell.caster )
f_level = ( ( master_level + 1 ) / 2 )
f_hp = ( ( spell.caster.stat_level_get( stat_hp_max ) ) / 2 ) ## familiar's hp = 1/2 master's hp
base_hp = familiar_obj.stat_level_get( stat_hp_max ) ## familiar's base hp from proto
prev_max_hp = familiar.obj_get_int( obj_f_item_pad_i_1 ) ## familiar's max hp from last time summoned ( 0 if never summoned before)
prev_curr_hp = familiar.obj_get_int( obj_f_item_pad_i_2 ) ## familiar's current hp from last time stowed ( 0 if never summoned before)
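## (the inventory object outlives the dismissed creature, so these two pad fields
## appear to act as the persistent hp storage between summonings)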
if ( base_hp <= f_hp ): ## if 1/2 master's hp is greater than base hp from proto, will use 1/2 master's hp
new_hp = familiar_obj.stat_base_set( stat_hp_max, f_hp )
curr_max_hp = familiar_obj.stat_level_get( stat_hp_max ) ## familiar's max hp from current summons
hp_diff = ( curr_max_hp - prev_max_hp ) ## difference between max hp from last time summoned and max hp now ( 0 if master has not gained a level since)
if ( prev_max_hp != 0): ## has been summoned before
if ( hp_diff >=1 ): ## adds gained hp if master has gained hp since last time summoned
hp_now = prev_curr_hp + hp_diff
else:
hp_now = prev_curr_hp
dam = dice_new("1d1")
dam.num = curr_max_hp - hp_now
if (dam.num >= 1):
familiar_obj.damage(OBJ_HANDLE_NULL, D20DT_FORCE, dam, D20DAP_NORMAL)
## This next bit gives the familiar its master's BAB. The familiar should have a BAB (without the master's BAB) of zero, but since
## the game engine doesn't allow for Weapon Finesse with natural attacks (which would let the familiar use its dexterity modifier
## instead of the strength modifier), I fiddled with the "to hit" in the protos to counteract the negative attack modifier due to
## low strength and add the dexterity modifier. - Ceruleran the Blue ##
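## Illustrative example (not from the original script): a master with a base attack bonus of +3
## would grant the familiar a 'To Hit Bonus' condition of 3 via the call below.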
f_to_hit = spell.caster.stat_base_get(stat_attack_bonus)
new_to_hit = familiar_obj.condition_add_with_args( 'To Hit Bonus', f_to_hit, 0 )
new_int = familiar_obj.stat_base_set( stat_intelligence, ( 5 + f_level ) ) ## familiar INT bonus
armor = familiar_obj.obj_set_int( obj_f_npc_ac_bonus, (f_level) ) ## Natural Armor bonus
if ( master_level >= 11 ):
spell_resistance = familiar_obj.condition_add_with_args( 'Monster Spell Resistance', ( 5 + master_level ), 0 ) ## spell resistance
## familiar uses master's saving throw bonuses if they are higher than its own.
fortitude_bonus = Fortitude( spell.caster )
if ( fortitude_bonus >= 3 ):
fortitude_save = familiar_obj.obj_set_int( obj_f_npc_save_fortitude_bonus, fortitude_bonus )
reflex_bonus = Reflex( spell.caster )
if ( reflex_bonus >= 3 ):
reflex_save = familiar_obj.obj_set_int( obj_f_npc_save_reflexes_bonus, reflex_bonus )
will_bonus = Will( spell.caster )
if ( will_bonus >= 1 ):
will_save = familiar_obj.obj_set_int( obj_f_npc_save_willpower_bonus, will_bonus )
# add familiar to follower list for spell_caster
if not ( spell.caster.follower_atmax() ):
spell.caster.follower_add( familiar_obj )
else:
spell.caster.ai_follower_add( familiar_obj )
# add familiar_obj to d20initiative, and set initiative to spell_caster's
caster_init_value = spell.caster.get_initiative()
familiar_obj.add_to_initiative()
familiar_obj.set_initiative( caster_init_value )
game.update_combat_ui()
# familiar should disappear when duration is over, apply "TIMED_DISAPPEAR" condition
#familiar_obj.condition_add_with_args( 'sp-Summoned', spell.id, spell.duration, 0 )
# add familiar to target list
spell.num_of_targets = 2
spell.target_list[0].obj = familiar_obj
spell.target_list[1].obj = spell.caster # so it triggers OnBeginRound even if the familiar is gone, and ends the spell
#spell.spell_end( spell.id )
def OnBeginRound( spell ):
familiar_obj = spell.target_list[0].obj
print ("Summon Familiar OnBeginRound", "Spell ID: ", spell.id, 'Familiar obj: ', familiar_obj)
if ( familiar_obj.object_flags_get() & OF_DESTROYED):
print("Familiar obj is destroyed, ending spell")
spell.spell_end( spell.id , 1)
return
if familiar_obj.stat_level_get(stat_hp_current) <= -10:
# Remove familiar if dead after one day.
game.timevent_add( RemoveDead, ( spell.caster, familiar_obj ), 86400000) # 1000 = 1 second
return
if familiar_obj.type == obj_t_pc:
print("Familiar obj is PC, terminating spell")
spell.spell_end( spell.id , 1)
return
if familiar_obj not in game.party:
print("Familiar is not in party, terminating spell")
for f,p in familiar_table.items():
if familiar_obj.proto == p:
spell.target_list.remove(familiar_obj)
familiar_obj.destroy()
spell.spell_end( spell.id , 1)
return
familiar_id = get_ID(familiar_obj)
if familiar_id == 0:
return
for f,p in familiar_table.items():
if familiar_obj.proto != p:
continue
itemA = spell.caster.item_find_by_proto( f )
if itemA == OBJ_HANDLE_NULL:
continue
item_id = get_ID(itemA)
if item_id != familiar_id:
print('Summon Familiar OnBeginRound: found ID mismatch, ({:d} vs {:d}) correcting'.format(familiar_id, item_id))
set_ID(itemA, familiar_id)
return
def OnEndSpellCast( spell ):
print "Summon Familiar OnEndSpellCast"
for f,p in familiar_table.items():
itemA = spell.caster.item_find_by_proto( f )
if itemA != OBJ_HANDLE_NULL:
clear_ID( itemA )
####################################################################################################
# Functions Called in the Spell
####################################################################################################
def FindFamiliarProto( master, x ):
# Returns either the familiar creature's proto ID ( x = 1 ) or the familiar inventory object ( x = 0 )
for f,p in familiar_table.items():
itemC = master.item_find_by_proto( f )
if ( itemC != OBJ_HANDLE_NULL ):
if x :
return p
else:
return f
return 0
def GetFamiliarHandle( spell, familiar_proto_id ):
# Returns a handle that can be used to manipulate the familiar creature object
for npc in game.obj_list_vicinity( spell.target_loc, OLC_CRITTERS ):
if (npc.name == familiar_proto_id):
if get_ID( npc ) == 0:
return npc
return OBJ_HANDLE_NULL
def get_ID(obj):
# Returns embedded ID number
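# (The secret-door DC field is presumably unused on critters and inventory items,
# which is why it can be safely repurposed here as an ID slot.)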
return obj.obj_get_int(obj_f_secretdoor_dc)
def set_ID( obj, val ):
# Embeds ID number into mobile object. Returns ID number.
obj.obj_set_int( obj_f_secretdoor_dc, val )
return obj.obj_get_int( obj_f_secretdoor_dc )
def clear_ID( obj ):
# Clears embedded ID number from mobile object
obj.obj_set_int( obj_f_secretdoor_dc, 0 )
def FindMaster( npc ):
# Not actually used in the spell, but could be handy in the future. Returns the character that is the master for a given summoned familiar ( npc )
for p_master in game.obj_list_vicinity( npc.location, OLC_CRITTERS ):
for x,y in familiar_table.items():
item = p_master.item_find_by_proto( x )
if (item != OBJ_HANDLE_NULL):
if ( get_ID(item) == get_ID( npc ) ):
return p_master
return OBJ_HANDLE_NULL
def GetLevel( npc ):
# Returns the character's combined sorcerer and wizard levels
level = npc.stat_level_get(stat_level_sorcerer) + npc.stat_level_get(stat_level_wizard)
return level
def Fortitude( npc ):
# Returns the Fortitude Save Bonus for all of the caster's class levels
bonus = 0
level = npc.stat_level_get(stat_level_barbarian) + npc.stat_level_get(stat_level_cleric) + npc.stat_level_get(stat_level_druid) + npc.stat_level_get(stat_level_fighter) + npc.stat_level_get(stat_level_paladin) + npc.stat_level_get(stat_level_ranger) + npc.stat_level_get(stat_level_monk)
if ( level != 0 ):
bonus = ( ( level / 2 ) + 2 )
level = npc.stat_level_get(stat_level_bard) + npc.stat_level_get(stat_level_rogue) + npc.stat_level_get(stat_level_sorcerer) + npc.stat_level_get(stat_level_wizard)
if ( level != 0 ):
bonus = bonus + ( level / 3 )
return bonus
def Reflex( npc ):
# Returns the Reflex Save Bonus for all of the caster's class levels
bonus = 0
level = npc.stat_level_get(stat_level_barbarian) + npc.stat_level_get(stat_level_cleric) + npc.stat_level_get(stat_level_druid) + npc.stat_level_get(stat_level_fighter) + npc.stat_level_get(stat_level_paladin) + npc.stat_level_get(stat_level_sorcerer) + npc.stat_level_get(stat_level_wizard)
if ( level != 0 ):
bonus = ( level / 3 )
level = npc.stat_level_get(stat_level_ranger) + npc.stat_level_get(stat_level_rogue) + npc.stat_level_get(stat_level_monk) + npc.stat_level_get(stat_level_bard)
if ( level != 0 ):
bonus = bonus + ( ( level / 2 ) + 2 )
return bonus
def Will( npc ):
# Returns the Will Save Bonus for all of the caster's class levels
bonus = 0
level = npc.stat_level_get(stat_level_bard) + npc.stat_level_get(stat_level_cleric) + npc.stat_level_get(stat_level_druid) + npc.stat_level_get(stat_level_monk) + npc.stat_level_get(stat_level_sorcerer) + npc.stat_level_get(stat_level_wizard)
if ( level != 0 ):
bonus = ( ( level / 2 ) + 2 )
level = npc.stat_level_get(stat_level_barbarian) + npc.stat_level_get(stat_level_fighter) + npc.stat_level_get(stat_level_paladin) + npc.stat_level_get(stat_level_ranger) + npc.stat_level_get(stat_level_rogue)
if ( level != 0 ):
bonus = bonus + ( level / 3 )
return bonus
def RemoveDead(npc, critter):
print('Summon Familiar: Removing dead familiar')
if critter.stat_level_get(stat_hp_current) <= -10:
npc.follower_remove(critter)
return
|
GrognardsFromHell/TemplePlus
|
tpdatasrc/co8infra/scr/Spell760 - Summon Familiar.py
|
Python
|
mit
| 11,171
|
#!/usr/bin/env python
#
# Copyright 2011 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Methods for checking JS files for common style guide violations.
These style guide violations apply only to JavaScript and not to ECMAScript
languages in general.
"""
__author__ = ('robbyw@google.com (Robert Walker)',
'ajp@google.com (Andy Perelson)',
'jacobr@google.com (Jacob Richman)')
import re
from closure_linter import ecmalintrules
from closure_linter import error_check
from closure_linter import errors
from closure_linter import javascripttokenizer
from closure_linter import javascripttokens
from closure_linter import requireprovidesorter
from closure_linter import tokenutil
from closure_linter.common import error
from closure_linter.common import position
# Shorthand
Error = error.Error
Position = position.Position
Rule = error_check.Rule
Type = javascripttokens.JavaScriptTokenType
class JavaScriptLintRules(ecmalintrules.EcmaScriptLintRules):
"""JavaScript lint rules that catch JavaScript specific style errors."""
def __init__(self, namespaces_info):
"""Initializes a JavaScriptLintRules instance."""
ecmalintrules.EcmaScriptLintRules.__init__(self)
self._namespaces_info = namespaces_info
def HandleMissingParameterDoc(self, token, param_name):
"""Handle errors associated with a parameter missing a param tag."""
self._HandleError(errors.MISSING_PARAMETER_DOCUMENTATION,
'Missing docs for parameter: "%s"' % param_name, token)
def __ContainsRecordType(self, token):
"""Check whether the given token contains a record type.
Args:
token: The token being checked
Returns:
True if the token contains a record type, False otherwise.
"""
# If we see more than one left-brace in the string of an annotation token,
# then there's a record type in there.
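# For example, an annotation like "@param {{name: string, age: number}} person"
# contains a second '{', so it is treated as containing a record type.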
return (
token and token.type == Type.DOC_FLAG and
token.attached_object.type is not None and
token.attached_object.type.find('{') != token.string.rfind('{'))
def CheckToken(self, token, state):
"""Checks a token, given the current parser_state, for warnings and errors.
Args:
token: The current token under consideration
state: parser_state object that indicates the current state in the page
"""
if self.__ContainsRecordType(token):
# We should bail out and not emit any warnings for this annotation.
# TODO(nicksantos): Support record types for real.
state.GetDocComment().Invalidate()
return
# Call the base class's CheckToken function.
super(JavaScriptLintRules, self).CheckToken(token, state)
# Store some convenience variables
namespaces_info = self._namespaces_info
if token.type == Type.DOC_FLAG:
flag = token.attached_object
if flag.flag_type == 'param' and flag.name_token is not None:
self._CheckForMissingSpaceBeforeToken(
token.attached_object.name_token)
if flag.flag_type in state.GetDocFlag().HAS_TYPE:
# Check for both missing type token and empty type braces '{}'
# Missing suppress types are reported separately and we allow enums
# without types.
if (flag.flag_type not in ('suppress', 'enum') and
(not flag.type or flag.type.isspace())):
self._HandleError(errors.MISSING_JSDOC_TAG_TYPE,
'Missing type in %s tag' % token.string, token)
elif flag.name_token and flag.type_end_token and tokenutil.Compare(
flag.type_end_token, flag.name_token) > 0:
self._HandleError(
errors.OUT_OF_ORDER_JSDOC_TAG_TYPE,
'Type should be immediately after %s tag' % token.string,
token)
elif token.type == Type.DOUBLE_QUOTE_STRING_START:
next_token = token.next
while next_token.type == Type.STRING_TEXT:
if javascripttokenizer.JavaScriptTokenizer.SINGLE_QUOTE.search(
next_token.string):
break
next_token = next_token.next
else:
self._HandleError(
errors.UNNECESSARY_DOUBLE_QUOTED_STRING,
'Single-quoted string preferred over double-quoted string.',
token,
Position.All(token.string))
elif token.type == Type.END_DOC_COMMENT:
if (error_check.ShouldCheck(Rule.BLANK_LINES_AT_TOP_LEVEL) and
not self._is_html and state.InTopLevel() and not state.InBlock()):
# Check if we're in a fileoverview or constructor JsDoc.
doc_comment = state.GetDocComment()
is_constructor = (
doc_comment.HasFlag('constructor') or
doc_comment.HasFlag('interface'))
is_file_overview = doc_comment.HasFlag('fileoverview')
# If the comment is not a file overview, and it does not immediately
# precede some code, skip it.
# NOTE: The tokenutil methods are not used here because of their
# behavior at the top of a file.
next_token = token.next
if (not next_token or
(not is_file_overview and next_token.type in Type.NON_CODE_TYPES)):
return
# Don't require extra blank lines around suppression of extra
# goog.require errors.
if (doc_comment.SuppressionOnly() and
next_token.type == Type.IDENTIFIER and
next_token.string in ['goog.provide', 'goog.require']):
return
# Find the start of this block (include comments above the block, unless
# this is a file overview).
block_start = doc_comment.start_token
if not is_file_overview:
token = block_start.previous
while token and token.type in Type.COMMENT_TYPES:
block_start = token
token = token.previous
# Count the number of blank lines before this block.
blank_lines = 0
token = block_start.previous
while token and token.type in [Type.WHITESPACE, Type.BLANK_LINE]:
if token.type == Type.BLANK_LINE:
# A blank line.
blank_lines += 1
elif token.type == Type.WHITESPACE and not token.line.strip():
# A line with only whitespace on it.
blank_lines += 1
token = token.previous
# Log errors.
error_message = False
expected_blank_lines = 0
if is_file_overview and blank_lines == 0:
error_message = 'Should have a blank line before a file overview.'
expected_blank_lines = 1
elif is_constructor and blank_lines != 3:
error_message = (
'Should have 3 blank lines before a constructor/interface.')
expected_blank_lines = 3
elif not is_file_overview and not is_constructor and blank_lines != 2:
error_message = 'Should have 2 blank lines between top-level blocks.'
expected_blank_lines = 2
if error_message:
self._HandleError(
errors.WRONG_BLANK_LINE_COUNT, error_message,
block_start, Position.AtBeginning(),
expected_blank_lines - blank_lines)
elif token.type == Type.END_BLOCK:
if state.InFunction() and state.IsFunctionClose():
is_immediately_called = (token.next and
token.next.type == Type.START_PAREN)
function = state.GetFunction()
if not self._limited_doc_checks:
if (function.has_return and function.doc and
not is_immediately_called and
not function.doc.HasFlag('return') and
not function.doc.InheritsDocumentation() and
not function.doc.HasFlag('constructor')):
# Check for proper documentation of return value.
self._HandleError(
errors.MISSING_RETURN_DOCUMENTATION,
'Missing @return JsDoc in function with non-trivial return',
function.doc.end_token, Position.AtBeginning())
elif (not function.has_return and
not function.has_throw and
function.doc and
function.doc.HasFlag('return') and
not state.InInterfaceMethod()):
return_flag = function.doc.GetFlag('return')
if (return_flag.type is None or (
'undefined' not in return_flag.type and
'void' not in return_flag.type and
'*' not in return_flag.type)):
self._HandleError(
errors.UNNECESSARY_RETURN_DOCUMENTATION,
'Found @return JsDoc on function that returns nothing',
return_flag.flag_token, Position.AtBeginning())
if state.InFunction() and state.IsFunctionClose():
is_immediately_called = (token.next and
token.next.type == Type.START_PAREN)
if (function.has_this and function.doc and
not function.doc.HasFlag('this') and
not function.is_constructor and
not function.is_interface and
'.prototype.' not in function.name):
self._HandleError(
errors.MISSING_JSDOC_TAG_THIS,
'Missing @this JsDoc in function referencing "this". ('
'this usually means you are trying to reference "this" in '
'a static function, or you have forgotten to mark a '
'constructor with @constructor)',
function.doc.end_token, Position.AtBeginning())
elif token.type == Type.IDENTIFIER:
if token.string == 'goog.inherits' and not state.InFunction():
if state.GetLastNonSpaceToken().line_number == token.line_number:
self._HandleError(
errors.MISSING_LINE,
'Missing newline between constructor and goog.inherits',
token,
Position.AtBeginning())
extra_space = state.GetLastNonSpaceToken().next
while extra_space != token:
if extra_space.type == Type.BLANK_LINE:
self._HandleError(
errors.EXTRA_LINE,
'Extra line between constructor and goog.inherits',
extra_space)
extra_space = extra_space.next
# TODO(robbyw): Test the last function was a constructor.
# TODO(robbyw): Test correct @extends and @implements documentation.
elif (token.string == 'goog.provide' and
not state.InFunction() and
namespaces_info is not None):
namespace = tokenutil.Search(token, Type.STRING_TEXT).string
# Report extra goog.provide statement.
if namespaces_info.IsExtraProvide(token):
self._HandleError(
errors.EXTRA_GOOG_PROVIDE,
'Unnecessary goog.provide: ' + namespace,
token, position=Position.AtBeginning())
if namespaces_info.IsLastProvide(token):
# Report missing provide statements after the last existing provide.
missing_provides = namespaces_info.GetMissingProvides()
if missing_provides:
self._ReportMissingProvides(
missing_provides,
tokenutil.GetLastTokenInSameLine(token).next,
False)
# If there are no require statements, missing requires should be
# reported after the last provide.
if not namespaces_info.GetRequiredNamespaces():
missing_requires = namespaces_info.GetMissingRequires()
if missing_requires:
self._ReportMissingRequires(
missing_requires,
tokenutil.GetLastTokenInSameLine(token).next,
True)
elif (token.string == 'goog.require' and
not state.InFunction() and
namespaces_info is not None):
namespace = tokenutil.Search(token, Type.STRING_TEXT).string
# If there are no provide statements, missing provides should be
# reported before the first require.
if (namespaces_info.IsFirstRequire(token) and
not namespaces_info.GetProvidedNamespaces()):
missing_provides = namespaces_info.GetMissingProvides()
if missing_provides:
self._ReportMissingProvides(
missing_provides,
tokenutil.GetFirstTokenInSameLine(token),
True)
# Report extra goog.require statement.
if namespaces_info.IsExtraRequire(token):
self._HandleError(
errors.EXTRA_GOOG_REQUIRE,
'Unnecessary goog.require: ' + namespace,
token, position=Position.AtBeginning())
# Report missing goog.require statements.
if namespaces_info.IsLastRequire(token):
missing_requires = namespaces_info.GetMissingRequires()
if missing_requires:
self._ReportMissingRequires(
missing_requires,
tokenutil.GetLastTokenInSameLine(token).next,
False)
elif token.type == Type.OPERATOR:
last_in_line = token.IsLastInLine()
# If the token is unary and appears to be used in a unary context
# it's ok. Otherwise, if it's at the end of the line or immediately
# before a comment, it's ok.
# Don't report an error before a start bracket - it will be reported
# by that token's space checks.
if (not token.metadata.IsUnaryOperator() and not last_in_line
and not token.next.IsComment()
and not token.next.IsOperator(',')
and not token.next.type in (Type.WHITESPACE, Type.END_PAREN,
Type.END_BRACKET, Type.SEMICOLON,
Type.START_BRACKET)):
self._HandleError(
errors.MISSING_SPACE,
'Missing space after "%s"' % token.string,
token,
Position.AtEnd(token.string))
elif token.type == Type.WHITESPACE:
first_in_line = token.IsFirstInLine()
last_in_line = token.IsLastInLine()
# Check whitespace length if it's not the first token of the line and
# if it's not immediately before a comment.
if not last_in_line and not first_in_line and not token.next.IsComment():
# Ensure there is no space after opening parentheses.
if (token.previous.type in (Type.START_PAREN, Type.START_BRACKET,
Type.FUNCTION_NAME)
or token.next.type == Type.START_PARAMETERS):
self._HandleError(
errors.EXTRA_SPACE,
'Extra space after "%s"' % token.previous.string,
token,
Position.All(token.string))
def _ReportMissingProvides(self, missing_provides, token, need_blank_line):
"""Reports missing provide statements to the error handler.
Args:
missing_provides: A list of strings where each string is a namespace that
should be provided, but is not.
token: The token where the error was detected (also where the new provides
will be inserted).
need_blank_line: Whether a blank line needs to be inserted after the new
provides are inserted. May be True, False, or None, where None
indicates that the insert location is unknown.
"""
self._HandleError(
errors.MISSING_GOOG_PROVIDE,
'Missing the following goog.provide statements:\n' +
'\n'.join(map(lambda x: 'goog.provide(\'%s\');' % x,
sorted(missing_provides))),
token, position=Position.AtBeginning(),
fix_data=(missing_provides, need_blank_line))
def _ReportMissingRequires(self, missing_requires, token, need_blank_line):
"""Reports missing require statements to the error handler.
Args:
missing_requires: A list of strings where each string is a namespace that
should be required, but is not.
token: The token where the error was detected (also where the new requires
will be inserted).
need_blank_line: Whether a blank line needs to be inserted before the new
requires are inserted. May be True, False, or None, where None
indicates that the insert location is unknown.
"""
self._HandleError(
errors.MISSING_GOOG_REQUIRE,
'Missing the following goog.require statements:\n' +
'\n'.join(map(lambda x: 'goog.require(\'%s\');' % x,
sorted(missing_requires))),
token, position=Position.AtBeginning(),
fix_data=(missing_requires, need_blank_line))
def Finalize(self, state, tokenizer_mode):
"""Perform all checks that need to occur after all lines are processed."""
# Call the base class's Finalize function.
super(JavaScriptLintRules, self).Finalize(state, tokenizer_mode)
namespaces_info = self._namespaces_info
if namespaces_info is not None:
# If there are no provide or require statements, missing provides and
# requires should be reported on line 1.
if (not namespaces_info.GetProvidedNamespaces() and
not namespaces_info.GetRequiredNamespaces()):
missing_provides = namespaces_info.GetMissingProvides()
if missing_provides:
self._ReportMissingProvides(
missing_provides, state.GetFirstToken(), None)
missing_requires = namespaces_info.GetMissingRequires()
if missing_requires:
self._ReportMissingRequires(
missing_requires, state.GetFirstToken(), None)
self._CheckSortedRequiresProvides(state.GetFirstToken())
def _CheckSortedRequiresProvides(self, token):
"""Checks that all goog.require and goog.provide statements are sorted.
Note that this method needs to be run after missing statements are added to
preserve alphabetical order.
Args:
token: The first token in the token stream.
"""
sorter = requireprovidesorter.RequireProvideSorter()
provides_result = sorter.CheckProvides(token)
if provides_result:
self._HandleError(
errors.GOOG_PROVIDES_NOT_ALPHABETIZED,
'goog.provide classes must be alphabetized. The correct code is:\n' +
'\n'.join(
              map(lambda x: 'goog.provide(\'%s\');' % x, provides_result[1])),
provides_result[0],
position=Position.AtBeginning(),
fix_data=provides_result[0])
requires_result = sorter.CheckRequires(token)
if requires_result:
self._HandleError(
errors.GOOG_REQUIRES_NOT_ALPHABETIZED,
'goog.require classes must be alphabetized. The correct code is:\n' +
'\n'.join(
map(lambda x: 'goog.require(\'%s\');' % x, requires_result[1])),
requires_result[0],
position=Position.AtBeginning(),
fix_data=requires_result[0])
def GetLongLineExceptions(self):
"""Gets a list of regexps for lines which can be longer than the limit."""
return [
        re.compile(r'goog\.require\(.+\);?\s*$'),
        re.compile(r'goog\.provide\(.+\);?\s*$')
]
|
ghostx2013/FabricEngine_Backup
|
Native/ThirdParty/Private/Python/closure_linter/javascriptlintrules.py
|
Python
|
agpl-3.0
| 19,602
|
# -*- coding: utf-8 -*-
from django.conf.urls import url
from app_administrativo.candidato.views import *
urlpatterns = [
url(r'^$', CandidatoListView.as_view(), name=u'candidato_home'),
url(r'^cadastar/$', CandidatoFormView.as_view(), name=u'candidato_cadastrar'),
url(r'^(?P<pk>[0-9]+)/$', CandidatoDetailView.as_view(), name=u'candidato_detalhar'),
url(r'^(?P<pk>[\w-]+)/atualizar$', CandidatoUpdateView.as_view(), name=u'candidato_atualizar'),
url(r'^(?P<pk>[\w-]+)/deletar/$', CandidatoDeleteView.as_view(), name=u'candidato_deletar'),
]
|
dparaujo/projeto
|
app_administrativo/candidato/urls.py
|
Python
|
gpl-3.0
| 688
|
from chat import application, init_db
from gevent import monkey
from socketio.server import SocketIOServer
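# patch_all() makes the standard library cooperative so that blocking calls
# yield to gevent's event loop instead of stalling the SocketIO server.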
monkey.patch_all()
init_db()
if __name__ == '__main__':
SocketIOServer(
('', application.config['PORT']),
application,
resource="socket.io").serve_forever()
|
yakudzam/promuatest
|
runserver.py
|
Python
|
apache-2.0
| 295
|
# Copyright 2008 Alex Collins
#
# This file is part of Pyela.
#
# Pyela is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pyela is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Pyela. If not, see <http://www.gnu.org/licenses/>.
__all__ = ["managers", "session"]
|
atc-/pyela
|
pyela/el/logic/__init__.py
|
Python
|
gpl-3.0
| 710
|
#
# ICRAR - International Centre for Radio Astronomy Research
# (c) UWA - The University of Western Australia, 2020
# Copyright by UWA (in the framework of the ICRAR)
# All rights reserved
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
#
from ..common import tool
def include_dir(_parser, _args):
from . import get_include_dir
print(get_include_dir())
def register_commands():
tool.cmdwrap('nm', 'Starts a Node Manager', 'dlg.manager.cmdline:dlgNM')
tool.cmdwrap('dim', 'Starts a Drop Island Manager', 'dlg.manager.cmdline:dlgDIM')
tool.cmdwrap('mm', 'Starts a Master Manager', 'dlg.manager.cmdline:dlgMM')
tool.cmdwrap('replay', 'Starts a Replay Manager', 'dlg.manager.cmdline:dlgReplay')
tool.cmdwrap('daemon', 'Starts a DALiuGE Daemon process', 'dlg.manager.proc_daemon:run_with_cmdline')
tool.cmdwrap('proxy', 'A reverse proxy to be used in restricted environments to contact the Drop Managers', 'dlg.deploy.pawsey.dfms_proxy:run')
tool.cmdwrap('monitor', 'A proxy to be used in conjunction with the dlg proxy in restricted environments', 'dlg.deploy.pawsey.dfms_monitor:run')
tool.cmdwrap('include_dir', 'Print the directory where C header files can be found', include_dir)
|
steve-ord/daliuge
|
daliuge-engine/dlg/runtime/tool_commands.py
|
Python
|
lgpl-2.1
| 1,948
|
import pymongo
import configparser
def db():
config = configparser.RawConfigParser()
config.read('./.config')
host = config.get('tumblr', 'host')
port = config.get('tumblr', 'port')
user = config.get('tumblr', 'user')
passwd = config.get('tumblr', 'passwd')
client = pymongo.MongoClient(host, int(port))
client.admin.authenticate(user, passwd, mechanism = 'SCRAM-SHA-1', source='test')
testDB = client.test
return testDB
|
blacksky0000/tools
|
tumblr/dbconnect.py
|
Python
|
mit
| 463
|
import os
def listdir(path, data):
for f in os.listdir(path):
if path != ".":
full = path + "/" + f
else:
full = f
if os.path.isdir(full):
listdir(full, data)
else:
ext = os.path.splitext(f)[1]
if ext not in (".dll", ".py", ".icf", ".py", ".js"):
data.append(full)
files = []
listdir(".", files)
print files
s = ""
for f in files:
s = s + " --embed " + f
cmd = "file_packager.py test --js-output=data.js " + s
print cmd
os.system(cmd)
|
unitpoint/oxygine-objectscript
|
examples/HelloWorld/data/pack.py
|
Python
|
mit
| 579
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Run Performance Test Bisect Tool
This script is used by a try bot to run the bisect script with the parameters
specified in the bisect config file. It checks out a copy of the depot in
a subdirectory 'bisect' of the working directory provided, and runs the
bisect script there.
"""
import errno
import json
import optparse
import os
import platform
import re
import shlex
import subprocess
import sys
import traceback
from auto_bisect import bisect_perf_regression
from auto_bisect import bisect_utils
from auto_bisect import math_utils
from auto_bisect import source_control
CROS_BOARD_ENV = 'BISECT_CROS_BOARD'
CROS_IP_ENV = 'BISECT_CROS_IP'
SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
SRC_DIR = os.path.join(SCRIPT_DIR, os.path.pardir)
BISECT_CONFIG_PATH = os.path.join(SCRIPT_DIR, 'auto_bisect', 'bisect.cfg')
RUN_TEST_CONFIG_PATH = os.path.join(SCRIPT_DIR, 'run-perf-test.cfg')
WEBKIT_RUN_TEST_CONFIG_PATH = os.path.join(
SRC_DIR, 'third_party', 'WebKit', 'Tools', 'run-perf-test.cfg')
BISECT_SCRIPT_DIR = os.path.join(SCRIPT_DIR, 'auto_bisect')
PERF_BENCHMARKS_PATH = 'tools/perf/benchmarks'
PERF_MEASUREMENTS_PATH = 'tools/perf/measurements'
BUILDBOT_BUILDERNAME = 'BUILDBOT_BUILDERNAME'
BENCHMARKS_JSON_FILE = 'benchmarks.json'
# This is used to identify tryjobs triggered by the commit queue.
_COMMIT_QUEUE_USERS = [
'5071639625-1lppvbtck1morgivc6sq4dul7klu27sd@developer.gserviceaccount.com',
'commit-bot@chromium.org']
class Goma(object):
def __init__(self, path_to_goma):
self._abs_path_to_goma = None
self._abs_path_to_goma_file = None
if not path_to_goma:
return
self._abs_path_to_goma = os.path.abspath(path_to_goma)
filename = 'goma_ctl.bat' if os.name == 'nt' else 'goma_ctl.sh'
self._abs_path_to_goma_file = os.path.join(self._abs_path_to_goma, filename)
def __enter__(self):
if self._HasGomaPath():
self._SetupAndStart()
return self
def __exit__(self, *_):
if self._HasGomaPath():
self._Stop()
def _HasGomaPath(self):
return bool(self._abs_path_to_goma)
def _SetupEnvVars(self):
if os.name == 'nt':
os.environ['CC'] = (os.path.join(self._abs_path_to_goma, 'gomacc.exe') +
' cl.exe')
os.environ['CXX'] = (os.path.join(self._abs_path_to_goma, 'gomacc.exe') +
' cl.exe')
else:
os.environ['PATH'] = os.pathsep.join([self._abs_path_to_goma,
os.environ['PATH']])
def _SetupAndStart(self):
"""Sets up goma and launches it.
Args:
path_to_goma: Path to goma directory.
Returns:
True if successful."""
self._SetupEnvVars()
# Sometimes goma is lingering around if something went bad on a previous
# run. Stop it before starting a new process. Can ignore the return code
# since it will return an error if it wasn't running.
self._Stop()
if subprocess.call([self._abs_path_to_goma_file, 'start']):
raise RuntimeError('Goma failed to start.')
def _Stop(self):
subprocess.call([self._abs_path_to_goma_file, 'stop'])
def _LoadConfigFile(config_file_path):
"""Attempts to load the specified config file as a module
and grab the global config dict.
Args:
config_file_path: Path to the config file.
Returns:
If successful, returns the config dict loaded from the file. If no
such dictionary could be loaded, returns the empty dictionary.
"""
try:
local_vars = {}
execfile(config_file_path, local_vars)
return local_vars['config']
except Exception:
print
traceback.print_exc()
print
return {}
def _ValidateConfigFile(config_contents, required_parameters):
"""Validates the config file contents, checking whether all values are
non-empty.
Args:
config_contents: A config dictionary.
required_parameters: A list of parameters to check for.
Returns:
True if valid.
"""
for parameter in required_parameters:
if parameter not in config_contents:
return False
value = config_contents[parameter]
if not value or type(value) is not str:
return False
return True
def _ValidatePerfConfigFile(config_contents):
"""Validates the perf config file contents.
This is used when we're doing a perf try job, rather than a bisect.
The config file is called run-perf-test.cfg by default.
The parameters checked are the required parameters; any additional optional
parameters won't be checked and validation will still pass.
Args:
config_contents: A config dictionary.
Returns:
True if valid.
"""
return _ValidateConfigFile(config_contents, required_parameters=['command'])
def _ValidateBisectConfigFile(config_contents):
"""Validates the bisect config file contents.
The parameters checked are the required parameters; any additional optional
parameters won't be checked and validation will still pass.
Args:
config_contents: A config dictionary.
Returns:
True if valid.
"""
return _ValidateConfigFile(
config_contents,
required_parameters=['command', 'good_revision', 'bad_revision'])
def _OutputFailedResults(text_to_print):
bisect_utils.OutputAnnotationStepStart('Results - Failed')
print
print text_to_print
print
bisect_utils.OutputAnnotationStepClosed()
def _CreateBisectOptionsFromConfig(config):
print config['command']
opts_dict = {}
opts_dict['command'] = config['command']
opts_dict['metric'] = config.get('metric')
if config['repeat_count']:
opts_dict['repeat_test_count'] = int(config['repeat_count'])
if config['truncate_percent']:
opts_dict['truncate_percent'] = int(config['truncate_percent'])
if config['max_time_minutes']:
opts_dict['max_time_minutes'] = _Clamp(
int(config['max_time_minutes']), low=1, high=60)
if config.has_key('use_goma'):
opts_dict['use_goma'] = config['use_goma']
if config.has_key('goma_dir'):
opts_dict['goma_dir'] = config['goma_dir']
if config.has_key('improvement_direction'):
opts_dict['improvement_direction'] = int(config['improvement_direction'])
if config.has_key('required_initial_confidence'):
opts_dict['required_initial_confidence'] = float(
config['required_initial_confidence'])
if config.has_key('target_arch'):
opts_dict['target_arch'] = config['target_arch']
if config.has_key('bug_id') and str(config['bug_id']).isdigit():
opts_dict['bug_id'] = config['bug_id']
if config.has_key('try_job_id'):
opts_dict['try_job_id'] = config['try_job_id']
opts_dict['build_preference'] = 'ninja'
opts_dict['output_buildbot_annotations'] = True
if '--browser=cros' in config['command']:
opts_dict['target_platform'] = 'cros'
if os.environ[CROS_BOARD_ENV] and os.environ[CROS_IP_ENV]:
opts_dict['cros_board'] = os.environ[CROS_BOARD_ENV]
opts_dict['cros_remote_ip'] = os.environ[CROS_IP_ENV]
else:
      raise RuntimeError('CrOS build selected, but BISECT_CROS_IP or '
                         'BISECT_CROS_BOARD undefined.')
elif 'android' in config['command']:
if 'android-chromium' in config['command']:
opts_dict['target_platform'] = 'android'
elif 'android-chrome' in config['command']:
opts_dict['target_platform'] = 'android-chrome'
else:
opts_dict['target_platform'] = 'android'
return bisect_perf_regression.BisectOptions.FromDict(opts_dict)
def _Clamp(n, low, high):
"""Clamps a value to a range."""
return min(high, max(low, n))
def _ParseCloudLinksFromOutput(output):
html_results_pattern = re.compile(
r'\s(?P<VALUES>http://storage.googleapis.com/' +
'chromium-telemetry/html-results/results-[a-z0-9-_]+)\s',
re.MULTILINE)
profiler_pattern = re.compile(
r'\s(?P<VALUES>https://console.developers.google.com/' +
'm/cloudstorage/b/[a-z-]+/o/profiler-[a-z0-9-_.]+)\s',
re.MULTILINE)
results = {
'html-results': html_results_pattern.findall(output),
'profiler': profiler_pattern.findall(output),
}
return results
def _ParseAndOutputCloudLinks(
results_without_patch, results_with_patch, annotations_dict):
cloud_links_without_patch = _ParseCloudLinksFromOutput(
results_without_patch[2])
cloud_links_with_patch = _ParseCloudLinksFromOutput(
results_with_patch[2])
cloud_file_link = (cloud_links_without_patch['html-results'][0]
if cloud_links_without_patch['html-results'] else '')
profiler_file_links_with_patch = cloud_links_with_patch['profiler']
profiler_file_links_without_patch = cloud_links_without_patch['profiler']
# Calculate the % difference in the means of the 2 runs.
percent_diff_in_means = None
std_err = None
if (results_with_patch[0].has_key('mean') and
results_with_patch[0].has_key('values')):
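    # The max(0.0001, ...) below guards against dividing by a zero baseline mean.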
percent_diff_in_means = (results_with_patch[0]['mean'] /
max(0.0001, results_without_patch[0]['mean'])) * 100.0 - 100.0
std_err = math_utils.PooledStandardError(
[results_with_patch[0]['values'], results_without_patch[0]['values']])
if percent_diff_in_means is not None and std_err is not None:
bisect_utils.OutputAnnotationStepStart('Results - %.02f +- %0.02f delta' %
(percent_diff_in_means, std_err))
print ' %s %s %s' % (''.center(10, ' '), 'Mean'.center(20, ' '),
'Std. Error'.center(20, ' '))
print ' %s %s %s' % ('Patch'.center(10, ' '),
('%.02f' % results_with_patch[0]['mean']).center(20, ' '),
('%.02f' % results_with_patch[0]['std_err']).center(20, ' '))
print ' %s %s %s' % ('No Patch'.center(10, ' '),
('%.02f' % results_without_patch[0]['mean']).center(20, ' '),
('%.02f' % results_without_patch[0]['std_err']).center(20, ' '))
if cloud_file_link:
bisect_utils.OutputAnnotationStepLink('HTML Results', cloud_file_link)
bisect_utils.OutputAnnotationStepClosed()
elif cloud_file_link:
bisect_utils.OutputAnnotationStepLink('HTML Results', cloud_file_link)
if profiler_file_links_with_patch and profiler_file_links_without_patch:
for i in xrange(len(profiler_file_links_with_patch)):
bisect_utils.OutputAnnotationStepLink(
'%s[%d]' % (annotations_dict.get('profiler_link1'), i),
profiler_file_links_with_patch[i])
for i in xrange(len(profiler_file_links_without_patch)):
bisect_utils.OutputAnnotationStepLink(
'%s[%d]' % (annotations_dict.get('profiler_link2'), i),
profiler_file_links_without_patch[i])
def _ResolveRevisionsFromConfig(config):
if not 'good_revision' in config and not 'bad_revision' in config:
return (None, None)
bad_revision = source_control.ResolveToRevision(
config['bad_revision'], 'chromium', bisect_utils.DEPOT_DEPS_NAME, 100)
if not bad_revision:
raise RuntimeError('Failed to resolve [%s] to git hash.',
config['bad_revision'])
good_revision = source_control.ResolveToRevision(
config['good_revision'], 'chromium', bisect_utils.DEPOT_DEPS_NAME, -100)
if not good_revision:
raise RuntimeError('Failed to resolve [%s] to git hash.',
config['good_revision'])
return (good_revision, bad_revision)
def _GetStepAnnotationStringsDict(config):
if 'good_revision' in config and 'bad_revision' in config:
return {
'build1': 'Building [%s]' % config['good_revision'],
'build2': 'Building [%s]' % config['bad_revision'],
'run1': 'Running [%s]' % config['good_revision'],
'run2': 'Running [%s]' % config['bad_revision'],
'sync1': 'Syncing [%s]' % config['good_revision'],
'sync2': 'Syncing [%s]' % config['bad_revision'],
'results_label1': config['good_revision'],
'results_label2': config['bad_revision'],
'profiler_link1': 'Profiler Data - %s' % config['good_revision'],
'profiler_link2': 'Profiler Data - %s' % config['bad_revision'],
}
else:
return {
'build1': 'Building With Patch',
'build2': 'Building Without Patch',
'run1': 'Running With Patch',
'run2': 'Running Without Patch',
'results_label1': 'Patch',
'results_label2': 'ToT',
'profiler_link1': 'With Patch - Profiler Data',
'profiler_link2': 'Without Patch - Profiler Data',
}
def _RunBuildStepForPerformanceTest(bisect_instance,
build_string,
sync_string,
revision):
if revision:
bisect_utils.OutputAnnotationStepStart(sync_string)
if not source_control.SyncToRevision(revision, 'gclient'):
raise RuntimeError('Failed [%s].' % sync_string)
bisect_utils.OutputAnnotationStepClosed()
bisect_utils.OutputAnnotationStepStart(build_string)
if bisect_utils.RunGClient(['runhooks']):
raise RuntimeError('Failed to run gclient runhooks')
if not bisect_instance.ObtainBuild('chromium'):
raise RuntimeError('Patched version failed to build.')
bisect_utils.OutputAnnotationStepClosed()
def _RunCommandStepForPerformanceTest(bisect_instance,
opts,
reset_on_first_run,
upload_on_last_run,
results_label,
run_string):
bisect_utils.OutputAnnotationStepStart(run_string)
results = bisect_instance.RunPerformanceTestAndParseResults(
opts.command,
opts.metric,
reset_on_first_run=reset_on_first_run,
upload_on_last_run=upload_on_last_run,
results_label=results_label,
allow_flakes=False)
if results[1]:
raise RuntimeError('Patched version failed to run performance test.')
bisect_utils.OutputAnnotationStepClosed()
return results
def _RunPerformanceTest(config):
"""Runs a performance test with and without the current patch.
Args:
config: Contents of the config file, a dictionary.
Attempts to build and run the current revision with and without the
current patch, with the parameters passed in.
"""
# Bisect script expects to be run from the src directory
os.chdir(SRC_DIR)
opts = _CreateBisectOptionsFromConfig(config)
revisions = _ResolveRevisionsFromConfig(config)
annotations_dict = _GetStepAnnotationStringsDict(config)
b = bisect_perf_regression.BisectPerformanceMetrics(opts, os.getcwd())
_RunBuildStepForPerformanceTest(b,
annotations_dict.get('build1'),
annotations_dict.get('sync1'),
revisions[0])
results_with_patch = _RunCommandStepForPerformanceTest(
b, opts, True, True, annotations_dict['results_label1'],
annotations_dict['run1'])
bisect_utils.OutputAnnotationStepStart('Reverting Patch')
# TODO: When this is re-written to recipes, this should use bot_update's
# revert mechanism to fully revert the client. But for now, since we know that
# the perf try bot currently only supports src/ and src/third_party/WebKit, we
# simply reset those two directories.
bisect_utils.CheckRunGit(['reset', '--hard'])
bisect_utils.CheckRunGit(['reset', '--hard'],
os.path.join('third_party', 'WebKit'))
bisect_utils.OutputAnnotationStepClosed()
_RunBuildStepForPerformanceTest(b,
annotations_dict.get('build2'),
annotations_dict.get('sync2'),
revisions[1])
results_without_patch = _RunCommandStepForPerformanceTest(
b, opts, False, True, annotations_dict['results_label2'],
annotations_dict['run2'])
# Find the link to the cloud stored results file.
_ParseAndOutputCloudLinks(
results_without_patch, results_with_patch, annotations_dict)
def _SetupAndRunPerformanceTest(config, path_to_goma, is_cq_tryjob=False):
"""Attempts to build and run the current revision with and without the
current patch, with the parameters passed in.
Args:
config: The config read from run-perf-test.cfg.
path_to_goma: Path to goma directory.
is_cq_tryjob: Whether or not the try job was initiated by commit queue.
Returns:
An exit code: 0 on success, otherwise 1.
"""
if platform.release() == 'XP':
print 'Windows XP is not supported for perf try jobs because it lacks '
print 'goma support. Please refer to crbug.com/330900.'
return 1
try:
with Goma(path_to_goma) as _:
config['use_goma'] = bool(path_to_goma)
if config['use_goma']:
config['goma_dir'] = os.path.abspath(path_to_goma)
if not is_cq_tryjob:
_RunPerformanceTest(config)
else:
return _RunBenchmarksForCommitQueue(config)
return 0
except RuntimeError, e:
bisect_utils.OutputAnnotationStepFailure()
bisect_utils.OutputAnnotationStepClosed()
_OutputFailedResults('Error: %s' % e.message)
return 1
def _RunBisectionScript(
config, working_directory, path_to_goma, path_to_extra_src, dry_run):
"""Attempts to execute the bisect script with the given parameters.
Args:
config: A dict containing the parameters to pass to the script.
working_directory: A working directory to provide to the bisect script,
        where it will store its own copy of the depot.
path_to_goma: Path to goma directory.
path_to_extra_src: Path to extra source file.
dry_run: Do a dry run, skipping sync, build, and performance testing steps.
Returns:
An exit status code: 0 on success, otherwise 1.
"""
_PrintConfigStep(config)
# Construct the basic command with all necessary arguments.
cmd = [
'python',
os.path.join(BISECT_SCRIPT_DIR, 'bisect_perf_regression.py'),
'--command', config['command'],
'--good_revision', config['good_revision'],
'--bad_revision', config['bad_revision'],
'--working_directory', working_directory,
'--output_buildbot_annotations'
]
# Add flags for any optional config parameters if given in the config.
options = [
('metric', '--metric'),
('repeat_count', '--repeat_test_count'),
('truncate_percent', '--truncate_percent'),
('max_time_minutes', '--max_time_minutes'),
('bisect_mode', '--bisect_mode'),
('improvement_direction', '--improvement_direction'),
('bug_id', '--bug_id'),
('try_job_id', '--try_job_id'),
('builder_type', '--builder_type'),
('target_arch', '--target_arch'),
('required_initial_confidence', '--required_initial_confidence'),
]
for config_key, flag in options:
if config.has_key(config_key):
cmd.extend([flag, config[config_key]])
cmd.extend(['--build_preference', 'ninja'])
# Possibly set the target platform name based on the browser name in a
# Telemetry command.
if 'android-chromium' in config['command']:
cmd.extend(['--target_platform', 'android'])
elif 'android-chrome' in config['command']:
cmd.extend(['--target_platform', 'android-chrome'])
elif 'android' in config['command']:
cmd.extend(['--target_platform', 'android'])
if path_to_goma:
    # Goma is not supported on Windows XP. Moreover, when the gs_bucket flag is
    # set we use archived builds instead of compiling Chrome, so goma is
    # ignored on Windows XP.
# See http://crbug.com/330900.
if platform.release() == 'XP':
print ('Goma doesn\'t have a win32 binary, therefore it is not supported '
'on Windows XP platform. Please refer to crbug.com/330900.')
path_to_goma = None
cmd.append('--use_goma')
cmd.append('--goma_dir')
cmd.append(os.path.abspath(path_to_goma))
if path_to_extra_src:
cmd.extend(['--extra_src', path_to_extra_src])
if dry_run:
cmd.extend([
'--debug_ignore_build',
'--debug_ignore_sync',
'--debug_ignore_perf_test'
])
cmd = [str(c) for c in cmd]
with Goma(path_to_goma) as _:
return_code = subprocess.call(cmd)
if return_code:
print ('Error: bisect_perf_regression.py returned with error %d\n'
% return_code)
return return_code
def _PrintConfigStep(config):
"""Prints out the given config, along with Buildbot annotations."""
bisect_utils.OutputAnnotationStepStart('Config')
print
for k, v in config.iteritems():
print ' %s : %s' % (k, v)
print
bisect_utils.OutputAnnotationStepClosed()
def _GetBrowserType(bot_platform):
"""Gets the browser type to be used in the run benchmark command."""
if bot_platform == 'android':
return 'android-chromium'
elif 'x64' in bot_platform:
return 'release_x64'
return 'release'
def _GuessTelemetryTestCommand(bot_platform, test_name=None):
"""Creates a Telemetry benchmark command based on bot and test name."""
command = []
# On Windows, Python scripts should be prefixed with the python command.
if bot_platform == 'win':
command.append('python')
command.append('tools/perf/run_benchmark')
command.append('-v')
command.append('--browser=%s' % _GetBrowserType(bot_platform))
if test_name:
command.append(test_name)
return ' '.join(command)
def _GetConfigBasedOnPlatform(config, bot_name, test_name):
"""Generates required options to create BisectPerformanceMetrics instance."""
opts_dict = {
'command': _GuessTelemetryTestCommand(bot_name, test_name),
'target_arch': 'x64' if 'x64' in bot_name else 'ia32',
'build_preference': 'ninja',
'output_buildbot_annotations': True,
'repeat_test_count': 1,
'bisect_mode': bisect_utils.BISECT_MODE_RETURN_CODE,
}
if 'use_goma' in config:
opts_dict['use_goma'] = config['use_goma']
if 'goma_dir' in config:
opts_dict['goma_dir'] = config['goma_dir']
if 'android-chromium' in opts_dict['command']:
opts_dict['target_platform'] = 'android'
return bisect_perf_regression.BisectOptions.FromDict(opts_dict)
def _GetModifiedFilesFromPatch(cwd=None):
"""Gets list of files modified in the current patch."""
log_output = bisect_utils.CheckRunGit(
['diff', '--no-ext-diff', '--name-only', 'HEAD~1'], cwd=cwd)
modified_files = log_output.split()
return modified_files
def _GetAffectedBenchmarkModuleNames():
"""Gets list of modified benchmark files under tools/perf/benchmarks."""
all_affected_files = _GetModifiedFilesFromPatch()
modified_benchmarks = []
for affected_file in all_affected_files:
if (affected_file.startswith(PERF_BENCHMARKS_PATH) or
affected_file.startswith(PERF_MEASUREMENTS_PATH)):
benchmark = os.path.basename(os.path.splitext(affected_file)[0])
modified_benchmarks.append(benchmark)
return modified_benchmarks
def _ListAvailableBenchmarks(bot_platform):
"""Gets all available benchmarks names as a list."""
browser_type = _GetBrowserType(bot_platform)
if os.path.exists(BENCHMARKS_JSON_FILE):
os.remove(BENCHMARKS_JSON_FILE)
command = []
if 'win' in bot_platform:
command.append('python')
command.append('tools/perf/run_benchmark')
command.extend([
'list',
'--browser',
browser_type,
'--json-output',
BENCHMARKS_JSON_FILE])
try:
output, return_code = bisect_utils.RunProcessAndRetrieveOutput(
command=command, cwd=SRC_DIR)
if return_code:
raise RuntimeError('Something went wrong while listing benchmarks. '
'Please review the command line: %s.\nERROR: [%s]' %
(' '.join(command), output))
with open(BENCHMARKS_JSON_FILE) as tests_json:
tests_data = json.load(tests_json)
if tests_data.get('steps'):
return tests_data.get('steps').keys()
finally:
try:
if os.path.exists(BENCHMARKS_JSON_FILE):
os.remove(BENCHMARKS_JSON_FILE)
except OSError as e:
if e.errno != errno.ENOENT:
raise
return None
def _OutputOverallResults(results):
"""Creates results step and prints results on buildbot job."""
test_status = all(current_value == True for current_value in results.values())
bisect_utils.OutputAnnotationStepStart(
'Results - %s' % ('Passed' if test_status else 'Failed'))
print
print 'Results of benchmarks:'
print
for benchmark, result in results.iteritems():
print '%s: %s' % (benchmark, 'Passed' if result else 'Failed')
if not test_status:
bisect_utils.OutputAnnotationStepFailure()
bisect_utils.OutputAnnotationStepClosed()
# Returns 0 for success and 1 for failure.
return 0 if test_status else 1
def _RunBenchmark(bisect_instance, opts, bot_name, benchmark_name):
"""Runs a Telemetry benchmark."""
bisect_utils.OutputAnnotationStepStart(benchmark_name)
command_to_run = _GuessTelemetryTestCommand(bot_name, benchmark_name)
args = shlex.split(command_to_run, posix=not bisect_utils.IsWindowsHost())
output, return_code = bisect_utils.RunProcessAndRetrieveOutput(args, SRC_DIR)
# A value other than 0 indicates that the test couldn't be run, and results
# should also include an error message.
if return_code:
    print ('Error: Something went wrong running the benchmark: %s. '
           'Please review the command line: %s\n\n%s' %
(benchmark_name, command_to_run, output))
bisect_utils.OutputAnnotationStepFailure()
print output
bisect_utils.OutputAnnotationStepClosed()
  # return_code holds the exit status of the subprocess that executed the test
  # command: 0 on a successful run, any non-zero value otherwise.
return return_code == 0
def _RunBenchmarksForCommitQueue(config):
"""Runs Telemetry benchmark for the commit queue."""
os.chdir(SRC_DIR)
  # Determine the bot platform by reading the buildbot name from the
  # environment variable.
bot_name = os.environ.get(BUILDBOT_BUILDERNAME)
if not bot_name:
bot_name = sys.platform
bot_name = bot_name.split('_')[0]
affected_benchmarks = _GetAffectedBenchmarkModuleNames()
  # Abort if the patch did not modify any existing benchmark files.
if not affected_benchmarks:
bisect_utils.OutputAnnotationStepStart('Results')
print
    print ('There are no modifications to Telemetry benchmarks,'
' aborting the try job.')
bisect_utils.OutputAnnotationStepClosed()
return 0
# Bisect script expects to be run from the src directory
  # Gets required options in order to create a BisectPerformanceMetrics instance.
# Since command is a required arg in BisectPerformanceMetrics, we just create
# a dummy command for now.
opts = _GetConfigBasedOnPlatform(config, bot_name, test_name='')
annotations_dict = _GetStepAnnotationStringsDict(config)
b = bisect_perf_regression.BisectPerformanceMetrics(opts, os.getcwd())
_RunBuildStepForPerformanceTest(b,
annotations_dict.get('build1'),
annotations_dict.get('sync1'),
None)
available_benchmarks = _ListAvailableBenchmarks(bot_name)
overall_results = {}
for affected_benchmark in affected_benchmarks:
for benchmark in available_benchmarks:
if (benchmark.startswith(affected_benchmark) and
not benchmark.endswith('reference')):
overall_results[benchmark] = _RunBenchmark(b, opts, bot_name, benchmark)
return _OutputOverallResults(overall_results)
def _OptionParser():
"""Returns the options parser for run-bisect-perf-regression.py."""
def ConvertJson(option, _, value, parser):
"""Provides an OptionParser callback to unmarshal a JSON string."""
setattr(parser.values, option.dest, json.loads(value))
usage = ('%prog [options] [-- chromium-options]\n'
'Used by a try bot to run the bisection script using the parameters'
' provided in the auto_bisect/bisect.cfg file.')
parser = optparse.OptionParser(usage=usage)
parser.add_option('-w', '--working_directory',
type='str',
help='A working directory to supply to the bisection '
'script, which will use it as the location to checkout '
'a copy of the chromium depot.')
parser.add_option('-p', '--path_to_goma',
type='str',
help='Path to goma directory. If this is supplied, goma '
'builds will be enabled.')
parser.add_option('--path_to_config',
type='str',
help='Path to the config file to use. If this is supplied, '
'the bisect script will use this to override the default '
'config file path. The script will attempt to load it '
'as a bisect config first, then a perf config.')
parser.add_option('--extra_src',
type='str',
help='Path to extra source file. If this is supplied, '
'bisect script will use this to override default behavior.')
parser.add_option('--dry_run',
action="store_true",
help='The script will perform the full bisect, but '
'without syncing, building, or running the performance '
'tests.')
# This argument is passed by buildbot to supply build properties to the bisect
# script. Note: Don't change "--build-properties" property name.
parser.add_option('--build-properties', action='callback',
dest='build_properties',
callback=ConvertJson, type='string',
nargs=1, default={},
help='build properties in JSON format')
return parser
def main():
"""Entry point for run-bisect-perf-regression.py.
Reads the config file, and then tries to either bisect a regression or
just run a performance test, depending on the particular config parameters
specified in the config file.
"""
parser = _OptionParser()
opts, _ = parser.parse_args()
# Use the default config file path unless one was specified.
config_path = BISECT_CONFIG_PATH
if opts.path_to_config:
config_path = opts.path_to_config
config = _LoadConfigFile(config_path)
# Check if the config is valid for running bisect job.
config_is_valid = _ValidateBisectConfigFile(config)
if config and config_is_valid:
if not opts.working_directory:
print 'Error: missing required parameter: --working_directory\n'
parser.print_help()
return 1
return _RunBisectionScript(
config, opts.working_directory, opts.path_to_goma, opts.extra_src,
opts.dry_run)
# If it wasn't valid for running a bisect, then maybe the user wanted
# to run a perf test instead of a bisect job. Try reading any possible
# perf test config files.
perf_cfg_files = [RUN_TEST_CONFIG_PATH, WEBKIT_RUN_TEST_CONFIG_PATH]
for current_perf_cfg_file in perf_cfg_files:
if opts.path_to_config:
path_to_perf_cfg = opts.path_to_config
else:
path_to_perf_cfg = os.path.join(
os.path.abspath(os.path.dirname(sys.argv[0])),
current_perf_cfg_file)
config = _LoadConfigFile(path_to_perf_cfg)
config_is_valid = _ValidatePerfConfigFile(config)
if config and config_is_valid:
return _SetupAndRunPerformanceTest(config, opts.path_to_goma)
  # If no valid config file was found, check whether the request came from the
  # commit queue; if so, run the Telemetry benchmarks modified by the patch.
if opts.build_properties.get('requester') in _COMMIT_QUEUE_USERS:
return _SetupAndRunPerformanceTest(
config={}, path_to_goma=opts.path_to_goma, is_cq_tryjob=True)
print ('Error: Could not load config file. Double check your changes to '
'auto_bisect/bisect.cfg or run-perf-test.cfg for syntax errors.\n')
return 1
if __name__ == '__main__':
sys.exit(main())
|
heke123/chromium-crosswalk
|
tools/run-bisect-perf-regression.py
|
Python
|
bsd-3-clause
| 31,904
|
#!/usr/bin/python
import os
import commands
import termios
import sys
def enable_echo(fd, enabled):
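    # Toggles terminal echo on the given fd so the password prompt below does
    # not display what the user types.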
(iflag, oflag, cflag, lflag, ispeed, ospeed, cc) = termios.tcgetattr (fd)
if enabled:
lflag |= termios.ECHO
else:
lflag &= ~termios.ECHO
new_attr = [iflag, oflag, cflag, lflag, ispeed, ospeed, cc]
termios.tcsetattr(fd, termios.TCSANOW, new_attr)
return
def gen_password (passvalue):
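    # Produces an uppercase MD5 hex digest of the password with a ':' inserted
    # after every two hex digits (e.g. "AB:CD:EF:..."), which appears to be the
    # format expected by the generated auth-db.xml.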
import hashlib
value = hashlib.md5 (passvalue).hexdigest ().upper()
hash_len = len (value) * 1.5 -1
iterator = 2
while iterator < hash_len:
value = value[:iterator] + ":" + value[iterator:]
iterator += 3
return value
# track if we should reboot
should_reboot = False
if os.geteuid () != 0:
print ("ERROR: only root can run this tool")
sys.exit (-1)
# get base installation
(status, output) = commands.getstatusoutput ("turbulence --conf-location | grep SYSCONFDIR")
if status:
print ("ERROR: turbulence get location command failed with status %d, error: %s" % (status, output))
sys.exit (-1)
output = output.replace ("SYSCONFDIR:", "")
baseconfdir = output.replace (" ", "")
print ("INFO: found base configuration at %s/turbulence" % baseconfdir)
# check if radmin directory exists
if not os.path.exists ("%s/turbulence/radmin" % baseconfdir):
print ("INFO: creating directory: %s/turbulence/radmin" % baseconfdir)
os.mkdir ("%s/turbulence/radmin" % baseconfdir)
# ensure permissions are right
(status, output) = commands.getstatusoutput ("chmod o-rwx %s/turbulence/radmin" % baseconfdir)
if status:
print ("ERROR: failed to ensure permissions inside %s/turbulence/radmin directory" % baseconfdir)
sys.exit (-1)
# create radmin.conf
if not os.path.exists ("%s/turbulence/profile.d/radmin.conf" % baseconfdir):
import hashlib
import random
# build serverName
serverName = hashlib.md5 (str(random.random ())).hexdigest ()
print ("INFO: creating %s/turbulence/profile.d/radmin.conf" % baseconfdir)
open ("%s/turbulence/profile.d/radmin.conf" % baseconfdir, "w").write ("<!-- profile path to load mod-radmin from localhost -->\n\
<path-def server-name='%s' \n\
src='127.0.0.1' \n\
path-name='local radmin' \n\
work-dir='%s/turbulence/radmin'>\n\
<if-success profile='http://iana.org/beep/SASL/.*' connmark='sasl:is:authenticated' >\n\
<allow profile='urn:aspl.es:beep:profiles:radmin-ctl' />\n\
</if-success>\n\
</path-def>" % (serverName, baseconfdir))
# ensure permissions are right
(status, output) = commands.getstatusoutput ("chmod o-rwx %s/turbulence/profile.d/radmin.conf" % baseconfdir)
if status:
print ("ERROR: failed to ensure permissions for %s/turbulence/profile.d/radmin.conf file" % baseconfdir)
sys.exit (-1)
# flag we have to reboot
should_reboot = True
# create sasl.conf
if not os.path.exists ("%s/turbulence/radmin/sasl.conf" % baseconfdir):
print ("INFO: creating %s/turbulence/radmin/sasl.conf" % baseconfdir)
open ("%s/turbulence/radmin/sasl.conf" % baseconfdir, "w").write ('<mod-sasl>\n\
<auth-db remote-admins="remote-admins.xml" \n\
remote="no" \n\
format="md5" \n\
location="auth-db.xml" \n\
type="xml" />\n\
<method-allowed>\n\
<method value="plain" />\n\
</method-allowed>\n\
<login-options>\n\
<max-allowed-tries value="3" action="drop"/>\n\
<accounts-disabled action="drop" />\n\
</login-options>\n\
</mod-sasl>')
# create auth-db.xml
if not os.path.exists ("%s/turbulence/radmin/auth-db.xml" % baseconfdir):
print ("No database found, creating one. For this, we need a user and a password")
user = raw_input ("Auth login to create: " ).strip ()
enable_echo (1, False)
password = raw_input ("Type password: " ).strip ()
enable_echo (1, True)
print ""
# gen password
password = gen_password (password)
print ("INFO: creating %s/turbulence/radmin/auth-db.xml" % baseconfdir)
open ("%s/turbulence/radmin/auth-db.xml" % baseconfdir, "w").write ("<sasl-auth-db>\n\
<auth user_id='%s' password='%s' disabled='no'/>\n\
</sasl-auth-db>" % (user, password))
# try to enable module if not
if not os.path.exists ("%s/turbulence/mods-enabled/mod_radmin.xml" % baseconfdir):
print ("INFO: enabling mod-radmin module")
(status, output) = commands.getstatusoutput ("ln -s %s/turbulence/mods-available/mod_radmin.xml %s/turbulence/mods-enabled/mod_radmin.xml" % (baseconfdir, baseconfdir))
if status:
print ("INFO: failed to enable module, ln command failed: %s" % output)
sys.exit (-1)
# flag you should reboot
should_reboot = True
print ("INFO: configuration done!")
if should_reboot:
print ("INFO: you must reboot your turbulence server to make changes effective")
|
ASPLes/turbulence
|
tools/tbc-ctl/tbc-setup-mod-radmin.py
|
Python
|
lgpl-2.1
| 4,910
|
from Tools.Profile import profile
from Tools.BoundFunction import boundFunction
# workaround for required config entry dependencies.
import Screens.MovieSelection
from Screen import Screen
from Screens.MessageBox import MessageBox
profile("LOAD:enigma")
import enigma
profile("LOAD:InfoBarGenerics")
from Screens.InfoBarGenerics import InfoBarShowHide, \
InfoBarNumberZap, InfoBarChannelSelection, InfoBarMenu, InfoBarRdsDecoder, \
InfoBarEPG, InfoBarSeek, InfoBarInstantRecord, InfoBarRedButton, InfoBarTimerButton, InfoBarVmodeButton, \
InfoBarAudioSelection, InfoBarAdditionalInfo, InfoBarNotifications, InfoBarDish, InfoBarUnhandledKey, \
InfoBarSubserviceSelection, InfoBarShowMovies, InfoBarTimeshift, \
InfoBarServiceNotifications, InfoBarPVRState, InfoBarCueSheetSupport, InfoBarSimpleEventView, \
InfoBarSummarySupport, InfoBarMoviePlayerSummarySupport, InfoBarTimeshiftState, InfoBarTeletextPlugin, InfoBarExtensions, \
InfoBarSubtitleSupport, InfoBarPiP, InfoBarPlugins, InfoBarServiceErrorPopupSupport, InfoBarJobman, InfoBarPowersaver, \
setResumePoint, delResumePoint
profile("LOAD:InitBar_Components")
from Components.ActionMap import HelpableActionMap
from Components.config import config
from Components.ServiceEventTracker import ServiceEventTracker, InfoBarBase
profile("LOAD:HelpableScreen")
from Screens.HelpMenu import HelpableScreen
class InfoBar(InfoBarBase, InfoBarShowHide,
InfoBarNumberZap, InfoBarChannelSelection, InfoBarMenu, InfoBarEPG, InfoBarRdsDecoder,
InfoBarInstantRecord, InfoBarAudioSelection, InfoBarRedButton, InfoBarTimerButton, InfoBarVmodeButton,
HelpableScreen, InfoBarAdditionalInfo, InfoBarNotifications, InfoBarDish, InfoBarUnhandledKey,
InfoBarSubserviceSelection, InfoBarTimeshift, InfoBarSeek, InfoBarCueSheetSupport,
InfoBarSummarySupport, InfoBarTimeshiftState, InfoBarTeletextPlugin, InfoBarExtensions,
InfoBarPiP, InfoBarPlugins, InfoBarSubtitleSupport, InfoBarServiceErrorPopupSupport, InfoBarJobman, InfoBarPowersaver,
Screen):
ALLOW_SUSPEND = True
instance = None
def __init__(self, session):
Screen.__init__(self, session)
self["actions"] = HelpableActionMap(self, "InfobarActions",
{
"showMovies": (self.showMovies, _("Play recorded movies...")),
"toogleTvRadio": (self.toogleTvRadio, _("toggels betwenn tv and radio...")),
"openTimerList": (self.openTimerList, _("Show the tv player...")),
"showMediaPlayer": (self.showMediaPlayer, _("Show the media player...")),
}, prio=2)
self.allowPiP = True
self.radioTV = 0
for x in HelpableScreen, \
InfoBarBase, InfoBarShowHide, \
InfoBarNumberZap, InfoBarChannelSelection, InfoBarMenu, InfoBarEPG, InfoBarRdsDecoder, \
InfoBarInstantRecord, InfoBarAudioSelection, InfoBarRedButton, InfoBarTimerButton, InfoBarUnhandledKey, InfoBarVmodeButton,\
InfoBarAdditionalInfo, InfoBarNotifications, InfoBarDish, InfoBarSubserviceSelection, \
InfoBarTimeshift, InfoBarSeek, InfoBarCueSheetSupport, InfoBarSummarySupport, InfoBarTimeshiftState, \
InfoBarTeletextPlugin, InfoBarExtensions, InfoBarPiP, InfoBarSubtitleSupport, InfoBarJobman, InfoBarPowersaver, \
InfoBarPlugins, InfoBarServiceErrorPopupSupport:
x.__init__(self)
self.helpList.append((self["actions"], "InfobarActions", [("showMovies", _("Watch recordings..."))]))
self.helpList.append((self["actions"], "InfobarActions", [("showRadio", _("Listen to the radio..."))]))
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
enigma.iPlayableService.evUpdatedEventInfo: self.__eventInfoChanged
})
self.current_begin_time=0
assert InfoBar.instance is None, "class InfoBar is a singleton class and just one instance of this class is allowed!"
InfoBar.instance = self
def __onClose(self):
InfoBar.instance = None
def __eventInfoChanged(self):
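		# Fired whenever event info for the current service is updated; pops up
		# the infobar on a programme change if the corresponding option is enabled.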
if self.execing:
service = self.session.nav.getCurrentService()
old_begin_time = self.current_begin_time
info = service and service.info()
ptr = info and info.getEvent(0)
self.current_begin_time = ptr and ptr.getBeginTime() or 0
if config.usage.show_infobar_on_event_change.value:
if old_begin_time and old_begin_time != self.current_begin_time:
self.doShow()
def __checkServiceStarted(self):
self.__serviceStarted(True)
self.onExecBegin.remove(self.__checkServiceStarted)
def serviceStarted(self): #override from InfoBarShowHide
new = self.servicelist.newServicePlayed()
if self.execing:
InfoBarShowHide.serviceStarted(self)
self.current_begin_time=0
elif not self.__checkServiceStarted in self.onShown and new:
self.onShown.append(self.__checkServiceStarted)
def __checkServiceStarted(self):
self.serviceStarted()
self.onShown.remove(self.__checkServiceStarted)
def showTv(self):
self.showTvChannelList(True)
def showRadio(self):
if config.usage.e1like_radio_mode.value:
self.showRadioChannelList(True)
else:
self.rds_display.hide() # in InfoBarRdsDecoder
from Screens.ChannelSelection import ChannelSelectionRadio
self.session.openWithCallback(self.ChannelSelectionRadioClosed, ChannelSelectionRadio, self)
def toogleTvRadio(self):
if self.radioTV == 1:
self.radioTV = 0
self.showTv()
else:
self.radioTV = 1
self.showRadio()
def ChannelSelectionRadioClosed(self, *arg):
self.rds_display.show() # in InfoBarRdsDecoder
def showMovies(self, defaultRef=None):
self.lastservice = self.session.nav.getCurrentlyPlayingServiceOrGroup()
self.session.openWithCallback(self.movieSelected, Screens.MovieSelection.MovieSelection, defaultRef, timeshiftEnabled = self.timeshiftEnabled())
def movieSelected(self, service):
ref = self.lastservice
del self.lastservice
if service is None:
if ref and not self.session.nav.getCurrentlyPlayingServiceOrGroup():
self.session.nav.playService(ref)
else:
self.session.open(MoviePlayer, service, slist = self.servicelist, lastservice = ref)
def openTimerList(self):
from Screens.TimerEdit import TimerEditList
self.session.open(TimerEditList)
def showMediaPlayer(self):
try:
from Plugins.Extensions.MediaPlayer.plugin import MediaPlayer
self.session.open(MediaPlayer)
no_plugin = False
except Exception, e:
self.session.open(MessageBox, _("The MediaPlayer plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
class MoviePlayer(InfoBarBase, InfoBarShowHide, \
InfoBarMenu, \
InfoBarSeek, InfoBarShowMovies, InfoBarInstantRecord, InfoBarAudioSelection, HelpableScreen, InfoBarNotifications,
InfoBarServiceNotifications, InfoBarPVRState, InfoBarCueSheetSupport, InfoBarSimpleEventView,
InfoBarMoviePlayerSummarySupport, InfoBarSubtitleSupport, Screen, InfoBarTeletextPlugin,
InfoBarServiceErrorPopupSupport, InfoBarExtensions, InfoBarPlugins, InfoBarPiP):
ENABLE_RESUME_SUPPORT = True
ALLOW_SUSPEND = True
def __init__(self, session, service, slist = None, lastservice = None):
Screen.__init__(self, session)
self["actions"] = HelpableActionMap(self, "MoviePlayerActions",
{
"leavePlayer": (self.leavePlayer, _("leave movie player...")),
"leavePlayerOnExit": (self.leavePlayerOnExit, _("leave movie player..."))
})
self["DirectionActions"] = HelpableActionMap(self, "DirectionActions",
{
"left": self.left,
"right": self.right
}, prio = -2)
self.allowPiP = True
for x in HelpableScreen, InfoBarShowHide, InfoBarMenu, \
InfoBarBase, InfoBarSeek, InfoBarShowMovies, InfoBarInstantRecord, \
InfoBarAudioSelection, InfoBarNotifications, InfoBarSimpleEventView, \
InfoBarServiceNotifications, InfoBarPVRState, InfoBarCueSheetSupport, \
InfoBarMoviePlayerSummarySupport, InfoBarSubtitleSupport, \
InfoBarTeletextPlugin, InfoBarServiceErrorPopupSupport, InfoBarExtensions, \
InfoBarPlugins, InfoBarPiP:
x.__init__(self)
self.servicelist = slist
self.lastservice = lastservice or session.nav.getCurrentlyPlayingServiceOrGroup()
session.nav.playService(service)
self.cur_service = service
self.returning = False
self.onClose.append(self.__onClose)
def __onClose(self):
from Screens.MovieSelection import playlist
del playlist[:]
self.session.nav.playService(self.lastservice)
def handleLeave(self, how):
self.is_closing = True
if how == "ask":
if config.usage.setup_level.index < 2: # -expert
list = (
(_("Yes"), "quit"),
(_("No"), "continue")
)
else:
list = (
(_("Yes"), "quit"),
(_("Yes, returning to movie list"), "movielist"),
(_("Yes, and delete this movie"), "quitanddelete"),
(_("No"), "continue"),
(_("No, but restart from begin"), "restart")
)
from Screens.ChoiceBox import ChoiceBox
self.session.openWithCallback(self.leavePlayerConfirmed, ChoiceBox, title=_("Stop playing this movie?"), list = list)
else:
self.leavePlayerConfirmed([True, how])
def leavePlayer(self):
setResumePoint(self.session)
self.handleLeave(config.usage.on_movie_stop.value)
def leavePlayerOnExit(self):
if self.shown:
self.hide()
elif self.session.pipshown and "popup" in config.usage.pip_hideOnExit.value:
if config.usage.pip_hideOnExit.value == "popup":
self.session.openWithCallback(self.hidePipOnExitCallback, MessageBox, _("Disable Picture in Picture"), simple=True)
else:
self.hidePipOnExitCallback(True)
elif config.usage.leave_movieplayer_onExit.value == "popup":
self.session.openWithCallback(self.leavePlayerOnExitCallback, MessageBox, _("Exit movie player?"), simple=True)
elif config.usage.leave_movieplayer_onExit.value == "without popup":
self.leavePlayerOnExitCallback(True)
def leavePlayerOnExitCallback(self, answer):
if answer == True:
setResumePoint(self.session)
self.handleLeave("quit")
def hidePipOnExitCallback(self, answer):
if answer == True:
self.showPiP()
def deleteConfirmed(self, answer):
if answer:
self.leavePlayerConfirmed((True, "quitanddeleteconfirmed"))
def leavePlayerConfirmed(self, answer):
answer = answer and answer[1]
if answer is None:
return
if answer in ("quitanddelete", "quitanddeleteconfirmed"):
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
serviceHandler = enigma.eServiceCenter.getInstance()
if answer == "quitanddelete":
msg = ''
if config.usage.movielist_trashcan.value:
import Tools.Trashcan
try:
trash = Tools.Trashcan.createTrashFolder(ref.getPath())
Screens.MovieSelection.moveServiceFiles(ref, trash)
# Moved to trash, okay
self.close()
return
except Exception, e:
print "[InfoBar] Failed to move to .Trash folder:", e
msg = _("Cannot move to trash can") + "\n" + str(e) + "\n"
info = serviceHandler.info(ref)
name = info and info.getName(ref) or _("this recording")
msg += _("Do you really want to delete %s?") % name
self.session.openWithCallback(self.deleteConfirmed, MessageBox, msg)
return
elif answer == "quitanddeleteconfirmed":
offline = serviceHandler.offlineOperations(ref)
if offline.deleteFromDisk(0):
self.session.openWithCallback(self.close, MessageBox, _("You cannot delete this!"), MessageBox.TYPE_ERROR)
return
if answer in ("quit", "quitanddeleteconfirmed"):
self.close()
elif answer == "movielist":
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
self.returning = True
self.session.openWithCallback(self.movieSelected, Screens.MovieSelection.MovieSelection, ref)
self.session.nav.stopService()
elif answer == "restart":
self.doSeek(0)
self.setSeekState(self.SEEK_STATE_PLAY)
elif answer in ("playlist","playlistquit","loop"):
( next_service, item , lenght ) = self.getPlaylistServiceInfo(self.cur_service)
if next_service is not None:
if config.usage.next_movie_msg.value:
self.displayPlayedName(next_service, item, lenght)
self.session.nav.playService(next_service)
self.cur_service = next_service
else:
if answer == "playlist":
self.leavePlayerConfirmed([True,"movielist"])
elif answer == "loop" and lenght > 0:
self.leavePlayerConfirmed([True,"loop"])
else:
self.leavePlayerConfirmed([True,"quit"])
elif answer in ("repeatcurrent"):
if config.usage.next_movie_msg.value:
(item, lenght) = self.getPlaylistServiceInfo(self.cur_service)
self.displayPlayedName(self.cur_service, item, lenght)
self.session.nav.stopService()
self.session.nav.playService(self.cur_service)
def doEofInternal(self, playing):
if not self.execing:
return
if not playing :
return
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if ref:
delResumePoint(ref)
self.handleLeave(config.usage.on_movie_eof.value)
def up(self):
slist = self.servicelist
if slist and slist.dopipzap:
slist.moveUp()
self.session.execDialog(slist)
else:
self.showMovies()
def down(self):
slist = self.servicelist
if slist and slist.dopipzap:
slist.moveDown()
self.session.execDialog(slist)
else:
self.showMovies()
def right(self):
# XXX: gross hack, we do not really seek if changing channel in pip :-)
slist = self.servicelist
if slist and slist.dopipzap:
# XXX: We replicate InfoBarChannelSelection.zapDown here - we shouldn't do that
if slist.inBouquet():
prev = slist.getCurrentSelection()
if prev:
prev = prev.toString()
while True:
if config.usage.quickzap_bouquet_change.value and slist.atEnd():
slist.nextBouquet()
else:
slist.moveDown()
cur = slist.getCurrentSelection()
if not cur or (not (cur.flags & 64)) or cur.toString() == prev:
break
else:
slist.moveDown()
slist.zap(enable_pipzap = True)
else:
InfoBarSeek.seekFwd(self)
def left(self):
slist = self.servicelist
if slist and slist.dopipzap:
# XXX: We replicate InfoBarChannelSelection.zapUp here - we shouldn't do that
if slist.inBouquet():
prev = slist.getCurrentSelection()
if prev:
prev = prev.toString()
while True:
if config.usage.quickzap_bouquet_change.value:
if slist.atBegin():
slist.prevBouquet()
slist.moveUp()
cur = slist.getCurrentSelection()
if not cur or (not (cur.flags & 64)) or cur.toString() == prev:
break
else:
slist.moveUp()
slist.zap(enable_pipzap = True)
else:
InfoBarSeek.seekBack(self)
def showPiP(self):
slist = self.servicelist
if self.session.pipshown:
if slist and slist.dopipzap:
slist.togglePipzap()
del self.session.pip
self.session.pipshown = False
else:
from Screens.PictureInPicture import PictureInPicture
self.session.pip = self.session.instantiateDialog(PictureInPicture)
self.session.pip.show()
self.session.pipshown = True
self.session.pip.playService(slist.getCurrentSelection())
def swapPiP(self):
pass
def showMovies(self):
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
self.playingservice = ref # movie list may change the currently playing
self.session.openWithCallback(self.movieSelected, Screens.MovieSelection.MovieSelection, ref)
def movieSelected(self, service):
if service is not None:
self.cur_service = service
self.is_closing = False
self.session.nav.playService(service)
self.returning = False
elif self.returning:
self.close()
else:
self.is_closing = False
ref = self.playingservice
del self.playingservice
# no selection? Continue where we left off
if ref and not self.session.nav.getCurrentlyPlayingServiceOrGroup():
self.session.nav.playService(ref)
def getPlaylistServiceInfo(self, service):
from MovieSelection import playlist
for i, item in enumerate(playlist):
if item == service:
if config.usage.on_movie_eof.value == "repeatcurrent":
return (i+1, len(playlist))
i += 1
if i < len(playlist):
return (playlist[i], i+1, len(playlist))
elif config.usage.on_movie_eof.value == "loop":
return (playlist[0], 1, len(playlist))
return ( None, 0, 0 )
def displayPlayedName(self, ref, index, n):
from Tools import Notifications
Notifications.AddPopup(text = _("%s/%s: %s") % (index, n, self.ref2HumanName(ref)), type = MessageBox.TYPE_INFO, timeout = 5)
def ref2HumanName(self, ref):
return enigma.eServiceCenter.getInstance().info(ref).getName(ref)
|
postla/e2-gui
|
lib/python/Screens/InfoBar.py
|
Python
|
gpl-2.0
| 16,344
|
#!/usr/bin/python3
import random
from itertools import chain, repeat
cards = {
'green': 0,
'white': 0,
'blue': 0,
'red': 0,
'gold': 0,
'colorless': 0
}
def set_cards():
for color in cards:
prompt_string = '# of {} cards: '.format(color.capitalize())
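        # Lazily repeat the prompt until the reply is purely numeric; the first
        # valid reply is pulled out of the filtered iterator below.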
prompts = chain([prompt_string], repeat("Not a valid number. Try again: "))
replies = map(input, prompts)
valid_response = next(filter(str.isdigit, replies))
cards[color] = int(valid_response)
return cards
set_cards()
boost_size = int(input('# of cards per booster pack: '))
booster_index = 0
booster_counter = 0
boost_again = True
boost_success = True
while boost_again:
booster_counter += 1
print ("\nGenerating booster pack #" + str(booster_counter) + "\n")
boost_query = ' '
for booster_index in range(0, boost_size):
card_total = sum(cards.values())
if card_total == 0 :
boost_success = False
print("Insufficient cards to complete booster pack.\n")
break
card_selected = random.choice([color for color in cards if cards[color] > 0])
cards[card_selected] -= 1
print( str(booster_index + 1) + ". ) " + card_selected.capitalize())
if boost_success :
print("\nBooster pack complete.\n")
while (boost_query.lower() != 'y' and boost_query.lower() != 'n'):
if card_total == 0:
print('Not enough remaining cards to complete a booster pack')
print('\nExiting program.')
boost_query = 'n'
else:
boost_query = input('Create a new booster pack? (y/n)')
if boost_query.lower() == 'n':
boost_again = False
|
LudwigTirazona/mtg-booster-maker
|
booster-maker.py
|
Python
|
gpl-2.0
| 1,728
|
from pydomainr import PyDomainr
dom = PyDomainr("naumanahmad.com")
for i in dom.taken_domains():
print i
|
davidhax0r/PyDomainr
|
tests.py
|
Python
|
mit
| 109
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-05-14 21:19
from __future__ import unicode_literals
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
dependencies = [
('logger', '0004_tidy_progress_range'),
]
operations = [
migrations.AddField(
model_name='attemptlog',
name='error',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='examattemptlog',
name='error',
field=models.BooleanField(default=False),
),
]
|
DXCanas/kolibri
|
kolibri/core/logger/migrations/0005_auto_20180514_1419.py
|
Python
|
mit
| 638
|
#!/usr/bin/python -Wall
# ================================================================
# John Kerl
# kerl.john.r@gmail.com
# 2005-11-07
#
# This is a Python library for simple I/O and arithmetic on vectors
# and matrices of floating-point numbers.
#
# Why not use packages such as Numpy? Sometimes, I prefer to have a small,
# simple set of routines which I wrote and completely understand, which do no
# more and no less than what I want them to do. For black-box linear-algebra
# software, you're better off with Matlab, Numpy, etc. The code here is of a
# different sort: simple and simple-minded, and intended to be not just used
# but also read.
#
# ================================================================
# ================================================================
from __future__ import division # 1/2 = 0.5, not 0.
import sys
import copy
import re
import math
import array # For binary I/O
import types
from cplxreal_m import *
# ----------------------------------------------------------------
def frac_reader(s):
pieces = re.split(r'/', s, 2)
if len(pieces) == 2:
return float(pieces[0]) / float(pieces[1])
else:
return float(s)
# ----------------------------------------------------------------
# A keystroke-saver for the matrix constructor.
def m(list):
return sackmat(list)
# ----------------------------------------------------------------
def sm_is_list(p):
return type(p) == type([0])
# ----------------------------------------------------------------
def check_same_matrix_dims(A, B, func_name):
# To do: check is-list twice
[anr, anc] = A.dims()
[bnr, bnc] = B.dims()
if (anr != bnr) or (anc != bnc):
print >> sys.stderr, "%s: mismatched lengths %dx%d, %dx%d." \
% (func_name, anr, anc, bnr, bnc)
sys.exit(1)
return [anr, anc]
# ----------------------------------------------------------------
def check_mul_matrix_dims(A, B, func_name):
# To do: check is-list x 4
[anr, anc] = A.dims()
[bnr, bnc] = B.dims()
if (anc != bnr):
print >> sys.stderr, "%s: mismatched mul lengths %dx%d, %dx%d." % (func_name, anr, anc, bnr, bnc)
sys.exit(1)
return [anr, anc, bnr, bnc]
# ----------------------------------------------------------------
# Use the metric induced by the max norm.
def are_close_in_max_norm(A, B, tol = 1e-10):
[nr, nc] = check_same_matrix_dims(A, B, "sackmat are_close_in_max_norm")
for i in range(0, nr):
for j in range(0, nc):
d = abs(A[i][j] - B[i][j])
if (d > tol):
return 0
return 1
# ----------------------------------------------------------------
def make_zero_matrix(nr, nc):
row = [0] * nc
elts = []
for i in range(0, nr):
elts.append(copy.copy(row))
return sackmat(elts)
# ----------------------------------------------------------------
def make_identity_matrix(n):
I = make_zero_matrix(n, n)
for i in range(0, n):
I[i][i] = 1
return I
# ----------------------------------------------------------------
# This is simply a test pattern.
# 1 2 3
# 4 5 6
# 7 8 9
def make_seq_matrix(nr, nc):
A = make_zero_matrix(nr, nc)
k = 0
for i in range(0, nr):
for j in range(0, nc):
k += 1
A[i][j] = k
return A
# Same, except non-singular.
def make_nseq_matrix(nr, nc):
A = make_zero_matrix(nr, nc)
k = 0
for i in range(0, nr):
for j in range(0, nc):
k += 1
if (i == j):
A[i][j] = -k
else:
A[i][j] = k
return A
# ----------------------------------------------------------------
def matrix_times_vector(A, v):
Av = []
[nr, nc] = A.dims()
n = len(v)
for i in range(0, nr):
Av.append(vecdot(A[i], v))
return Av
# ----------------------------------------------------------------
def vector_times_matrix_times_vector(u, A, v):
return vecdot(u, matrix_times_vector(A, v))
# ----------------------------------------------------------------
def vecadd(u, v):
#n = check_same_list_dims(u, v, "sackmat vecadd")
n = len(u)
w = []
for i in range(0, n):
w.append(u[i] + v[i])
return w
# ----------------------------------------------------------------
def vecaddip(u, v):
#n = check_same_list_dims(u, v, "sackmat vecadd")
n = len(u)
for i in range(0, n):
u[i] += v[i]
# ----------------------------------------------------------------
def vecsadd(u, v, s):
#n = check_same_list_dims(u, v, "sackmat vecadd")
n = len(u)
w = []
for i in range(0, n ):
w.append(u[i] + s*v[i])
return w
# ----------------------------------------------------------------
def vecsaddip(u, v, s):
#n = check_same_list_dims(u, v, "sackmat vecadd")
n = len(u)
for i in range(0, n):
u[i] += s*v[i]
# ----------------------------------------------------------------
def vecsub(u, v):
#n = check_same_list_dims(u, v, "sackmat vecsub")
n = len(u)
w = []
for i in range(0, n):
w.append(u[i] - v[i])
return w
# ----------------------------------------------------------------
def vecmul(u, v):
#n = check_same_list_dims(u, v, "sackmat vecsub")
n = len(u)
w = []
for i in range(0, n):
w.append(u[i] * v[i])
return w
# ----------------------------------------------------------------
def vecdiv(u, v):
#n = check_same_list_dims(u, v, "sackmat vecsub")
n = len(u)
w = []
for i in range(0, n):
w.append(u[i] / v[i])
return w
# ----------------------------------------------------------------
def vecssub(u, v, s):
#n = check_same_list_dims(u, v, "sackmat vecsub")
n = len(u)
w = []
for i in range(0, n ):
w.append(u[i] - s*v[i])
return w
# ----------------------------------------------------------------
def vecdot(u, v):
s = 0
n = len(u)
#n = check_same_list_dims(u, v, "sackmat vecdot")
for i in range(0, n):
s += u[i] * v[i]
return s
# ----------------------------------------------------------------
def vecpair(u, v):
s = 0
n = len(u)
#n = check_same_list_dims(u, v, "sackmat vecdot")
for i in range(0, n):
ui = u[i]
s += u[i] * v[i]
return s
# ----------------------------------------------------------------
def outer(u, v):
m = len(u)
n = len(v)
uv = make_zero_matrix(m, n)
for i in range(0, m):
for j in range(0, n):
uv[i][j] = u[i] * v[j]
return uv
# ----------------------------------------------------------------
def outer1(u):
return outer(u, u)
# ----------------------------------------------------------------
def vecnorm(u):
return math.sqrt(vecdot(u, u))
def vecnormsq(u):
return vecdot(u, u)
def normalize(u):
return vecsmul(u, 1.0 / vecnorm(u))
# ----------------------------------------------------------------
def vecsmul(u, s):
v = []
for ue in u:
v.append(ue * s)
return v
# ----------------------------------------------------------------
def vecsdiv(u, d):
v = []
for ue in u:
v.append(ue / d)
return v
# ----------------------------------------------------------------
def vechat(u):
return vecsmul(u, 1.0/vecnorm(u))
# ----------------------------------------------------------------
def vec_contract(u):
sum = 0.0
n = len(u)
for i in range(0, n):
sum += u[i]
return sum
# ----------------------------------------------------------------
def print_row_vector(v, format="%11.7f"):
n = len(v)
for i in range(0, n):
print format % (v[i]),
print
def print_row_vector_no_cr(v, format="%11.7f"):
n = len(v)
for i in range(0, n):
print format % (v[i]),
def print_row_vectors(vs, format="%11.7f"):
nv = len(vs)
for j in range(0, nv):
print_row_vector(vs[j])
# ----------------------------------------------------------------
def print_column_vector(v, format="%11.7f"):
n = len(v)
for i in range(0, n):
print format % (v[i])
def print_column_vector_to_file(v, file_name, format="%11.7f"):
if (file_name == "-"):
file_handle = sys.stdout
else:
try:
file_handle = open(file_name, 'w')
except:
print >> sys.stderr, \
"Couldn't open \"" + file_name + "\" for write."
sys.exit(1)
n = len(v)
for i in range(0, n):
string = format % (v[i])
file_handle.write(string)
file_handle.write('\n')
if (file_name != "-"):
file_handle.close()
# ----------------------------------------------------------------
def row_vector_from_string(orig_line, elt_scanner):
v = []
line = copy.copy(orig_line)
if line == '':
return v
# Chomp trailing newline, if any.
if line[-1] == '\n':
line = line[0:-1]
# Strip leading and trailing whitespace.
line = re.sub(r"^\s+", r"", line)
line = re.sub(r"\s+$", r"", line)
if line == '':
return v
# Tokenize.
strings = re.split(r"\s+", line)
# Scan. Try exception?
for s in strings:
elt = elt_scanner(s)
v.append(elt)
return v
# ----------------------------------------------------------------
def read_row_vector(elt_scanner, file_name = "-"):
v = []
if file_name == '-':
file_handle = sys.stdin
else:
try:
file_handle = open(file_name, 'r')
except:
print >> sys.stderr, "Couldn't open \"" + file_name + "\" for read."
sys.exit(1)
# Note that "for line in sys.stdin" slurps *all* the input.
# We may not want all of it.
line = ""
while (line == ""):
line = file_handle.readline()
v = row_vector_from_string(line, elt_scanner)
if v == []:
print >> sys.stderr, "sackmat read_row_vector: empty input."
sys.exit(1)
if file_name != '-':
file_handle.close()
return v
# ----------------------------------------------------------------
def read_column_vector(elt_scanner, file_name = "-"):
v = []
if file_name == '-':
file_handle = sys.stdin
else:
try:
file_handle = open(file_name, 'r')
except:
print >> sys.stderr, "Couldn't open \"" + file_name + "\" for read."
sys.exit(1)
# Note that "for line in sys.stdin" slurps *all* the input.
# We may not want all of it.
while (1):
line = file_handle.readline()
if line == '':
break
# Strip comments.
line = re.sub(r"#.*", r"", line)
# Strip leading and trailing whitespace.
line = re.sub(r"^\s+", r"", line)
line = re.sub(r"\s+$", r"", line)
# Skip blank lines.
if re.match(r"^$", line):
continue
elt = elt_scanner(line)
v.append(elt)
if file_name != '-':
file_handle.close()
return v
# ----------------------------------------------------------------
# Standard basis vector.
def stdbv(i, n):
ei = [0] * n
ei[i] = 1
return ei
# ----------------------------------------------------------------
# Utility routine for row reduction
# Return value: True/false (if index was found), and index
def find_leader_pos(v, tol=1e-7):
n = len(v)
for j in range(0, n):
if abs(v[j]) >= tol:
return [1, j]
return [0, 0]
def tol_zero(x, tol=1e-7):
if abs(x) < tol:
return 1
else:
return 0
# ----------------------------------------------------------------
def vector_is_zero(v, tol=1e-7):
n = len(v)
for i in range(0, n):
if not tol_zero(v[i]):
return 0
return 1
# ----------------------------------------------------------------
# projpar: Returns the component of u which is parallel to a.
# projperp: Returns the component of u which is perpendicular to a.
#
# * u = u_par + u_perp where the former is parallel to a and the latter
# is perpendicular.
# * Thus u_perp = u - u_par
# * u_par has magnitude ||u|| cos theta, and direction a.
# * u dot a is ||u|| ||a|| cos theta
# * u_par = ||u|| cos theta a^hat
# = ||u|| cos theta a / ||a||
# = (u dot a) a / ||a||^2
# = (u dot a) a / (a dot a)
# which is a familiar construction from the Gram-Schmidt process.
def projpar(u, a):
n = len(u)
ua = vecdot(u, a)
aa = vecdot(a, a) # To do: needs divide-by-zero check
# The cast to float is in case the inputs are integers.
u_par = vecsmul(a, float(ua)/float(aa))
return u_par
def projperp(u, a):
u_par = projpar(u, a)
u_perp = vecsub(u, u_par)
return u_perp
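# Example (illustrative): with u = [3, 4] and a = [1, 0],
#   projpar(u, a)  gives [3.0, 0.0]  (the component of u along a)
#   projperp(u, a) gives [0.0, 4.0]  (the component of u perpendicular to a)
# and u = u_par + u_perp as claimed above.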
# ----------------------------------------------------------------
# Pastes column vectors into a matrix.
# Example:
#
# u = [1] v = [3] w = [5]
# [2] [4] [6]
#
# A = [1 3 5]
# [2 4 6]
#
# Sample syntax:
# A = sackmat_m.paste_column_vectors([[1, 2], [3, 4], [5, 6]])
def paste_column_vectors(vectors):
nr = len(vectors[0])
nc = len(vectors)
A = make_zero_matrix(nr, nc)
for i in range(0, nr):
for j in range(0, nc):
A[i][j] = vectors[j][i]
return A
# ----------------------------------------------------------------
# Pastes row vectors into a matrix.
# Example:
#
# u = [1 2]
# v = [3 4]
# w = [5 6]
#
# [1 2]
# A = [3 4]
# [5 6]
#
# Sample syntax:
# A = sackmat_m.paste_row_vectors([[1, 2], [3, 4], [5, 6]])
def paste_row_vectors(vectors):
nr = len(vectors)
nc = len(vectors[0])
A = make_zero_matrix(nr, nc)
for i in range(0, nr):
for j in range(0, nc):
A[i][j] = vectors[i][j]
return A
# ----------------------------------------------------------------
# basis_coeffs():
# Coefficients of v with respect to a basis, in the general case.
#
# Example:
#
# v = [3] <---- Input vector
# [4]
#
# u1 = [ 1] u2 = [1] <---- Basis vectors
# [-1] [2]
#
# v = a[ 1] + b[1] <---- a, b are the coefficients to be determined.
# [-1] [2]
#
# = [ a] + [b] = [ a + b] = [ 1 1] [a] = [u1 | u2] [a]
# [-a] [2b] [-a + 2b] [-1 2] [b] [b]
#
# so
#
# [a] = [u1 | u2]^-1 v = [ 1 | 1]^-1 [3]
# [b] [-1 | 2] [4]
#
# = [2/3 | -1/3] [3] = [2/3]
# [1/3 | 1/3] [4] [7/3]
#
# Check:
#
# 2/3[ 1] + 7/3[1] = [ 2/3] + [ 7/3] = [3] = v.
# [-1] [2] [-2/3] [14/3] [4]
#
# Sample syntax:
# basis = [[1, -1], [1, 2]]
# v = [3, 4]
# c = sackmat_m.basis_coeffs(v, basis)
# which results in
# v = [3, 4]
# c = [0.66666666666666652, 2.333333333333333]
def basis_coeffs(v, basis):
A = paste_column_vectors(basis)
Ai = A.inv()
return Ai * v
# ----------------------------------------------------------------
# basis_coeffs_on():
# Coefficients of v with respect to an orthonormal basis.
#
# An arbitrary vector v is
#
# v = sum_{j=1}^N c_j u_j
#
# but dot with u_i:
#
# <u_i, v> = <u_i, sum_{j=1}^N c_j u_j>
# = sum_{j=1}^N c_j <u_i, u_j>
# = sum_{j=1}^N c_j delta_ij
# = c_i
#
# i.e. c_i = <u_i, v>.
def basis_coeffs_on(v, basis):
n = len(basis[0])
coeffs = []
for j in range(0, n):
coeffs.append(vecdot(basis[j], v))
return coeffs
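# Sample syntax (illustrative):
#   basis = [[1, 0], [0, 1]]  # the standard basis, which is orthonormal
#   v = [3, 4]
#   c = sackmat_m.basis_coeffs_on(v, basis)
# which results in
#   c = [3, 4]
# since each coefficient is just <u_i, v>.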
# ----------------------------------------------------------------
# Sample syntax:
# u = sackmat_m.linear_combination([2, 3], [[1,0,0],[0,0,1]])
# v = sackmat_m.linear_combination([2, 3, 4], [[1,0],[0,1],[100,200]])
# which results in
# u = [2.0, 0.0, 3.0]
# v = [402.0, 803.0]
def linear_combination(coeffs, vectors):
n = len(vectors[0])
v = [0.0] * n
nvec = len(vectors)
for veci in range(0, nvec):
for eltj in range(0, n):
v[eltj] += coeffs[veci] * vectors[veci][eltj]
return v
# ----------------------------------------------------------------
# Q = I - 2 v v^t / (v^t v)
#
# Reflection matrices have determinant -1. However, if v is the zero vector
# (or nearly so), the best we can do is to hand back the identity matrix, which
# has determinant +1. The return value is that sign.
def householder_vector_to_Q(v):
# To do: package the tol ...
tol = 1e-10
n = len(v)
v_dot_v = vecdot(v, v)
Q = make_identity_matrix(n)
sign = 1
if v_dot_v >= tol:
two_over_v_dot_v = 2.0 / v_dot_v
for i in range(0, n):
for j in range(0, n):
Q[i][j] -= v[i]*v[j] * two_over_v_dot_v
sign = -1
return [Q, sign]
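# Example (illustrative): for v = [1, 0],
#   Q = I - 2 v v^t / (v^t v) = [-1 0]
#                               [ 0 1]
# which reflects across the plane perpendicular to v and has determinant -1,
# so the returned sign is -1.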
# ----------------------------------------------------------------
def validate_matrix(A, func_name):
#To do: is-list A -- put into sackutil
nr = len(A)
if nr < 1:
print >> sys.stderr, func_name + ": no rows."
sys.exit(1)
nc = len(A[0])
if nc < 1:
print >> sys.stderr, func_name + ": empty row."
sys.exit(1)
for row in A:
if len(row) != nc:
print >> sys.stderr, func_name + ": ragged input."
sys.exit(1)
return [nr, nc]
# ================================================================
class sackmat:
def __init__(self, elements):
# To do: validate non-ragged
self.elements = copy.deepcopy(elements)
def dims(self):
return [len(self.elements), len(self.elements[0])]
def square_dim(self):
[nr, nc] = self.dims()
if nr != nc:
print >> sys.stderr, "Non-square input."
sys.exit(1)
return nr
def num_rows(self):
return len(self.elements)
def num_cols(self):
return len(self.elements[0])
def fill_zero_matrix(self):
[nr, nc] = self.dims()
for i in range(0, nr):
for j in range(0, nc):
self.elements[i][j] = 0.0
def fill_identity_matrix(self):
n = self.square_dim()
for i in range(0, n):
for j in range(0, n):
self.elements[i][j] = 0.0
for i in range(0, n):
self.elements[i][i] = 1.0
def __getitem__(self, i):
return self.elements[i]
def __setitem__(self, i, value):
self.elements[i] = value
def __add__(A, B):
[nr, nc] = check_same_matrix_dims(A, B, "sackmat add")
C = make_zero_matrix(nr, nc)
for i in range(0, nr):
for j in range(0, nc):
C[i][j] = A[i][j] + B[i][j]
return C
def __sub__(A, B):
[nr, nc] = check_same_matrix_dims(A, B, "sackmat sub")
C = make_zero_matrix(nr, nc)
for i in range(0, nr):
for j in range(0, nc):
C[i][j] = A[i][j] - B[i][j]
return C
# We can get here via matrix times something, i.e. A*B where A is a matrix.
# B can be whatever, but currently I support only B being a matrix (sackmat
# object), vector (list), or scalar.
#
# A Python implementation detail: If one puts the scalar on the left, e.g.
# 2.0 * A, we won't even get called here.
def __mul__(A, B):
B_is_matrix = 1
try:
x = B.elements
except:
B_is_matrix = 0
if B_is_matrix:
[anr, anc, bnr, bnc] = check_mul_matrix_dims(A, B, "sackmat mul")
C = make_zero_matrix(anr, bnc)
for i in range(0, anr):
for j in range(0, bnc):
C[i][j] = 0
for k in range(0, anc):
C[i][j] += A[i][k] * B[k][j]
return C
elif sm_is_list(B):
return matrix_times_vector(A, B)
else:
[anr, anc] = A.dims()
C = make_zero_matrix(anr, anc)
for i in range(0, anr):
for j in range(0, anc):
C[i][j] = A[i][j] * B
return C
def __neg__(A):
[nr, nc] = A.dims()
C = make_zero_matrix(nr, nc)
for i in range(0, nr):
for j in range(0, nc):
C[i][j] = -A[i][j]
return C
def __pow__(A, n):
if type(n) != type(1):
print >> sys.stderr, \
"sackmat __pow__: exponent <<", n, ">> is non-integer."
sys.exit(1)
if n < 0:
Ap = A.inv()
n = -n
elif n == 0:
return A*A.inv()
else:
Ap = copy.deepcopy(A)
A2 = Ap
n -= 1
while (n > 0):
if n & 1:
Ap = Ap * A2
n = n >> 1
A2 = A2 * A2
return Ap
# Overload the % operator for Kronecker product.
def __mod__(A, B):
return kronecker_product(A, B)
def copy_elements_from(self, other):
check_same_matrix_dims(self, other, "copy_elements_from")
[nr, nc] = self.dims()
for i in range(0, nr):
for j in range(0, nc):
self.elements[i][j] = other.elements[i][j]
def copy_elements_to(self, other):
check_same_matrix_dims(self, other, "copy_elements_to")
[nr, nc] = self.dims()
for i in range(0, nr):
for j in range(0, nc):
other.elements[i][j] = self.elements[i][j]
def smul(self, s):
[nr, nc] = self.dims()
C = make_zero_matrix(nr, nc)
for i in range(0, nr):
for j in range(0, nc):
C[i][j] = s * self.elements[i][j]
return C
def smul_in_place(self, s):
[nr, nc] = self.dims()
for i in range(0, nr):
for j in range(0, nc):
self.elements[i][j] = s * self.elements[i][j]
def to_scalar(self):
[nr, nc] = self.dims()
if (nr != 1 or nc != 1):
print >> sys.stderr, \
"sackmat to_scalar: input dimensions must be 1x1; got %dx%d." \
% (nr, nc)
sys.exit(1)
# Also try to handle the case when it's bigger than 1x1, but
# is a scalar multiple of the identity (to within a tolerance).
return self.elements[0][0]
def transpose(self):
[nr, nc] = self.dims()
C = make_zero_matrix(nc, nr)
for i in range(0, nr):
for j in range(0, nc):
C[j][i] = self.elements[i][j]
return C
def conjugate_transpose(self):
[nr, nc] = self.dims()
C = make_zero_matrix(nc, nr)
for i in range(0, nr):
for j in range(0, nc):
C[j][i] = conj(self.elements[i][j])
return C
def star(self):
return self.conjugate_transpose()
# 0 1 2 3 4 5
# 0 . . . . . .
# 1 o . . . . .
# 2 o o . . . .
# 3 o o o . . .
# 4 o o o o . .
# 5 o o o o o .
def transpose_in_place(self):
n = self.square_dim()
for i in range(0, n):
for j in range(0, i):
temp = self.elements[i][j]
self.elements[i][j] = self.elements[j][i]
self.elements[j][i] = temp
def A_star_A(self):
n = self.square_dim()
C = make_zero_matrix(n, n)
for i in range(0, n):
for j in range(0, n):
C[i][j] = 0.0
for k in range(0, n):
C[i][j] += conj(self.elements[k][i]) * self.elements[k][j]
return C
def __str__(self):
[nr, nc] = self.dims()
mat_string = ""
for i in range(0, nr):
row_string = str(self.elements[i][0])
for j in range(1, nc):
row_string += " " + str(self.elements[i][j])
mat_string += row_string + "\n"
return mat_string
def __repr__(self):
return self.__str__()
def printp(self, name, format="%11.7f"):
print "%s:" % (name)
self.printf(format)
print
def printf(self, format="%11.7f"):
[nr, nc] = self.dims()
if isinstance(self.elements[0][0], complex):
# This is a hack -- hard-coding %.6f for the imaginary part. I
# want complexes printed without any embedded whitespace, so that
# my reader routine can naively isolate matrix elements by looking
# at whitespace.
cpformat = format + "+" + "%.6fj"
cnformat = format + "-" + "%.6fj"
for i in range(0, nr):
for j in range(0, nc):
rr = self.elements[i][j].real
ii = self.elements[i][j].imag
#print format % (rr),
#print format % (ii),
# Use abs to handle the fact that IEEE floating-point has
# signed zero. I don't want to be printing "3+-0j" when
# printing a conjugated real.
if (ii < 0.0):
print cnformat % (rr, abs(ii)),
else:
print cpformat % (rr, abs(ii)),
print
else:
for i in range(0, nr):
for j in range(0, nc):
print format % (self.elements[i][j]),
print
return
def get_submatrix_column(self, colidx, start_row):
[nr, nc] = self.dims()
submatrix_column = []
for src in range(start_row, nr):
submatrix_column.append(self.elements[src][colidx])
return submatrix_column
def put_submatrix_column(self, colidx, start_row, column):
[nr, nc] = self.dims()
src = 0
for dst in range(start_row, nr):
self.elements[dst][colidx] = column[src]
src += 1
return
# ----------------------------------------------------------------
# Example:
# Let A be 6 x 8 and Q be 4 x 4.
# Start at row 3, column 3 of A.
# Q: [1 ] A: [. . . . . . . .]
# [ 1 ] [. . . . . . . .]
# [ o o o o] [. . o o o o o o]
# [ o o o o] [. . o o o o o o]
# [ o o o o] [. . o o o o o o]
# [ o o o o] [. . o o o o o o]
# The 1's are virtual; if Q really were 6x6, with the 1's as actual 1's and
# the blanks as 0's, then the same product would be obtained (at the cost of
# more arithmetic operations).
# tmp sub self
# j ------>
# Q*A: [ ] Q: [1 ] A: k [. . . . . . . .]
# [ j ----> ] [ 1 k ----> ] | [. . . . . . . .]
# [ i # o o o o o] [ i # # # # ] | [. . # o o o o o]
# [ | o o o o o o] [ | o o o o ] v [. . # o o o o o]
# [ | o o o o o o] [ | o o o o ] [. . # o o o o o]
# [ v o o o o o o] [ v o o o o ] [. . # o o o o o]
# [ ] [ 1] [. . . . . . . .]
# tmp sub self
# j ------>
# Q*A: [ ] Q: [1 ] A: k [. . . . . . . .]
# [j ----> ] [ 1 k-----> ] | [. . . . . . . .]
# i [o o # o o o o o] [ i # # # # ] | [o o # o o o o o]
# | [o o o o o o o o] [ | o o o o ] v [o o # o o o o o]
# | [o o o o o o o o] [ | o o o o ] [o o # o o o o o]
# v [o o o o o o o o] [ v o o o o ] [o o # o o o o o]
# [ ] [ 1] [. . . . . . . .]
# To do: doc self_start_col: for when previous columns are already known to be zero.
def premultiply_by_submatrix(self, sub, sub_start_row, self_start_col):
[self_nr, self_nc] = self.dims()
[sub_nr, sub_nc] = sub.dims()
[tmp_nr, tmp_nc] = [sub_nr, self_nc - self_start_col]
tmp = make_zero_matrix(tmp_nr, tmp_nc)
# First, form the product out of place.
# C[i][j] = sum_k A[i][k] B[k][j]
for tmpi in range(0, tmp_nr):
subi = tmpi
for tmpj in range(0, tmp_nc):
selfj = tmpj + self_start_col
value = 0.0
for subk in range(0, sub_nc):
selfk = sub_start_row + subk
value += sub.elements[subi][subk] * self.elements[selfk][selfj]
tmp[tmpi][tmpj] = value
# Second, copy the product back in place.
for tmpi in range(0, tmp_nr):
selfi = tmpi + sub_start_row
for tmpj in range(0, tmp_nc):
selfj = tmpj + self_start_col
self.elements[selfi][selfj] = tmp.elements[tmpi][tmpj]
# ----------------------------------------------------------------
# To do: port me from premul: this is just a stub.
# tmp self sub
# j ----> k ---->
# A*Q^t: [ i # o o o] A: i [. . # # # #] Q: [1 ]
# [ | o o o o] | [. . o o o o] [ 1 k ---->]
# [ | o o o o] | [. . o o o o] [ j # # # #]
# [ v o o o o] v [. . o o o o] [ | o o o o]
# [ o o o o] [. . o o o o] [ | o o o o]
# [ o o o o] [. . o o o o] [ v o o o o]
def postmultiply_by_submatrix_transpose(self, sub, sub_start_col):
[self_nr, self_nc] = self.dims()
[sub_nr, sub_nc] = sub.dims()
[tmp_nr, tmp_nc] = [self_nr, sub_nc]
tmp = make_zero_matrix(tmp_nr, tmp_nc)
# First, form the product out of place.
# C[i][j] = sum_k A[i][k] B[j][k]
for tmpi in range(0, tmp_nr):
selfi = tmpi
for tmpj in range(0, tmp_nc):
subj = tmpj
value = 0.0
for subk in range(0, sub_nc):
selfk = sub_start_col + subk
value += self.elements[selfi][selfk] * sub.elements[subj][subk]
tmp[tmpi][tmpj] = value
# Second, copy the product back in place.
for tmpi in range(0, tmp_nr):
selfi = tmpi
for tmpj in range(0, tmp_nc):
selfj = tmpj + sub_start_col
self.elements[selfi][selfj] = tmp.elements[tmpi][tmpj]
# ----------------------------------------------------------------
# This assumes submatrices begin on diagonal elements.
# o o o o o o
# . o o o o o
# . . o o o o
# . . . o o o
# . . . . o o
# . . . . . o
def householder_UT_pass_on_submatrix(self, submxidx, tol = 1e-5, arg_Q = 0.0):
[nr, nc] = self.dims()
height = nr - submxidx
# Get the 1st column of the submatrix
u = self.get_submatrix_column(submxidx, submxidx)
# Compute ||u|| and v.
v0 = math.sqrt(vecdot(u, u))
if (u[0] >= 0):
v0 = -v0
v = [0] * height
v[0] = v0
# Compute axis = u - v.
axis = vecsub(u, v)
# Compute the Householder transformation.
[Q, sign] = householder_vector_to_Q(axis)
# Apply the Householder transformation.
# Example:
# Let A be 6 x 8.
# Start at row 3, column 3 of A.
# Then Q is 4 x 4:
#
# Q: [1 ] A: [. . . . . . . .]
# [ 1 ] [. . . . . . . .]
# [ o o o o] [. . o o o o o o]
# [ o o o o] [. . o o o o o o]
# [ o o o o] [. . o o o o o o]
# [ o o o o] [. . o o o o o o]
self.premultiply_by_submatrix(Q, submxidx, submxidx)
# If they want an updated version of the Q matrix, give it back to
# them.
if (not isinstance(arg_Q, float)):
arg_Q.premultiply_by_submatrix(Q, submxidx, 0)
return sign
# ----------------------------------------------------------------
def householder_UT(self):
[nr, nc] = self.dims()
num_HH = nr
if (nr > nc):
num_HH = nc
sign = 1
for i in range(0, num_HH):
sign *= self.householder_UT_pass_on_submatrix(i)
return sign
# ----------------------------------------------------------------
# Decompose A into the product of orthogonal Q and upper-triangular R.
# Do this using successive Householder transformations.
# A:
# o o o o
# o o o o
# o o o o
# o o o o
# Q1 A:
# o o o o
# . o o o
# . o o o
# . o o o
# Q2 Q1 A:
# o o o o
# . o o o
# . . o o
# . . o o
# Q3 Q2 Q1 A:
# o o o o
# . o o o
# . . o o
# . . . o
# Now
# R = (Qn ... Q2 Q1) A.
# Let
# Q = Qn ... Q2 Q1.
# Then
# R = Q A.
# Orthogonal matrices Q (e.g. Householders) have Q^t Q = I so we may
# invert by transposing:
# A = Q^t R.
# To do: doc pre-call alloc
def QR_decomp(self, Q, R):
tol = 1e-5 # To do: package the tol
[nr, nc] = self.dims()
num_HH = nr
if (nr > nc):
num_HH = nc
# To do: check dims of Q and R
# To do: also cmt why not alloc here (gc ...)
Q.fill_identity_matrix()
self.copy_elements_to(R)
for i in range(0, num_HH):
R.householder_UT_pass_on_submatrix(i, tol, Q)
Q.transpose_in_place()
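# Sample syntax (illustrative; Q and R must be pre-allocated by the caller):
#   A = sackmat([[1.0, 2.0], [3.0, 4.0]])
#   Q = make_zero_matrix(2, 2)
#   R = make_zero_matrix(2, 2)
#   A.QR_decomp(Q, R)
# Afterward Q is orthogonal, R is upper-triangular, and Q*R reproduces A
# to within round-off.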
# ----------------------------------------------------------------
# http://en.wikipedia.org/wiki/Polar_decomposition.
# A = U P
# P = sqrt(A^* A)
# U = A P^-1
def polar_decomp(self):
Astar = self.conjugate_transpose()
ApA = Astar * self # To do: Perhaps make an A^* A method.
P = ApA.sqrt()
U = self * P.inv()
return [U, P]
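# Sample syntax (illustrative):
#   A = sackmat([[2.0, 0.0], [0.0, 3.0]])
#   [U, P] = A.polar_decomp()
# For this symmetric positive-definite A, P is approximately A itself and
# U is approximately the identity.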
# ----------------------------------------------------------------
# To do: stub: not coded yet: could this sentence bear another colon: yes it
# could: port me from HHUT.
# o o o o o o
# o o o o o o
# . o o o o o
# . o o o o o
# . o o o o o
# . o o o o o
def upper_hessenberg_pass_on_submatrix(self, colidx, tol = 1e-5):
[nr, nc] = self.dims()
if (nr > nc):
print >> sys.stderr, "upper_hessenberg_pass_on_submatrix: I can't handle nr > nc."
sys.exit(1)
height = nr - colidx - 1
if (colidx < 0 or colidx > nr or colidx > nc):
print >> sys.stderr, \
"upper_hessenberg_pass_on_submatrix: column index %d out of bounds in %d x %d." \
% (colidx, nr, nc)
sys.exit(1)
if (height < 1):
return
# Get the 1st column of the submatrix
u = self.get_submatrix_column(colidx, colidx+1)
# Compute ||u|| and v.
v0 = math.sqrt(vecdot(u, u))
if (u[0] >= 0):
v0 = -v0
v = [0] * height
v[0] = v0
# Compute axis = u - v.
axis = vecsub(u, v)
# Compute the Householder transformation.
[Q, sign] = householder_vector_to_Q(axis)
#Q.printp("Q")
# Q: [1 ] A: [o o o o o o]
# [ o o o o o] [o o o o o o]
# [ o o o o o] [. o o o o o]
# [ o o o o o] [. o o o o o]
# [ o o o o o] [. o o o o o]
# [ o o o o o] [. o o o o o]
# Q: [1 ] A: [o o o o o o]
# [ 1 ] [o o o o o o]
# [ o o o o] [. o o o o o]
# [ o o o o] [. . o o o o]
# [ o o o o] [. . o o o o]
# [ o o o o] [. . o o o o]
self.premultiply_by_submatrix(Q, colidx+1, colidx)
self.postmultiply_by_submatrix_transpose(Q, colidx+1)
# ----------------------------------------------------------------
def to_upper_hessenberg_form(self, tol = 1e-5):
[nr, nc] = self.dims()
num_UH = nr
if (nr > nc):
num_UH = nc
for i in range(0, num_UH):
self.upper_hessenberg_pass_on_submatrix(i, tol)
# ----------------------------------------------------------------
def det(self):
n = self.square_dim()
if (n == 1):
return self.elements[0][0]
if (n == 2):
a = self.elements[0][0]
b = self.elements[0][1]
c = self.elements[1][0]
d = self.elements[1][1]
return a*d - b*c
# Make a copy
A = sackmat(self.elements)
# Use Householder transformations to put the matrix into
# upper-triangular form. Each transformation is (effectively) a
# pre-multiplication by a Householder matrix with determinant -1.
# Account for this below.
sign = A.householder_UT()
# Take the product along the diagonal.
# The negative sign accounts for the factors of -1 introduced by
# the Householder transformations.
rv = sign
for i in range(0, n):
rv *= A[i][i]
return rv
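# Example (illustrative): sackmat([[1.0, 2.0], [3.0, 4.0]]).det() returns
# -2.0; 2x2 inputs are computed directly, larger ones via the Householder
# upper-triangularization above.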
# ----------------------------------------------------------------
def trace(self):
n = self.square_dim()
rv = 0.0
for i in range(0, n):
rv += self.elements[i][i]
return rv
# ----------------------------------------------------------------
# sum_i sum_k A_ik A_ki.
def trace_of_square(self):
n = self.square_dim()
sum = 0.0
for i in range(0, n):
for k in range(0, n):
sum += self.elements[i][k] * self.elements[k][i]
return sum
# ----------------------------------------------------------------
# Does not check that the input is skew-symmetric.
# Currently coded to use the recursive reduction formula:
# Pf(A) = sum_{k=1}^{N-1} (-1)^{k-1} A[0][k] Pf(Ahat[0][k])
# where Ahat[j][k] excludes the jth and kth rows and columns from A.
def pfaffian(self):
N = self.square_dim()
if (N & 1):
print >> sys.stderr, "sackmat pfaffian: input dimension must be even; got %d." \
% (N)
sys.exit(1)
if (N == 2):
return (self.elements[0][1] - self.elements[1][0]) * 0.5
sign = 1
sum = 0.0
for k in range(1, N):
Ahat = self.pfaffian_hat(0, k)
if (self.elements[0][k] != 0.0):
# Avoid needless recursion if A[0][k] is zero. This gives a
# significant performance improvement for large sparse matrices.
sum = sum + sign * self.elements[0][k] * Ahat.pfaffian()
sign = sign * -1
return sum
def pfaffian_hat(self, j, k):
N = self.square_dim()
rv = make_zero_matrix(N-2, N-2)
di = 0
for si in range(0, N):
if (si != j and si != k):
dj = 0
for sj in range(0, N):
if (sj != j and sj != k):
rv[di][dj] = self[si][sj]
dj += 1
di += 1
return rv
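# Example (illustrative): for a 4x4 skew-symmetric A with entries a01, a02,
# a03, a12, a13, a23 above the diagonal, the recursion above gives
#   Pf(A) = a01*a23 - a02*a13 + a03*a12
# and Pf(A)^2 equals det(A).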
# ----------------------------------------------------------------
def augment_I(self):
n = self.square_dim()
AI = sackmat(self.elements)
Z = [0] * n
#print "1. AI\n", self
for i in range (0, n):
AI.elements[i] = AI.elements[i] + Z # Python list concatenation
#print "2. AI\n", self
for i in range (0, n):
AI[i][n+i] = 1
return AI
# ----------------------------------------------------------------
def inv(self, tol = 1e-6):
n = self.square_dim()
twon = n + n
# First, paste the input and the identity side by side.
AI = self.augment_I()
# Second, use Householder transformations to put it into
# upper-triangular form.
AI.householder_UT()
# Third, put 1 on the left diagonal.
for i in range(0, n):
d = AI[i][i]
if (d == 0):
print >> sys.stderr, "Singular."
sys.exit(1)
elif (abs(d) < tol):
print >> sys.stderr, "Nearly singular."
sys.exit(1)
for j in range(0, twon):
AI[i][j] = AI[i][j] / d
# Fourth, clear out the rest of the left-hand side.
# 1 . . . . . . . . .
# 0 1 . . . . . . . .
# 0 0 1 . . . . . . .
# 0 0 0 1 . . . . . . <-- i
# 0 0 0 0 1 . . . . . <-- i2
i = n-2
while (i >= 0):
i2 = n-1
while (i2 > i):
mul = AI[i][i2]
for j in range(0, twon):
AI[i][j] -= AI[i2][j] * mul
i2 -= 1
i -= 1
# Fifth, obtain the inverse from the right-hand side.
for i in range(0, n):
AI.elements[i] = AI.elements[i][n:twon]
return AI
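# Sample syntax (illustrative):
#   A = sackmat([[4.0, 7.0], [2.0, 6.0]])
#   Ai = A.inv()
# Ai is approximately [[0.6, -0.7], [-0.2, 0.4]], and A*Ai is the identity
# to within round-off.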
# ----------------------------------------------------------------
# This is a general row-reduction routine. It operates on the matrix
# in-place. At the moment, it uses naive pivoting, appropriate for exact
# arithmetic (e.g. finite fields). For floating-point (here), it should be
# re-coded to work harder to find the best row to pivot in.
#
# Also note that I prefer Householder-using algorithms when possible, which
# in many cases avoid the need for row-reduction and pivoting in the first
# place. For more information please see http://johnkerl.org/doc/hh.pdf
def row_reduce_below(self, tol=1e-7):
[nr, nc] = self.dims()
top_row = 0
left_col = 0
while (top_row < nr) and (left_col < nc):
# Find the nearest row with a non-zero value in this column;
# exchange that row with this one.
pivot_row = top_row
pivot_successful = 0
while (not pivot_successful and (pivot_row < nr)):
if (abs(self.elements[pivot_row][left_col]) >= tol):
if (top_row != pivot_row):
# Swap top row and pivot row
temp = self.elements[top_row]
self.elements[top_row] = self.elements[pivot_row]
self.elements[pivot_row] = temp
pivot_successful = 1
else:
pivot_row += 1
if (not pivot_successful):
left_col += 1
continue # Work on the next column.
# We can have a zero leading element in this row if it's
# the last row and full of zeroes.
top_row_lead = self.elements[top_row][left_col]
if (abs(top_row_lead) >= tol):
# Normalize this row.
inv = 1.0 / top_row_lead
for j in range(0, nc):
self.elements[top_row][j] *= inv
# Clear this column.
top_row_lead = self.elements[top_row][left_col]
for cur_row in range(top_row + 1, nr):
current_row_lead = self.elements[cur_row][left_col]
cr = self.elements[cur_row]
tr = self.elements[top_row]
for j in range(0, nc):
self.elements[cur_row][j] = cr[j] * top_row_lead - tr[j] * current_row_lead
left_col += 1
top_row += 1
return
# ----------------------------------------------------------------
# Operates on the matrix in-place.
def row_echelon_form(self, tol=1e-7):
[nr, nc] = self.dims()
self.row_reduce_below(tol)
for row in range(0, nr):
for row2 in range(row+1, nr):
[found, row2_leader_pos] = find_leader_pos(self.elements[row2], tol)
if (not found):
break
row2_leader_val = self.elements[row2][row2_leader_pos]
row_clear_val = self.elements[row][row2_leader_pos]
if (abs(row_clear_val) < tol):
continue
mul = float(row_clear_val) / float(row2_leader_val)
for j in range(0, nc):
self.elements[row][j] -= self.elements[row2][j] * mul
return
# ----------------------------------------------------------------
# This routine makes a copy of the matrix and row-reduces it. To save
# CPU cycles, use rank_rr() if the matrix is already row-reduced.
def rank(self, tol=1e-7):
Arr = sackmat(self.elements)
Arr.row_reduce_below(tol)
return Arr.rank_rr(tol)
# ----------------------------------------------------------------
# This routine assumes the matrix is already row-reduced. If not,
# use rank() instead.
def rank_rr(self, tol=1e-7):
[nr, nc] = self.dims()
rank = 0
for i in range(0, nr):
row_is_zero = 1
for j in range(0, nc):
if (abs(self.elements[i][j]) >= tol):
row_is_zero = 0
break
if (not row_is_zero):
rank += 1
return rank
# ----------------------------------------------------------------
def kernel_basis(self):
[nr, nc] = self.dims()
rr = sackmat(self.elements) # Make a copy
rr.row_echelon_form()
rank = rr.rank_rr()
dimker = nc - rank
if (dimker == 0):
return [0, 0]
kerbas = make_zero_matrix(dimker, nc)
nfree = 0; # == dimker but I'll compute it anyway
free_flags = [1] * nc
free_indices = [0] * nc
for i in range(0, rank):
[found, dep_pos] = find_leader_pos(rr[i])
if (found):
free_flags[dep_pos] = 0
for i in range(0, nc):
if (free_flags[i]):
free_indices[nfree] = i
nfree += 1
# For each free coefficient:
# Let that free coefficient be 1 and the rest be zero.
# Also set any dependent coefficients which depend on that
# free coefficient.
for i in range(0, dimker):
kerbas[i][free_indices[i]] = 1
# Matrix in row echelon form:
#
# 0210 c0 = ?? c0 = 1 c0 = 0
# 1000 c1 = -2 c2 c1 = 0 c1 = 5
# 0000 c2 = ?? c2 = 0 c2 = 1
# 0000 c3 = 0 c3 = 0 c3 = 0
# j = 0,1
# fi = 0,2
# i = 0:
# j = 0 row 0 fi 0 = row 0 c0 = 0
# j = 1 row 1 fi 0 = row 1 c0 = 0
# i = 1:
# j = 0 row 0 fi 1 = row 0 c2 = 2 dep_pos = 1
# j = 1 row 1 fi 1 = row 1 c2 = 0
# 0001
# 01?0
for j in range(0, rank):
if (tol_zero(rr[j][free_indices[i]])):
continue
[found, dep_pos] = find_leader_pos(rr[j])
if (not found):
print >> sys.stderr, "Coding error in get_kernel_basis!"
sys.exit(1)
kerbas[i][dep_pos] = -rr[j][free_indices[i]]
# To do: temp jrk 2006-11-09
# self.check_kernel_basis(kerbas, dimker)
# To do: 2007-05-15: The checker needs a fix but I don't remember the
# data set which tripped it off. I remember the problem was with
# large numbers ... the check was using absolute instead of relative error.
self.check_kernel_basis(kerbas, dimker)
return [1, kerbas]
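# Sample syntax (illustrative):
#   A = sackmat([[1.0, 2.0], [2.0, 4.0]])
#   [found, kerbas] = A.kernel_basis()
# found is 1 and kerbas has the single row [-2.0, 1.0] (approximately),
# since A maps (-2, 1) to the zero vector.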
# ----------------------------------------------------------------
def check_kernel_basis(self, kerbas, dimker):
for i in range(0, dimker):
v = kerbas[i]
Av = matrix_times_vector(self, v)
if (not vector_is_zero(Av)):
# To do: all this to stderr ...
print >> sys.stderr, "Coding error in kernel basis."
print "Coding error in kernel basis."; print
self.printp("Matrix")
print "dimker =", dimker
kerbas.printp("Basis")
print "Product at row " + str(i) + ":"
print_row_vector(Av)
sys.exit(1)
# ----------------------------------------------------------------
def get_column(self, j):
[nr, nc] = self.dims()
v = []
for i in range(0, nr):
v.append(self.elements[i][j])
return v
# ----------------------------------------------------------------
def put_column(self, j, v):
[nr, nc] = self.dims()
for i in range(0, nr):
self.elements[i][j] = v[i]
# ----------------------------------------------------------------
# Upper Hessenberg ...
# generalize the HHUT method a bit ...
# o o o o o o
# . o o o o o
# . . o o o o
# . . . o o o
# . . . . o o
# . . . . . o
# o o o o o o
# o o o o o o
# . o o o o o
# . . o o o o
# . . . o o o
# . . . . o o
# ----------------------------------------------------------------
# Single QR decomposition ...
# ----------------------------------------------------------------
# Naive QR eigenvalue algorithm ...
# ----------------------------------------------------------------
# QR eigenvalue algorithm with upper Hessenberg ...
# ----------------------------------------------------------------
# At 171 iterations, 171! overflows a double-precision floating-point
# number (~ 10^308).
def exp(self, tol=1e-12, maxits=165):
n = self.square_dim()
k = 0
B = make_zero_matrix(n, n)
Ak = make_identity_matrix(n)
kfact = 1
while (1):
#print "k =", k, " k! =", kfact
#print "A^%d =" % (k)
#Ak.printf()
#print
worst = 0.0
recip_kfact = 1.0 / kfact
for i in range(0, n):
for j in range(0, n):
incrij = recip_kfact * Ak[i][j]
B[i][j] += incrij
absincrij = abs(incrij)
if (absincrij > worst):
worst = absincrij
#print "worst=", worst
if (worst < tol):
break
if (k > maxits):
print >> sys.stderr, \
"sackmat_m.exp: max # iterations (%d) exceeded" \
% (maxits)
sys.exit(1)
k += 1
Ak *= self
kfact *= k
return B
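# Sample syntax (illustrative):
#   A = sackmat([[0.0, 1.0], [-1.0, 0.0]])
#   E = A.exp()
# E approximates the rotation matrix [[cos 1, sin 1], [-sin 1, cos 1]].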
# ----------------------------------------------------------------
# Denman-Beavers algorithm for matrix square root, for positive-definite A:
# http://en.wikipedia.org/wiki/Matrix_square_root.
#
# Y_0 = A
# Z_0 = I
#
# Y_{k+1} = 1/2 (Y_k + Z_k^-1)
# Z_{k+1} = 1/2 (Z_k + Y_k^-1)
#
# Y_k converges quadratically to sqrt(A) and Z_k converges to sqrt(A)^-1.
def sqrt(self):
n = self.square_dim()
Yk = copy.copy(self)
Zk = make_identity_matrix(n)
k = 0
maxiter = 100
while (k < maxiter):
Ykprev = Yk; Zkprev = Zk
Ykinv = Yk.inv(); Zkinv = Zk.inv()
Yk = Yk + Zkinv; Zk = Zk + Ykinv
Yk.smul_in_place(0.5); Zk.smul_in_place(0.5)
if (are_close_in_max_norm(Yk, Ykprev)):
return Yk
k += 1
print >> sys.stderr, "sackmat.sqrt: maxiter (%d) exceeded." \
% (maxiter)
sys.exit(1)
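# Sample syntax (illustrative):
#   A = sackmat([[4.0, 0.0], [0.0, 9.0]])
#   S = A.sqrt()
# S is approximately [[2, 0], [0, 3]], and S*S recovers A to within
# round-off.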
# ================================================================
def read_matrix(elt_scanner, file_name = "-"):
A = []
num_rows = 0
if (file_name == "-"):
file_handle = sys.stdin
else:
try:
file_handle = open(file_name, 'r')
except:
print >> sys.stderr, "Couldn't open \"" + file_name + "\" for read."
sys.exit(1)
# Note that "for line in sys.stdin" slurps *all* the input.
# We may not want all of it.
while (1):
line = file_handle.readline()
if (line == ""):
break
# Strip comments.
line = re.sub(r"#.*", r"", line)
# Strip leading and trailing whitespace.
line = re.sub(r"^\s+", r"", line)
line = re.sub(r"\s+$", r"", line)
# Skip blank lines.
if re.match(r"^$", line):
continue
v = row_vector_from_string(line, elt_scanner)
if (v == []):
if (num_rows > 0):
break
else:
A.append(v)
num_rows += 1
if (file_name != "-"):
file_handle.close()
validate_matrix(A, "read_matrix")
return sackmat(A)
# ----------------------------------------------------------------
def print_matrix(A, format="%11.7f"):
A.printf(format)
def write_matrix(A, file_name, format="%11.7f"):
[nr, nc] = A.dims()
if (file_name == "-"):
file_handle = sys.stdout
else:
try:
file_handle = open(file_name, 'w')
except:
print >> sys.stderr, "Couldn't open \"" + file_name + "\" for write."
sys.exit(1)
if isinstance(A.elements[0][0], complex):
for i in range(0, nr):
for j in range(0, nc):
if (j > 0):
file_handle.write(' ')
rr = A.elements[i][j].real
ii = A.elements[i][j].imag
#file_handle.write(format % (rr))
#file_handle.write(' ')
#file_handle.write(format % (ii))
# Use abs to handle the fact that IEEE floating-point has
# signed zero. I don't want to be printing "3+-0j" when
# printing a conjugated real.
file_handle.write(format % (rr))
if (ii < 0):
file_handle.write('-')
file_handle.write(format % (abs(ii)))
else:
file_handle.write('+')
file_handle.write(format % (abs(ii)))
file_handle.write('j')
file_handle.write('\n')
else:
for i in range(0, nr):
for j in range(0, nc):
if (j > 0):
file_handle.write(' ')
file_handle.write(format % (A.elements[i][j]))
file_handle.write('\n')
if (file_name != "-"):
file_handle.close()
def write_matrix_as_column(A, file_name, format="%11.7f"):
[nr, nc] = A.dims()
if (file_name == "-"):
file_handle = sys.stdout
else:
try:
file_handle = open(file_name, 'w')
except:
print >> sys.stderr, "Couldn't open \"" + file_name + "\" for write."
sys.exit(1)
if isinstance(A.elements[0][0], complex):
for i in range(0, nr):
for j in range(0, nc):
if (j > 0):
file_handle.write(' ')
file_handle.write(format % (A.elements[i][j].real))
file_handle.write(' ')
file_handle.write(format % (A.elements[i][j].imag))
file_handle.write('\n')
else:
for i in range(0, nr):
for j in range(0, nc):
if (j > 0):
file_handle.write(' ')
file_handle.write(format % (A.elements[i][j]))
file_handle.write('\n')
if (file_name != "-"):
file_handle.close()
# ================================================================
# Binary I/O usage example:
# from sackmat_m import *
#
# A = sackmat([[1,2,3,4],[5,6,7,8]])
# A.printf()
# write_float_matrix_binary(A, 'd')
# print "\n"
#
# B = read_float_matrix_binary(2, 4, 'd')
# B.printf()
# print "\n"
#
# C = read_fcomplex_matrix_binary(2, 2, 'd')
# C.printf()
# print "\n"
#
# write_fcomplex_matrix_binary(C, 'e')
# ----------------------------------------------------------------
def read_float_matrix_binary(nr, nc, file_name):
A = make_zero_matrix(nr, nc)
fp = open(file_name, 'rb') # Use default exception handling
bytes = fp.read(nr*nc*4)
B = array.array('f',bytes)
B.byteswap()
k = 0
for i in range(0, nr):
for j in range(0, nc):
A.elements[i][j] = B[k]
k += 1
fp.close()
return A
# ----------------------------------------------------------------
def read_fcomplex_matrix_binary(nr, nc, file_name):
A = make_zero_matrix(nr, nc)
fp = open(file_name, 'rb') # Use default exception handling
bytes = fp.read(nr*nc*8)
B = array.array('f',bytes)
B.byteswap()
k = 0
for i in range(0, nr):
for j in range(0, nc):
A.elements[i][j] = complex(B[k], B[k+1])
k += 2
fp.close()
return A
def write_float_matrix_binary(A, file_name):
[nr, nc] = A.dims()
fp = open(file_name, 'wb') # Use default exception handling
B = array.array('f')
for i in range(0, nr):
for j in range(0, nc):
B.append(A.elements[i][j])
B.byteswap()
fp.write(B)
fp.close()
# ----------------------------------------------------------------
def write_fcomplex_matrix_binary(A, file_name):
[nr, nc] = A.dims()
fp = open(file_name, 'wb') # Use default exception handling
B = array.array('f')
for i in range(0, nr):
for j in range(0, nc):
B.append(A.elements[i][j].real)
B.append(A.elements[i][j].imag)
B.byteswap()
fp.write(B)
fp.close()
# ================================================================
# Gram-Schmidt orthonormalization:
#
# Orthogonality step:
# Input {a_0 .. a_{n-1}}
# Output {q_0 .. q_{n-1}}
# q_0 = a_0
# q_j = a_j - sum_{k=0}^{j-1} (a_j dot q_k)/(q_k dot q_k) q_k
# Normalization: q_j *= 1 / ||q_j||
#
# NOTE: The matrix A is viewed as a container for row vectors.
def gram_schmidt(A, tol = 1e-7):
[nr, nc] = A.dims()
Q = sackmat(A.elements) # Make a copy
# Orthogonality
for j in range(0, nr):
qj = Q[j]
# q_j = a_j - sum_{k=0}^{j-1} (a_j dot q_k)/(q_k dot q_k) q_k
for k in range(0, j):
qk = Q[k]
numer = vecdot(qj, qk)
denom = vecdot(qk, qk)
if (abs(denom) < tol):
print >> sys.stderr, "Row ", k, " of Q is zero (or near-zero) in sackmat_m.gram_schmidt."
sys.exit(1)
quot = numer / denom
qj = vecssub(qj, qk, quot)
Q[j] = qj
# Normalization
for j in range(0, nr):
qj = Q[j]
dot = vecdot(qj, qj)
if (dot < tol):
print >> sys.stderr, "Row ", k, " of Q is zero (or near-zero) in sackmat_m.gram_schmidt."
sys.exit(1)
norm_recip = 1.0 / math.sqrt(dot)
qj = vecsmul(qj, norm_recip)
Q[j] = qj
return Q
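# Sample syntax (illustrative; the rows of A are the input vectors):
#   A = sackmat([[1.0, 1.0], [0.0, 1.0]])
#   Q = gram_schmidt(A)
# The rows of Q are orthonormal: Q[0] is approximately [0.7071, 0.7071]
# and Q[1] is approximately [-0.7071, 0.7071].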
# ----------------------------------------------------------------
def kronecker_product(A, B):
[anr, anc] = A.dims()
[bnr, bnc] = B.dims()
cnr = anr*bnr
cnc = anc*bnc
C = make_zero_matrix(cnr, cnc)
for ai in range(0, anr):
for bi in range(0, bnr):
ci = ai*bnr + bi
for aj in range(0, anc):
for bj in range(0, bnc):
cj = aj*bnc + bj
C.elements[ci][cj] = A.elements[ai][aj] * B.elements[bi][bj]
return C
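# Example (illustrative):
#   A = sackmat([[1, 2], [3, 4]]); B = sackmat([[0, 1], [1, 0]])
#   C = kronecker_product(A, B)   # or equivalently A % B
# C is the 4x4 block matrix
#   [0 1 0 2]
#   [1 0 2 0]
#   [0 3 0 4]
#   [3 0 4 0]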
# ----------------------------------------------------------------
# E.g. multikron([A, B, C]) is the Kronecker product of the matrices A, B, and
# C. The Kronecker product is associative so this is well-defined.
def multikron(mxlist):
if (mxlist == []):
return []
rv = mxlist[0]
for i in range(1, len(mxlist)):
rv = kronecker_product(rv, mxlist[i])
return rv
# ----------------------------------------------------------------
# The k-fold Kronecker product of the specified matrix A at the i'th slot
# and the identity matrix at all other slots.
#
# Example:
# multikroni(A, 2, 4)
# gives the same result as
# multikron([I, I, A, I]).
def multikroni(A, i, k):
n = A.square_dim()
I = make_identity_matrix(n)
list = []
for j in range(0, k):
if (i == j):
list.append(A)
else:
list.append(I)
return multikron(list)
# ----------------------------------------------------------------
def vkron(u, v):
m = len(u)
n = len(v)
mn = m*n
w = [0.0] * mn
k = 0
for i in range(0, m):
for j in range(0, n):
w[k] = u[i] * v[j]
k += 1
return w
# ----------------------------------------------------------------
# E.g. multivkron([u, v, w]) is the Kronecker product of the vectors u, v, and
# w. The Kronecker product is associative so this is well-defined.
def multivkron(veclist):
if (veclist == []):
return []
rv = veclist[0]
for i in range(1, len(veclist)):
rv = vkron(rv, veclist[i])
return rv
### ----------------------------------------------------------------
##def matuneg
## (aref, nr, nc) = _
##
## die "matuneg(): Need as arguments matrix reference and dimensions.\n"
## unless defined nc
##
## for (i = 0; i < nr; i++):
## for (j = 0; j < nc; j++):
## N[i][j] = -aref[i][j]
## return N
### ----------------------------------------------------------------
##def matuneg_in_place
## (aref, nr, nc) = _
##
## die
## "matuneg_in_place(): Need as arguments matrix reference and dimensions.\n"
## unless defined nc
##
## for (i = 0; i < nr; i++):
## for (j = 0; j < nc; j++):
## aref[i][j] = -aref[i][j]
# ----------------------------------------------------------------
# Jacobi real-symmetric eigensolver. At present, this is coded very naively.
# Loosely adapted from Numerical Recipes.
def rs_eigensystem(self):
# Make a copy
A = sackmat(self.elements)
n = A.square_dim()
V = make_identity_matrix(n)
maxiter = 20
iter = 0
while (1):
iter += 1
sum = 0.0
for i in range(1, n):
for j in range(0, i):
sum += abs(A.elements[i][j])
#print "sum at iteration %d is %11.7e" % (iter, sum); print
if (tol_zero(sum**2, 1e-12)):
break
if (iter > maxiter):
print >> sys.stderr, \
"Jacobi eigensolver: max iterations (%d) exceeded. Non-symmetric input?" \
% (maxiter)
sys.exit(1)
for p in range(0, n):
for q in range(p+1, n):
numer = A.elements[p][p] - A.elements[q][q]
denom = A.elements[p][q] + A.elements[q][p]
if (tol_zero(denom)):
continue
theta = (1.0*numer) / denom
sign_theta = 1
if (theta < 0):
sign_theta = -1
t = sign_theta / (abs(theta) + math.sqrt(theta**2 + 1))
c = 1.0 / math.sqrt(t**2 + 1)
s = t * c
# This is wasteful memory allocation.
P = make_identity_matrix(n)
P[p][p] = c
P[p][q] = -s
P[q][p] = s
P[q][q] = c
PT = P.transpose()
A = PT * A * P
V = V * P
#print "theta=%11.7f sign_theta=%11.7f" % (theta, sign_theta)
#print "c=%11.7f s=%11.7f" % (c, s)
#print "P^t[%d][%d]:" % (p,q); PT.printf(); print ""
#print "P[%d][%d]:" % (p,q); P.printf(); print ""
#print "A[%d][%d]:" % (p,q); A.printf(); print ""
#print "V[%d][%d]:" % (p,q); V.printf(); print ""
return [V, A]
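# Sample syntax (illustrative; assumes rs_eigensystem is invoked as a method
# on a symmetric sackmat):
#   A = sackmat([[2.0, 1.0], [1.0, 2.0]])
#   [V, D] = A.rs_eigensystem()
# The diagonal of D holds the eigenvalues (1 and 3 here) and the columns of
# V are the corresponding eigenvectors.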
### ----------------------------------------------------------------
##def matsmul
## my (aref, anr, anc, scalar, cref) = _
## my (i, j)
##
## die "matsmul(): Need as arguments two matrix references and dimensions.\n"
## unless defined cref
## for (i = 0; i < anr; i++):
## for (j = 0; j < anc; j++):
## cref[i][j] = scalar * aref[i][j]
# ----------------------------------------------------------------
# To do: tol_zero, tol_non_zero routines. Fold into sackutil. Also have
# the latter export the tol.
# ----------------------------------------------------------------
# Plan:
# * QR decomp (done) -> SVD? Or SVD via Jacobi? U:AAt/V:AtA? Check it out.
# - Do AtA and AAt share common eigenvalues? Does eig(AB) == eig(BA)?
# - Singular values of A are the square roots of the eigenvalues of AtA.
# This is the definition of singular value.
# * Asymmetrical eigensolver: general matrix -> UH via HH's. Then QR decomp.
# * Complexify all, after real implementation.
# ----------------------------------------------------------------
# Jacobi:
# * Re-do it legibly (non-NR).
# ----------------------------------------------------------------
# UH:
# ----------------------------------------------------------------
# QR:
# * A = input
# * A1 = A
# * A1 = Q1 R1
# * A2 = R1 Q1
# Note
# A2 = R1 Q1
# = Q1t Q1 R1 Q1
# = Q1t A1 Q1
# -- similar, so same eivals.
# * Oscillation in the presence of non-real eigenvalues?
|
johnkerl/scripts-math
|
pythonlib/sackmat_m.py
|
Python
|
bsd-2-clause
| 56,032
|
import codecs
from copy import deepcopy
import csv
import imp
import os
import profile
import re
import sys
import time
from pymongo import Connection
from django.template.defaultfilters import slugify
PARSER_PATH = os.path.abspath(os.path.dirname(__file__))
settings = imp.load_source('app_settings', os.path.join(PARSER_PATH, '../settings.py'))
MONGO = settings.MONGO
raw = os.path.join(PARSER_PATH, 'raw/')
'''
{
'building code'
'building name'
'district code'
'district name'
'year':
'dataset':
'record':
'name':
'value': value
'explainer': text
'record'
'record'
}
Data changes I made:
STATE.csv:
- in full states data, renamed columns to match data definition above.
- added full address column by combining addr2, city1, zip1.
'''
def UnicodeDictReader(utf8_data, **kwargs):
'''
from http://stackoverflow.com/questions/5004687/python-csv-dictreader-with-utf-8-data
'''
csv_reader = csv.DictReader(utf8_data, **kwargs)
for row in csv_reader:
yield dict([(key, value.decode('latin1')) for key, value in row.iteritems()])
def school_collection():
connection = Connection(MONGO['host'], MONGO['port'])
db = connection[MONGO['database']]
if MONGO['user']:
db.authenticate(MONGO['user'], MONGO['password'])
return db.schools
def remove_all():
collection = school_collection()
print str(collection.count()) + " remaining"
print "Removing all"
collection.remove({}, safe=True)
print "Done"
print str(collection.count()) + " remaining"
def get_school(building_code):
collection = school_collection()
school = collection.find_one({"Building Code": building_code})
if school != None and building_code != '00000':
school = dict(school)
collection.database.connection.disconnect()
return school
else:
collection.database.connection.disconnect()
return None
def get_district(district_code):
collection = school_collection()
district = collection.find_one({"Building Code": '00000', "District Code": district_code})
if district != None:
district = dict(district)
collection.database.connection.disconnect()
return district
else:
collection.database.connection.disconnect()
return None
def get_state():
collection = school_collection()
state = collection.find_one({"Statewide Record": True})
return state
def save(entity):
collection = school_collection()
collection.save(entity, safe=True)
collection.database.connection.end_request()
def ensure_year(record, year):
if year not in record:
record[year] = {}
return record
def ensure_dataset(record, year, dataset):
if year not in record:
record[year] = {}
if dataset not in record[year]:
record[year][dataset] = {}
return record
def parse_gradestr(string):
'''
Grade range is imported with the basic data as GRADESTR
'''
l = string.split(',')
l = [elt.strip() for elt in l] # cut out whitespace
numbers_as_strings = ['1', '2','3','4','5','6','7','8','9','10','11','12']
results = []
for elt in l:
if '-' in elt:
# This is either a grade range (eg '9-12', 'K-5')
# Or just a string ('KG-Part')
grade_range = elt.split('-')
first_is_int, second_is_int = False, False
if grade_range[0] in numbers_as_strings:
grade_range[0] = int(grade_range[0])
first_is_int = True
if grade_range[1] in numbers_as_strings:
grade_range[1] = int(grade_range[1])
second_is_int = True
if first_is_int and second_is_int:
[results.append('grade_'+str(num)) for num in range(grade_range[0], grade_range[1]+1)]
elif second_is_int:
second = grade_range[1]
while second != 0:
results.append('grade_'+str(second))
second -= 1
results.append('grade_'+grade_range[0])
else:
results.append('grade_'+grade_range[0])
else:
results.append(elt)
return results
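# Example (illustrative): parse_gradestr('K, 1-3') returns
# ['K', 'grade_1', 'grade_2', 'grade_3'].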
def load_basic_data():
'''
This takes the list of all state building codes and inserts them into the
database. That data is used as the base for all other imports.
The dataset used is the
'''
print "Loading basic data"
filename = 'STATE.csv'
path = os.path.join(raw, filename)
f = open(path, "r")
raw_data = UnicodeDictReader(f, delimiter=',')
'''
We don't import some school codes because they are not immediately relevant
for the application. Here is the reference list as far as I can suss it out.
Will need to find the actual reference list.
1: No idea
2: ISDs
3: districts
4: charter?
5: Also ISDs?
6: religious
7: service buildings, planetariums, etc.
8: corrections
9: special academies (charter?)
10: state
'''
exclude = ['01', '06', '07']
# Get a database connection
collection = school_collection()
records_to_insert = []
districts_saved = []
for line in raw_data:
if line['Type'] not in exclude:
line['2009-10'] = {} # Set up empty year dictionary for future inserts.
# The state data stores building name and district name in the same
# column, so here we figure out which one we're dealing with.
if line['Building Code'] == '00000':
line['District Name'] = line['Building Name']
else:
# The state data does not include district name for schools,
# so we have to go back and get that from the data that has
# already been saved.
try:
line['District Name'] = get_district(line['District Code'])['District Name']
except:
print line['District Code']
print districts_saved
assert False
line['slug'] = slugify(line['Building Name'])
line['district-slug'] = slugify(line['District Name'])
#try:
line['FNAME'] = line['FNAME'].encode('utf-8')
line['LNAME'] = line['LNAME'].encode('utf-8')
#except:
# print "Could not handle " + line["FNAME"] + " " + line["LNAME"]
# Save the districts one-by-one so that we can refer to them
# when creating building records.
if 'District Code' in line:
if line['Building Code'] == '00000':
districts_saved.append(line['District Code'])
save(line)
else:
# Building records can be batch inserted.
records_to_insert.append(line)
else:
print line
print str(len(records_to_insert)) + " schools being inserted"
collection.insert(records_to_insert, safe=True)
collection.save({'Statewide Record': True, '2009-10': {}}) #save a dummy for the whole state.
collection.ensure_index('Building Code')
collection.ensure_index('District Code')
print str(collection.count()) + " records total"
def generate_grade_strings():
collection = school_collection()
buildings = collection.find({'Building Code': {'$ne':'00000'}})
for building in buildings:
try:
building['grades'] = parse_gradestr(building['GRADESTR'])
save(building)
except:
pass # the statewide record will throw an exception, so we ignore it.
def school_safety():
print "Loading school safety"
filename = 'School Safety Data 2009-2010-Table 1.csv'
year = '2009-10'
dataset = 'School Safety'
path = os.path.join(raw, filename)
f = open(path, "r")
raw_data = csv.DictReader(f, delimiter=',')
state = {}
districts = {}
records_to_insert = []
for line in raw_data:
        for field in ('District Name', 'District Code', 'District Type', 'ISD_Name',
                      'Building Code', 'Building Name', 'Building Type',
                      'County_Name', 'County_ID'):
            line[field] = line[field].rstrip()
elt = None
if '00000' not in line['Building Code']:
elt = get_school(line['Building Code'])
if '00000' in line['Building Code'] :
elt = get_district(line['District Code'])
full_record = {}
for key, value in line.iteritems():
try:
full_record[key] = { 'value': int(value) }
except:
full_record[key] = { 'value': value.rstrip() }
# TODO: calculate percentages
#for key, value in line.iteritems():
# if isinstance(full_record[key], int):
# full_record['school_pct'+key] = float(line[key]) / float()
# full_record['state_pct_'+key] = state[key]
# full_record['district_pct_'+key] = float(districts[line['District Code']][key]) / float(districts[line['District Code']][key])
# If this is an actual school (not a district or the state),
# append the district data to the school record.
if line['Building Code'] != '00000':
if line['District Code'] in districts:
district = districts[line['District Code']]
for key in full_record:
full_record[key]['district'] = 0
# full_record[key]['state'] = state[key]['value']
if '00000' in line['Building Code']:
# If this is a district, save the district data in a temporary
            # dictionary, keyed by district code for easy retrieval.
districts[line['District Code']] = full_record
if elt != None:
elt = ensure_year(elt, '2009-10')
elt = ensure_dataset(elt, '2009-10', 'school_safety')
elt['2009-10']['school_safety'] = full_record
save(elt)
else:
print line['Building Code'] + ' ' + line['Building Name'] + ' not found.'
print "Done."
def aggregate_school_safety():
records = school_collection()
districts = records.distinct('District Code')
for code in districts:
schools = records.find({'District Code': code, 'Building Code': {'$ne': '00000'}})
schools = list(schools)
if len(schools) != 0 and code and 'school_safety' in schools[0]['2009-10']:
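            # Use the first school's record as the accumulator: every school's
            # counts are summed into base[key]['district'] below, then copied
            # back onto each school's record.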
base = schools[0]['2009-10']['school_safety']
for school in schools:
# print school['2009-10']
if 'school_safety' in school['2009-10']:
if school['2009-10']['school_safety'] == {}:
break
for key, values in school['2009-10']['school_safety'].iteritems():
if isinstance(values, dict) and isinstance(values['value'], int):
try:
base[key]['district'] += values['value']
except:
pass
for school in schools:
if 'school_safety' in school['2009-10']:
if school['2009-10']['school_safety'] == {}:
break
for key, values in school['2009-10']['school_safety'].iteritems():
if isinstance(values, dict) and isinstance(values['value'], int):
try:
school['2009-10']['school_safety'][key]['district'] = base[key]['district']
except:
pass
for school in schools:
save(school)
def meap_remove_all():
print 'Cleaning old MEAP data'
collection = school_collection()
entries = collection.find()
for entry in entries:
try:
entry['2009-10']['MEAP'] = []
except:
pass
save(entry)
def meap():
meap_remove_all()
files = [
('meap/3rd_Grade_Public.csv', '3rd Grade'),
('meap/4th_Grade_Public.csv', '4th Grade'),
('meap/5th_Grade_Public.csv', '5th Grade'),
('meap/6th_Grade_Public.csv', '6th Grade'),
('meap/7th_Grade_Public.csv', '7th Grade'),
('meap/8th_Grade_Public.csv', '8th Grade'),
('meap/9th_Grade_Public.csv', '9th Grade')
]
state = ''
for file in files:
print "Starting " + file[1]
filename = file[0]
grade = file[1]
year = '2009-10'
dataset = 'MEAP'
state_exceeded = ''
district_exceeded = {'00000':'N/A'}
path = os.path.join(raw, filename)
f = open(path, "r")
raw_data = csv.DictReader(f, delimiter=',')
for line in raw_data:
columns_to_float = [
'AvgSS',
'StdDev',
'Level 1',
'Level 2',
'Level 3',
'Level 4',
'Met/Exceeded Standards',
'Did Not Meet Standards'
]
for header in columns_to_float:
try:
line[header] = float(line[header])
except:
line[header] = None
try:
line['Number Tested'] = int(line['Number Tested'])
except:
line['Number Tested'] = None
try:
line['Number Included'] = int(line['Number Included'])
except:
line['Number Included'] = None
entity = ''
if '00000' not in line['Building Code']:
entity = get_school(line['Building Code'])
elif '00000' not in line['District_Number']:
entity = get_district(line['District_Number'])
district_exceeded[line['District_Number']] = line['Met/Exceeded_Standards']
else:
entity = get_state()
state_exceeded = line['Met/Exceeded_Standards']
if entity != None:
# Checks if there already is 2009-10 MEAP data recorded
# for this entity
if 'MEAP' not in entity['2009-10']:
entity['2009-10']['MEAP'] = []
entity['2009-10']['MEAP'].append(
{
'subject': line['Subject'],
'grade': grade,
'value': line['Met/Exceeded_Standards'],
'district': district_exceeded[line['District_Number']],
'state':state_exceeded
})
save(entity)
def meap_district():
files = [
('meap 2009 3-9 demographics/3rd.csv', '3rd Grade'),
# ('meap 2009 3-9 demographics/4th.csv', '4th Grade'),
# ('meap 2009 3-9 demographics/5th.csv', '5th Grade'),
# ('meap 2009 3-9 demographics/6th.csv', '6th Grade'),
# ('meap 2009 3-9 demographics/7th.csv', '7th Grade'),
# ('meap 2009 3-9 demographics/8th.csv', '8th Grade'),
# ('meap 2009 3-9 demographics/9th.csv', '9th Grade')
]
state = ''
for file in files:
print "Starting " + file[1]
filename = file[0]
grade = file[1]
year = '2009-10'
dataset = 'MEAP'
state_exceeded = ''
district_exceeded = {'00000':'N/A'}
path = os.path.join(raw, filename)
f = open(path, "r")
raw_data = csv.DictReader(f, delimiter=',')
for line in raw_data:
#print line
columns_to_float = [
'AvgSS',
'StdDev',
'Level_1',
'Level_2',
'Level_3',
'Level_4',
'Met/Exceeded_Standards',
'Did Not Meet_Standards'
]
for header in columns_to_float:
try:
line[header] = float(line[header])
except:
line[header] = None
try:
line['Number_Tested'] = int(line['Number_Tested'])
except:
line['Number_Tested'] = None
try:
line['Number_Included'] = int(line['Number_Included'])
except:
line['Number_Included'] = None
entity = ''
if '00000' not in line['District_Number']:
entity = get_district(line['District_Number'])
district_exceeded[line['District_Number']] = line['Met/Exceeded_Standards']
else:
entity = get_state()
state_exceeded = line['Met/Exceeded_Standards']
if entity != None:
entity = ensure_dataset(entity, '2009-10','MEAP')
# Checks if there already is 2009-10 MEAP data for this grade
# recorded for this entity
if grade not in entity['2009-10']['MEAP']:
entity['2009-10']['MEAP'][grade] = {}
# Records the MEAP subject
subject = line['Subject']
subgroup = slugify(line['Demographic_SubGroup']).replace("-","_")
if subject not in entity['2009-10']['MEAP'][grade]:
entity['2009-10']['MEAP'][grade][subject] = {}
entity['2009-10']['MEAP'][grade][subject][subgroup] = {
'value': line['Met/Exceeded_Standards'],
'district': district_exceeded[line['District_Number']],
'state':state_exceeded,
'subgroup':line['Demographic_SubGroup']
}
save(entity)
def headcount_bldg_k12():
dataset = 'headcount'
filename = 'demographics/fall_2009_headcount_bldg_total_enroll_317470_7.csv'
path = os.path.join(raw, filename)
district_file = 'demographics/fall_2009_headcount_dist_total_enroll_317526_7.csv'
district_file = os.path.join(raw, district_file)
state_file = 'demographics/fall_2009_headcount_state_total_enroll_317565_7.csv'
state_file = os.path.join(raw, state_file)
# Parse State results first. Used by school and district results
state_file = open(state_file, "r")
state_data = csv.DictReader(state_file, delimiter=',')
state = {}
for line in state_data:
state = line
# Then parse district results. Used by school results
districts = {}
district_file = open(district_file, "r")
district_data = csv.DictReader(district_file, delimiter=',')
for line in district_data:
line['tot_all'] = int(line['tot_all'])
districts[str(line['District Code'])] = line
district = get_district(line['District Code'])
if district != None:
total_enrollment = int(line['tot_all'])
for key, value in line.iteritems():
try:
# Here, we convert all the number values from strings to ints.
# try-catch is used to avoid manually listing each field.
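                    # e.g. a count of 120 out of 1,000 district-wide becomes
                    # {'value': 120, 'percent': 12, 'state': <statewide percent>}.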
line[key] = {
'value': int(value),
'percent': int( float(value) / float(total_enrollment) * 100),
'state': int(float(state[key]) / float(state['tot_all']) * 100)
}
except:
pass
district = ensure_dataset(district, '2009-10', dataset)
district['2009-10'][dataset] = line
save(district)
# Open the schools file
f = open(path, "r")
raw_data = csv.DictReader(f, delimiter=',')
print districts
# Finally, parse school results
for line in raw_data:
entity = get_school(line['Building Code'])
# If the school is already in the database:
if entity != None:
entity = ensure_year(entity, '2009-10')
total_enrollment = int(line['tot_all'])
for key, value in line.iteritems():
try:
line[key] = {
'value': int(value),
'percent': int( float(value) / float(total_enrollment) * 100),
'district': int(float(districts[line['District Code']][key]['value']) / float(districts[line['District Code']]['tot_all']['value']) * 100),
'state': int(float(state[key]) / float(state['tot_all']) * 100)
}
except:
print key, value
entity['2009-10'][dataset] = line
save(entity)
else:
print line['Building Name'] + " not found."
def ACT_breakdowns():
print "Starting 2010 ACT breakdowns"
filename = 'ACT_School_and_District_Data_File_-_Spring_2010_MME_328541_7.csv'
path = os.path.join(raw, filename)
f = open(path, "r")
raw_data = csv.DictReader(f, delimiter=',')
districts = {}
state = {}
for line in raw_data:
entity = None
if '00000' not in line['Building Code']:
entity = get_school(line['Building Code'])
elif '00000' not in line['District Code']:
entity = get_district(line['District Code'])
else:
entity = get_state()
if entity is not None:
entity = ensure_year(entity, '2010')
entity = ensure_dataset(entity, '2010', 'ACT')
try:
line["Number Tested"] = int(line["Number Tested"])
except:
pass
try:
line["Mean"] = float(line["Mean"])
except:
pass
try:
line['Stdev'] = float(line['Stdev'])
except:
pass
subject = line['Subject']
entity['2010']['ACT'][subject] = line
if '00000' in line['Building Code'] and '00000' in line['District Code']:
# This is the state entry
state[line['Subject']] = line
elif '00000' in line['Building Code'] and '00000' not in line['District Code']:
# This line is for a district
if line['District Code'] not in districts:
districts[line['District Code']] = {}
line['state'] = state[line['Subject']]['Mean']
districts[line['District Code']][line['Subject']] = line
else:
# This line is for an individual school
try:
line['state'] = state[line['Subject']]['Mean']
line['district'] = districts[line['District Code']][line['Subject']]['Mean']
except:
print "No match for " + line['School Name']
save(entity)
def reduced_free_lunch_schools():
print "Starting 2009 Reduced/Free Lunch Eligible"
filename = 'reduced_lunch/Fall_2009_FRL_Bldg_318585_7/Bldg_K-12-Table 1.csv'
path = os.path.join(raw, filename)
f = open(path, "r")
raw_data = csv.DictReader(f, delimiter=',')
for line in raw_data:
entity = get_school(line['Building Code'])
if entity != None:
try:
line['free_lunch_eligible'] = int(line['free_lunch_eligible'])
line['free_lunch_eligible_pct'] = int(float(line['free_lunch_eligible']) / float(line['Total Count']) * 100)
except:
pass
try:
line['reduced_price_lunch_eligible'] = int(line['reduced_price_lunch_eligible'])
                line['reduced_price_lunch_eligible_pct'] = int(float(line['reduced_price_lunch_eligible']) / float(line['Total Count']) * 100)
except:
pass
entity = ensure_dataset(entity, '2009-10', 'reduced_lunch')
entity['2009-10']['reduced_lunch'] = line
save(entity)
def reduced_free_lunch_districts():
print "Starting 2009 Reduced/Free Lunch Eligible by District"
filename = 'reduced_lunch/Fall_2009_FRL_District_318584_7/Dist_K-12-Table 1.csv'
path = os.path.join(raw, filename)
f = open(path, "r")
raw_data = csv.DictReader(f, delimiter=',')
for line in raw_data:
entity = get_district(line['District Code'])
if entity != None:
print "ok!"
try:
line['free_lunch_eligible'] = int(line['free_lunch_eligible'])
line['free_lunch_eligible_pct'] = int(float(line['free_lunch_eligible']) / float(line['Total Count']) * 100)
except:
print "hey!"
pass
try:
line['reduced_price_lunch_eligible'] = int(line['reduced_price_lunch_eligible'])
                line['reduced_price_lunch_eligible_pct'] = int(float(line['reduced_price_lunch_eligible']) / float(line['Total Count']) * 100)
except:
print "hey!"
pass
entity = ensure_dataset(entity, '2009-10', 'reduced_lunch')
entity['2009-10']['reduced_lunch'] = line
save(entity)
def bulletin_1014():
# From Bulletin 1014- Michigan Public Schools Ranked by Select Financial Information
print "Starting Bulletin 1014"
filename = 'financials/2009_1014_bulletin.csv'
path = os.path.join(raw, filename)
f = open(path, "r")
raw_data = csv.DictReader(f, delimiter=',')
for line in raw_data:
entity = get_district(line['DCODE'])
if entity != None:
entity = ensure_dataset(entity, '2009-10', 'ten_fourteen')
for key, value in line.iteritems():
try:
if '.' in value:
line[key] = float(value)
else:
line[key] = int(value)
except:
pass
entity['2009-10']['ten_fourteen'] = line
save(entity)
def bulletin_1011():
    # From Bulletin 1011 - Michigan public school district financial information
print "Starting Bulletin 1011"
filename = 'financials/2008-9_1011_bulletin.csv'
path = os.path.join(raw, filename)
f = open(path, "r")
raw_data = csv.DictReader(f, delimiter=',')
districts = {}
for line in raw_data:
# save these before we convert them to int/float
district = line['DCODE']
field = line['FIELD']
for key, value in line.iteritems():
try:
if '.' in value:
line[key] = float(value)
else:
line[key] = int(value)
except:
pass
if district in districts:
districts[district][field] = line
else:
districts[district] = { field: line }
for district, data in districts.iteritems():
entity = get_district(district)
if entity != None:
entity = ensure_dataset(entity, '2008-9', 'ten_eleven')
entity['2008-9']['ten_eleven'] = data
save(entity)
def ayp_met():
print "Starting 2010 schools that met AYP"
filename = 'ayp/2010/Schools Met AYP-Table 1.csv'
path = os.path.join(raw, filename)
f = open(path, "r")
raw_data = csv.DictReader(f, delimiter=',')
for line in raw_data:
entity = get_school(line['Building Code'])
if entity != None:
entity = ensure_dataset(entity, '2009-10', 'ayp')
entity['2009-10']['ayp'] = line
entity['2009-10']['ayp']['passed'] = True
save(entity)
def ayp_not_met():
print "Starting 2010 schools that did not meet AYP"
filename = 'ayp/2010/Schools Not Met AYP-Table 1.csv'
path = os.path.join(raw, filename)
f = open(path, "r")
raw_data = csv.DictReader(f, delimiter=',')
for line in raw_data:
entity = get_school(line['Building Code'])
if entity != None:
entity = ensure_dataset(entity, '2009-10', 'ayp')
entity['2009-10']['ayp'] = line
entity['2009-10']['ayp']['passed'] = False
save(entity)
def meap_longitudinal():
print "Starting longitudinal MEAP data"
filename = 'meap_longitudinal/2005-2010.csv'
path = os.path.join(raw, filename)
f = open(path, "r")
raw_data = csv.DictReader(f, delimiter=',')
all_data = {}
for line in raw_data:
temp_line = {}
for key, value in line.iteritems():
# Check if the key represents a datapoint
# for example, "08-All" is the key for the percent of all students
# who passed in 2008.
parts = key.split('-')
if len(parts) > 1:
if parts[1] not in temp_line:
# ensure that there is a key for the demographic.
temp_line[parts[1]] = {}
                # save the value. The resulting structure
# looks like:
# { ...
# "[grade]": {
# "All": { "08": 89, "07": 55, ... }
# "Male": { "08": 73, ... }
# }
# ... }
try:
temp_line[parts[1]][parts[0]] = int(value)
except:
                    # Some of the values are NULL because there are too few
# students to report values without violating privacy.
temp_line[parts[1]][parts[0]] = None
# Temporarily store the record using (school, district) to identify.
if (line['Building Code'], line['District Code']) not in all_data:
all_data[(line['Building Code'], line['District Code'])] = {}
full_record = all_data[(line['Building Code'], line['District Code'])]
if line['Grade'] not in full_record:
full_record[line['Grade']] = {}
full_record[line['Grade']][line['Subject']] = temp_line
all_data[(line['Building Code'], line['District Code'])] = full_record
# Now we need to save all the data.
    # Remember, records are keyed by (Building Code, District Code) pairs
for key, value in all_data.iteritems():
record = None
if '00000' in key[0]:
if '00000' in key[1]:
# This is the state record
record = get_state()
else:
# This is a district record.
record = get_district(key[1])
else:
# This is a school record
record = get_school(key[0])
if record is not None:
record['MEAP'] = value
save(record)
else:
print key
def connection_test():
print MONGO
state = get_state()
#remove_all()
#load_basic_data()
#school_safety()
#aggregate_school_safety()
meap()
#meap_district()
#headcount_bldg_k12()
#reduced_free_lunch_schools()
#reduced_free_lunch_districts()
#bulletin_1014()
#bulletin_1011()
#ACT_breakdowns()
#generate_grade_strings()
#ayp_met()
#ayp_not_met()
#meap_longitudinal()
|
hampelm/Michigan-School-Data
|
parser/parser.py
|
Python
|
bsd-3-clause
| 33,174
|
"""Provides managed registration services on behalf of :func:`.listen`
arguments.
By "managed registration", we mean that event listening functions and
other objects can be added to various collections in such a way that their
membership in all those collections can be revoked at once, based on
an equivalent :class:`._EventKey`.
"""
from __future__ import absolute_import
import weakref
import collections
from .. import exc
_key_to_collection = collections.defaultdict(dict)
"""
Given an original listen() argument, can locate all
listener collections and the listener fn contained
(target, identifier, fn) -> {
ref(listenercollection) -> ref(listener_fn)
ref(listenercollection) -> ref(listener_fn)
ref(listenercollection) -> ref(listener_fn)
}
"""
_collection_to_key = collections.defaultdict(dict)
"""
Given a _ListenerCollection or _DispatchDescriptor, can locate
all the original listen() arguments and the listener fn contained
ref(listenercollection) -> {
ref(listener_fn) -> (target, identifier, fn),
ref(listener_fn) -> (target, identifier, fn),
ref(listener_fn) -> (target, identifier, fn),
}
"""
def _collection_gced(ref):
# defaultdict, so can't get a KeyError
if not _collection_to_key or ref not in _collection_to_key:
return
listener_to_key = _collection_to_key.pop(ref)
for key in listener_to_key.values():
if key in _key_to_collection:
# defaultdict, so can't get a KeyError
dispatch_reg = _key_to_collection[key]
dispatch_reg.pop(ref)
if not dispatch_reg:
_key_to_collection.pop(key)
def _stored_in_collection(event_key, owner):
key = event_key._key
dispatch_reg = _key_to_collection[key]
owner_ref = owner.ref
listen_ref = weakref.ref(event_key._listen_fn)
if owner_ref in dispatch_reg:
assert dispatch_reg[owner_ref] == listen_ref
else:
dispatch_reg[owner_ref] = listen_ref
listener_to_key = _collection_to_key[owner_ref]
listener_to_key[listen_ref] = key
def _removed_from_collection(event_key, owner):
key = event_key._key
dispatch_reg = _key_to_collection[key]
listen_ref = weakref.ref(event_key._listen_fn)
owner_ref = owner.ref
dispatch_reg.pop(owner_ref, None)
if not dispatch_reg:
del _key_to_collection[key]
if owner_ref in _collection_to_key:
listener_to_key = _collection_to_key[owner_ref]
listener_to_key.pop(listen_ref)
def _stored_in_collection_multi(newowner, oldowner, elements):
if not elements:
return
oldowner = oldowner.ref
newowner = newowner.ref
old_listener_to_key = _collection_to_key[oldowner]
new_listener_to_key = _collection_to_key[newowner]
for listen_fn in elements:
listen_ref = weakref.ref(listen_fn)
key = old_listener_to_key[listen_ref]
dispatch_reg = _key_to_collection[key]
if newowner in dispatch_reg:
assert dispatch_reg[newowner] == listen_ref
else:
dispatch_reg[newowner] = listen_ref
new_listener_to_key[listen_ref] = key
def _clear(owner, elements):
if not elements:
return
owner = owner.ref
listener_to_key = _collection_to_key[owner]
for listen_fn in elements:
listen_ref = weakref.ref(listen_fn)
key = listener_to_key[listen_ref]
dispatch_reg = _key_to_collection[key]
dispatch_reg.pop(owner, None)
if not dispatch_reg:
del _key_to_collection[key]
class _EventKey(object):
"""Represent :func:`.listen` arguments.
"""
def __init__(self, target, identifier, fn, dispatch_target, _fn_wrap=None):
self.target = target
self.identifier = identifier
self.fn = fn
self.fn_wrap = _fn_wrap
self.dispatch_target = dispatch_target
@property
def _key(self):
return (id(self.target), self.identifier, id(self.fn))
def with_wrapper(self, fn_wrap):
if fn_wrap is self._listen_fn:
return self
else:
return _EventKey(
self.target,
self.identifier,
self.fn,
self.dispatch_target,
_fn_wrap=fn_wrap
)
def with_dispatch_target(self, dispatch_target):
if dispatch_target is self.dispatch_target:
return self
else:
return _EventKey(
self.target,
self.identifier,
self.fn,
dispatch_target,
_fn_wrap=self.fn_wrap
)
def listen(self, *args, **kw):
self.dispatch_target.dispatch._listen(self, *args, **kw)
def remove(self):
key = self._key
if key not in _key_to_collection:
raise exc.InvalidRequestError(
"No listeners found for event %s / %r / %s " %
(self.target, self.identifier, self.fn)
)
dispatch_reg = _key_to_collection.pop(key)
for collection_ref, listener_ref in dispatch_reg.items():
collection = collection_ref()
listener_fn = listener_ref()
if collection is not None and listener_fn is not None:
collection.remove(self.with_wrapper(listener_fn))
def contains(self):
"""Return True if this event key is registered to listen.
"""
return self._key in _key_to_collection
def base_listen(self, propagate=False, insert=False,
named=False):
target, identifier, fn = \
self.dispatch_target, self.identifier, self._listen_fn
dispatch_descriptor = getattr(target.dispatch, identifier)
fn = dispatch_descriptor._adjust_fn_spec(fn, named)
self = self.with_wrapper(fn)
if insert:
dispatch_descriptor.\
for_modify(target.dispatch).insert(self, propagate)
else:
dispatch_descriptor.\
for_modify(target.dispatch).append(self, propagate)
@property
def _listen_fn(self):
return self.fn_wrap or self.fn
def append_value_to_list(self, owner, list_, value):
_stored_in_collection(self, owner)
list_.append(value)
def append_to_list(self, owner, list_):
_stored_in_collection(self, owner)
list_.append(self._listen_fn)
def remove_from_list(self, owner, list_):
_removed_from_collection(self, owner)
list_.remove(self._listen_fn)
def prepend_to_list(self, owner, list_):
_stored_in_collection(self, owner)
list_.insert(0, self._listen_fn)
|
alex/sqlalchemy
|
lib/sqlalchemy/event/registry.py
|
Python
|
mit
| 6,907
|
import json
from urllib import urlencode
from twisted.web import http
from twisted.trial import unittest
from twisted.internet.defer import inlineCallbacks
from diamondash import utils
from diamondash.tests.utils import MockHttpServer
class UtilsTestCase(unittest.TestCase):
def test_isint(self):
"""
Should check if a number is equivalent to an integer
"""
self.assertTrue(utils.isint(1))
self.assertTrue(utils.isint(2.000))
self.assertTrue(utils.isint(82734.0000000))
self.assertTrue(utils.isint(-213.0))
self.assertFalse(utils.isint(23123.123123))
def test_slugify(self):
"""Should change 'SomethIng_lIke tHis' to 'something-like-this'"""
self.assertEqual(utils.slugify(u'SoMeThing_liKe!tHis'),
'something-like-this')
self.assertEqual(utils.slugify(u'Godspeed You! Black Emperor'),
'godspeed-you-black-emperor')
def test_parse_interval(self):
"""
        Multiplier-suffixed intervals should be converted to milliseconds correctly.
"""
self.assertEqual(2, utils.parse_interval(2))
self.assertEqual(2, utils.parse_interval("2"))
self.assertEqual(2 * 1000, utils.parse_interval("2s"))
self.assertEqual(120 * 1000, utils.parse_interval("2m"))
self.assertEqual(7200 * 1000, utils.parse_interval("2h"))
self.assertEqual(86400 * 1000 * 2, utils.parse_interval("2d"))
def test_add_dicts(self):
original = {'a': 1}
defaults = {'a': 0, 'b': 2}
self.assertEqual(
utils.add_dicts(defaults, original), {'a': 1, 'b': 2})
self.assertEqual(original, {'a': 1})
self.assertEqual(defaults, {'a': 0, 'b': 2})
original = {'a': 1}
defaults1 = {'a': 0, 'b': 2}
defaults2 = {'b': 3, 'c': 4}
self.assertEqual(
utils.add_dicts(defaults1, defaults2, original),
{'a': 1, 'b': 3, 'c': 4})
self.assertEqual(original, {'a': 1})
self.assertEqual(defaults1, {'a': 0, 'b': 2})
self.assertEqual(defaults2, {'b': 3, 'c': 4})
def test_round_time(self):
self.assertEqual(utils.round_time(2, 5), 0)
self.assertEqual(utils.round_time(3, 5), 5)
self.assertEqual(utils.round_time(5, 5), 5)
self.assertEqual(utils.round_time(6, 5), 5)
self.assertEqual(utils.round_time(7, 5), 5)
self.assertEqual(utils.round_time(8, 5), 10)
self.assertEqual(utils.round_time(9, 5), 10)
self.assertEqual(utils.round_time(10, 5), 10)
self.assertEqual(utils.round_time(3, 10), 0)
self.assertEqual(utils.round_time(5, 10), 10)
self.assertEqual(utils.round_time(10, 10), 10)
self.assertEqual(utils.round_time(12, 10), 10)
self.assertEqual(utils.round_time(13, 10), 10)
self.assertEqual(utils.round_time(15, 10), 20)
self.assertEqual(utils.round_time(18, 10), 20)
def test_round_time_relative(self):
self.assertEqual(utils.round_time(1, 5, 3), 3)
self.assertEqual(utils.round_time(2, 5, 3), 3)
self.assertEqual(utils.round_time(3, 5, 3), 3)
self.assertEqual(utils.round_time(5, 5, 3), 3)
self.assertEqual(utils.round_time(6, 5, 3), 8)
self.assertEqual(utils.round_time(7, 5, 3), 8)
self.assertEqual(utils.round_time(8, 5, 3), 8)
self.assertEqual(utils.round_time(9, 5, 3), 8)
self.assertEqual(utils.round_time(10, 5, 3), 8)
self.assertEqual(utils.round_time(11, 5, 3), 13)
self.assertEqual(utils.round_time(13, 5, 3), 13)
self.assertEqual(utils.round_time(14, 5, 3), 13)
self.assertEqual(utils.round_time(1, 5, 18), 3)
self.assertEqual(utils.round_time(2, 5, 18), 3)
self.assertEqual(utils.round_time(3, 5, 18), 3)
self.assertEqual(utils.round_time(5, 5, 18), 3)
self.assertEqual(utils.round_time(6, 5, 18), 8)
self.assertEqual(utils.round_time(7, 5, 18), 8)
self.assertEqual(utils.round_time(8, 5, 18), 8)
self.assertEqual(utils.round_time(9, 5, 18), 8)
self.assertEqual(utils.round_time(10, 5, 18), 8)
self.assertEqual(utils.round_time(11, 5, 18), 13)
self.assertEqual(utils.round_time(13, 5, 18), 13)
self.assertEqual(utils.round_time(14, 5, 18), 13)
def test_floor_time(self):
self.assertEqual(utils.floor_time(2, 5), 0)
self.assertEqual(utils.floor_time(3, 5), 0)
self.assertEqual(utils.floor_time(5, 5), 5)
self.assertEqual(utils.floor_time(6, 5), 5)
self.assertEqual(utils.floor_time(7, 5), 5)
self.assertEqual(utils.floor_time(8, 5), 5)
self.assertEqual(utils.floor_time(9, 5), 5)
self.assertEqual(utils.floor_time(10, 5), 10)
self.assertEqual(utils.floor_time(3, 10), 0)
self.assertEqual(utils.floor_time(5, 10), 0)
self.assertEqual(utils.floor_time(10, 10), 10)
self.assertEqual(utils.floor_time(12, 10), 10)
self.assertEqual(utils.floor_time(13, 10), 10)
self.assertEqual(utils.floor_time(15, 10), 10)
self.assertEqual(utils.floor_time(18, 10), 10)
self.assertEqual(utils.floor_time(22, 10), 20)
def test_floor_time_relative(self):
self.assertEqual(utils.floor_time(1, 5, 3), 0)
self.assertEqual(utils.floor_time(2, 5, 3), 0)
self.assertEqual(utils.floor_time(3, 5, 3), 3)
self.assertEqual(utils.floor_time(5, 5, 3), 3)
self.assertEqual(utils.floor_time(6, 5, 3), 3)
self.assertEqual(utils.floor_time(7, 5, 3), 3)
self.assertEqual(utils.floor_time(8, 5, 3), 8)
self.assertEqual(utils.floor_time(9, 5, 3), 8)
self.assertEqual(utils.floor_time(10, 5, 3), 8)
self.assertEqual(utils.floor_time(11, 5, 3), 8)
self.assertEqual(utils.floor_time(13, 5, 3), 13)
self.assertEqual(utils.floor_time(14, 5, 3), 13)
self.assertEqual(utils.floor_time(1, 5, 18), 0)
self.assertEqual(utils.floor_time(2, 5, 18), 0)
self.assertEqual(utils.floor_time(3, 5, 18), 3)
self.assertEqual(utils.floor_time(5, 5, 18), 3)
self.assertEqual(utils.floor_time(6, 5, 18), 3)
self.assertEqual(utils.floor_time(7, 5, 18), 3)
self.assertEqual(utils.floor_time(8, 5, 18), 8)
self.assertEqual(utils.floor_time(9, 5, 18), 8)
self.assertEqual(utils.floor_time(10, 5, 18), 8)
self.assertEqual(utils.floor_time(11, 5, 18), 8)
self.assertEqual(utils.floor_time(13, 5, 18), 13)
self.assertEqual(utils.floor_time(14, 5, 18), 13)
class HttpUtilsTestCase(unittest.TestCase):
def setUp(self):
self.set_response_data("", http.OK, {})
self.server = MockHttpServer(handler=self.handle_request)
return self.server.start()
def tearDown(self):
return self.server.stop()
def set_response_data(self, body, code, headers):
self.response_body = body
self.response_code = code
self.response_headers = headers
@inlineCallbacks
def assert_last_request(self, args, data="", headers={}, method='GET'):
request = yield self.server.queue.get()
self.assertEqual(request.content.read(), data)
self.assertEqual(request.method, method)
self.assertEqual(request.args, args)
for k, v in headers.iteritems():
self.assertEqual(request.requestHeaders.getRawHeaders(k), v)
def handle_request(self, request):
request.setResponseCode(self.response_code)
for h_name, h_values in self.response_headers.iteritems():
request.responseHeaders.setRawHeaders(h_name, h_values)
self.server.queue.put(request)
return self.response_body
def assert_happy_response(self, body, code=http.OK, headers={}):
self.set_response_data(body, code, headers)
d = utils.http_request(self.server.url)
def assert_response(response):
self.assertEqual(response['body'], body)
self.assertEqual(response['status'], str(code))
for k, v in headers.iteritems():
self.assertEqual(response['headers'][k], v)
d.addCallback(assert_response)
return d
def assert_response(self, body, code=http.OK, headers={}):
self.set_response_data(body, code, headers)
d = utils.http_request(self.server.url)
def got_response(response):
self.assertEqual(response['body'], body)
self.assertEqual(response['status'], str(code))
for k, v in headers.iteritems():
self.assertEqual(response['headers'][k], v)
d.addCallback(got_response)
return d
@inlineCallbacks
def test_http_request_response_handling(self):
yield self.assert_response("")
yield self.assert_response(
json.dumps({'a': [1, 2]}),
code=http.CREATED,
headers={'luke': ['Skywalker']})
def test_http_request_for_GET(self):
utils.http_request(
"%s?%s" % (self.server.url, urlencode({'a': 'lerp', 'b': 'larp'})),
headers={'Han': 'Solo', 'Mon': 'Mothma'},
method='GET')
return self.assert_last_request(
args={'a': ['lerp'], 'b': ['larp']},
headers={'Han': ['Solo'], 'Mon': ['Mothma']},
method='GET')
def test_http_request_for_POST(self):
data = json.dumps({'a': 'lerp', 'b': 'larp'})
utils.http_request(self.server.url, data=data, method='POST')
return self.assert_last_request(args={}, data=data, method='POST')
def test_http_request_for_DELETE(self):
data = json.dumps({'a': 'lerp', 'b': 'larp'})
utils.http_request(self.server.url, data=data, method='DELETE')
return self.assert_last_request(args={}, data=data, method='DELETE')
|
praekelt/diamondash
|
diamondash/tests/test_utils.py
|
Python
|
bsd-3-clause
| 9,903
|
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Run the BOLD reference+mask workflow"""
import os
def get_parser():
"""Build parser object."""
from argparse import ArgumentParser
from argparse import RawTextHelpFormatter, RawDescriptionHelpFormatter
parser = ArgumentParser(
description="""NiWorkflows Utilities""", formatter_class=RawTextHelpFormatter
)
subparsers = parser.add_subparsers(dest="command")
be_parser = subparsers.add_parser(
"brain-extract",
formatter_class=RawDescriptionHelpFormatter,
description="""Execute brain extraction and related operations (e.g., \
intensity nonuniformity correction, robust averaging, etc.)""",
)
be_parser.add_argument("input_file", action="store", help="the input file")
be_parser.add_argument("out_path", action="store", help="the output directory")
be_parser.add_argument(
"--modality",
"-m",
action="store",
choices=("bold", "t1w"),
default="bold",
help="the input file",
)
parser.add_argument(
"--omp-nthreads",
action="store",
type=int,
default=os.cpu_count(),
help="Number of CPUs available to individual processes",
)
parser.add_argument(
"--nprocs",
action="store",
type=int,
default=os.cpu_count(),
help="Number of processes that may run in parallel",
)
return parser
def main(args=None):
"""Entry point."""
from nipype.utils.filemanip import hash_infile
from ..func.util import init_bold_reference_wf
opts = get_parser().parse_args(args=args)
wf = init_bold_reference_wf(
opts.omp_nthreads, gen_report=True, name=hash_infile(opts.input_file),
)
wf.inputs.inputnode.bold_file = opts.input_file
wf.base_dir = os.getcwd()
plugin = {
"plugin": "MultiProc",
"plugin_args": {"nprocs": opts.nprocs},
}
if opts.nprocs < 2:
plugin = {"plugin": "Linear"}
wf.run(**plugin)
if __name__ == "__main__":
from sys import argv
main(args=argv[1:])
|
poldracklab/niworkflows
|
niworkflows/cli/boldref.py
|
Python
|
bsd-3-clause
| 2,183
|
# -*- coding: utf8 -*-
from lib.constants import ALL_CURRENCIES
from tower import ugettext_lazy as _lazy
# From page 10 of the Mozilla Exporter API docs v1.0.0
#
# BDT not in docs, but added in for bug 1043481.
BANGO_CURRENCIES = ['AUD', 'BDT', 'CAD', 'CHF', 'COP', 'DKK', 'EGP', 'EUR',
'GBP', 'IDR', 'MXN', 'MYR', 'NOK', 'NZD', 'PHP', 'PLN',
'QAR', 'SEK', 'SGD', 'THB', 'USD', 'ZAR']
BANGO_CURRENCIES = dict((k, ALL_CURRENCIES[k]) for k in BANGO_CURRENCIES)
BANGO_OUTPAYMENT_CURRENCIES = ['EUR', 'GBP', 'USD']
BANGO_OUTPAYMENT_CURRENCIES = [(k, ALL_CURRENCIES[k])
for k in BANGO_OUTPAYMENT_CURRENCIES]
BANGO_COUNTRIES = [
('AFG', _lazy(u'Afghanistan')),
('ALA', _lazy(u'Åland Islands')),
('ALB', _lazy(u'Albania')),
('DZA', _lazy(u'Algeria')),
('ASM', _lazy(u'American Samoa')),
('AND', _lazy(u'Andorra')),
('AGO', _lazy(u'Angola')),
('AIA', _lazy(u'Anguilla')),
('ATA', _lazy(u'Antarctica')),
('ATG', _lazy(u'Antigua and Barbuda')),
('ARG', _lazy(u'Argentina')),
('ARM', _lazy(u'Armenia')),
('ABW', _lazy(u'Aruba')),
('AUS', _lazy(u'Australia')),
('AUT', _lazy(u'Austria')),
('AZE', _lazy(u'Azerbaijan')),
('BHS', _lazy(u'Bahamas')),
('BHR', _lazy(u'Bahrain')),
('BGD', _lazy(u'Bangladesh')),
('BRB', _lazy(u'Barbados')),
('BLR', _lazy(u'Belarus')),
('BEL', _lazy(u'Belgium')),
('BLZ', _lazy(u'Belize')),
('BEN', _lazy(u'Benin')),
('BMU', _lazy(u'Bermuda')),
('BTN', _lazy(u'Bhutan')),
('BOL', _lazy(u'Bolivia')),
('BES', _lazy(u'Bonaire, Saint Eustatius and Saba')),
('BIH', _lazy(u'Bosnia and Herzegovina')),
('BWA', _lazy(u'Botswana')),
('BVT', _lazy(u'Bouvet Island')),
('BRA', _lazy(u'Brazil')),
('IOT', _lazy(u'British Indian Ocean Territory')),
('BRN', _lazy(u'Brunei Darussalam')),
('BGR', _lazy(u'Bulgaria')),
('BFA', _lazy(u'Burkina Faso')),
('BDI', _lazy(u'Burundi')),
('KHM', _lazy(u'Cambodia')),
('CMR', _lazy(u'Cameroon')),
('CAN', _lazy(u'Canada')),
('CPV', _lazy(u'Cape Verde')),
('CYM', _lazy(u'Cayman Islands')),
('CAF', _lazy(u'Central African Republic')),
('TCD', _lazy(u'Chad')),
('CHL', _lazy(u'Chile')),
('CHN', _lazy(u'China')),
('CXR', _lazy(u'Christmas Island')),
('CCK', _lazy(u'Cocos (Keeling) Islands')),
('COL', _lazy(u'Colombia')),
('COM', _lazy(u'Comoros')),
('COG', _lazy(u'Congo')),
('COD', _lazy(u'Congo, Democratic Republic')),
('COK', _lazy(u'Cook Islands')),
('CRI', _lazy(u'Costa Rica')),
('CIV', _lazy(u"Côte d'Ivoire")),
('HRV', _lazy(u'Croatia')),
('CUB', _lazy(u'Cuba')),
('CUW', _lazy(u'Curaçao')),
('CYP', _lazy(u'Cyprus')),
('CZE', _lazy(u'Czech Republic')),
    ('DNK', _lazy(u'Denmark')),
('DJI', _lazy(u'Djibouti')),
('DMA', _lazy(u'Dominica')),
('DOM', _lazy(u'Dominican Republic')),
('ECU', _lazy(u'Ecuador')),
('EGY', _lazy(u'Egypt')),
('SLV', _lazy(u'El Salvador')),
('GNQ', _lazy(u'Equatorial Guinea')),
('ERI', _lazy(u'Eritrea')),
('EST', _lazy(u'Estonia')),
('ETH', _lazy(u'Ethiopia')),
('FLK', _lazy(u'Falkland Islands (Malvinas)')),
('FRO', _lazy(u'Faroe Islands')),
('FJI', _lazy(u'Fiji')),
('FIN', _lazy(u'Finland')),
('FRA', _lazy(u'France')),
('GUF', _lazy(u'French Guiana')),
('PYF', _lazy(u'French Polynesia')),
('ATF', _lazy(u'French Southern Territories')),
('GAB', _lazy(u'Gabon')),
('GMB', _lazy(u'Gambia')),
('GEO', _lazy(u'Georgia')),
('DEU', _lazy(u'Germany')),
('GHA', _lazy(u'Ghana')),
('GIB', _lazy(u'Gibraltar')),
('GRC', _lazy(u'Greece')),
('GRL', _lazy(u'Greenland')),
('GRD', _lazy(u'Grenada')),
('GLP', _lazy(u'Guadeloupe')),
('GUM', _lazy(u'Guam')),
('GTM', _lazy(u'Guatemala')),
('GGY', _lazy(u'Guernsey')),
('GIN', _lazy(u'Guinea')),
('GNB', _lazy(u'Guinea-Bissau')),
('GUY', _lazy(u'Guyana')),
('HTI', _lazy(u'Haiti')),
('HMD', _lazy(u'Heard and McDonald Islands')),
('VAT', _lazy(u'Holy See (Vatican City State)')),
('HND', _lazy(u'Honduras')),
('HKG', _lazy(u'Hong Kong')),
('HUN', _lazy(u'Hungary')),
('ISL', _lazy(u'Iceland')),
('IND', _lazy(u'India')),
('IDN', _lazy(u'Indonesia')),
('IRN', _lazy(u'Iran, Islamic Republic of')),
('IRQ', _lazy(u'Iraq')),
('IRL', _lazy(u'Ireland')),
('IMN', _lazy(u'Isle of Man')),
('ISR', _lazy(u'Israel')),
('ITA', _lazy(u'Italy')),
('JAM', _lazy(u'Jamaica')),
('JPN', _lazy(u'Japan')),
('JEY', _lazy(u'Jersey')),
('JOR', _lazy(u'Jordan')),
('KAZ', _lazy(u'Kazakhstan')),
('KEN', _lazy(u'Kenya')),
('KIR', _lazy(u'Kiribati')),
('PRK', _lazy(u"Korea, Democratic People's Rep")),
('KOR', _lazy(u'Korea, Republic of')),
('KOS', _lazy(u'Kosovo')),
('KWT', _lazy(u'Kuwait')),
('KGZ', _lazy(u'Kyrgyzstan')),
('LAO', _lazy(u"Lao People's Democratic Rep")),
('LVA', _lazy(u'Latvia')),
('LBN', _lazy(u'Lebanon')),
('LSO', _lazy(u'Lesotho')),
('LBR', _lazy(u'Liberia')),
('LBY', _lazy(u'Libyan Arab Jamahiriya')),
    ('LIE', _lazy(u'Liechtenstein')),
('LTU', _lazy(u'Lithuania')),
('LUX', _lazy(u'Luxembourg')),
('MAC', _lazy(u'Macao')),
('MKD', _lazy(u'Macedonia, Former Yugoslav Rep')),
('MDG', _lazy(u'Madagascar')),
('MWI', _lazy(u'Malawi')),
('MYS', _lazy(u'Malaysia')),
('MDV', _lazy(u'Maldives')),
('MLI', _lazy(u'Mali')),
('MLT', _lazy(u'Malta')),
('MHL', _lazy(u'Marshall Islands')),
('MTQ', _lazy(u'Martinique')),
('MRT', _lazy(u'Mauritania')),
('MUS', _lazy(u'Mauritius')),
('MYT', _lazy(u'Mayotte')),
('MEX', _lazy(u'Mexico')),
('FSM', _lazy(u'Micronesia, Federated States of')),
('MDA', _lazy(u'Moldova, Republic of')),
('MCO', _lazy(u'Monaco')),
('MNG', _lazy(u'Mongolia')),
('MNE', _lazy(u'Montenegro')),
('MSR', _lazy(u'Montserrat')),
('MAR', _lazy(u'Morocco')),
('MOZ', _lazy(u'Mozambique')),
('MMR', _lazy(u'Myanmar')),
('NAM', _lazy(u'Namibia')),
('NRU', _lazy(u'Nauru')),
('NPL', _lazy(u'Nepal')),
('NLD', _lazy(u'Netherlands')),
('NCL', _lazy(u'New Caledonia')),
('NZL', _lazy(u'New Zealand')),
('NIC', _lazy(u'Nicaragua')),
('NER', _lazy(u'Niger')),
('NGA', _lazy(u'Nigeria')),
('NIU', _lazy(u'Niue')),
('NFK', _lazy(u'Norfolk Island')),
('MNP', _lazy(u'Northern Mariana Islands')),
('NOR', _lazy(u'Norway')),
('OMN', _lazy(u'Oman')),
('PAK', _lazy(u'Pakistan')),
('PLW', _lazy(u'Palau')),
('PSE', _lazy(u'Palestinian Territory, Occupied')),
('PAN', _lazy(u'Panama')),
('PNG', _lazy(u'Papua New Guinea')),
('PRY', _lazy(u'Paraguay')),
('PER', _lazy(u'Peru')),
('PHL', _lazy(u'Philippines')),
('PCN', _lazy(u'Pitcairn')),
('POL', _lazy(u'Poland')),
('PRT', _lazy(u'Portugal')),
('PRI', _lazy(u'Puerto Rico')),
('QAT', _lazy(u'Qatar')),
('REU', _lazy(u'Réunion')),
('ROU', _lazy(u'Romania')),
('RUS', _lazy(u'Russian Federation')),
('RWA', _lazy(u'Rwanda')),
('BLM', _lazy(u'Saint Barthélemy')),
('SHN', _lazy(u'Saint Helena')),
('KNA', _lazy(u'Saint Kitts and Nevis')),
('LCA', _lazy(u'Saint Lucia')),
('MAF', _lazy(u'Saint Martin')),
('SPM', _lazy(u'Saint Pierre and Miquelon')),
('VCT', _lazy(u'Saint Vincent and the Grenadines')),
('WSM', _lazy(u'Samoa')),
('SMR', _lazy(u'San Marino')),
('STP', _lazy(u'Sao Tome and Principe')),
('SAU', _lazy(u'Saudi Arabia')),
    ('SEN', _lazy(u'Senegal')),
('SRB', _lazy(u'Serbia')),
('SCG', _lazy(u'Serbia and Montenegro')),
('SYC', _lazy(u'Seychelles')),
('SLE', _lazy(u'Sierra Leone')),
('SGP', _lazy(u'Singapore')),
('SXM', _lazy(u'Sint Maarten (Dutch part)')),
('SVK', _lazy(u'Slovakia')),
('SVN', _lazy(u'Slovenia')),
('SLB', _lazy(u'Solomon Islands')),
('SOM', _lazy(u'Somalia')),
('ZAF', _lazy(u'South Africa')),
('SGS', _lazy(u'South Georgia and the South Sandwich Islands')),
('SSD', _lazy(u'South Sudan')),
('ESP', _lazy(u'Spain')),
('LKA', _lazy(u'Sri Lanka')),
('SDN', _lazy(u'Sudan')),
('SUR', _lazy(u'Suriname')),
('SJM', _lazy(u'Svalbard and Jan Mayen')),
('SWZ', _lazy(u'Swaziland')),
('SWE', _lazy(u'Sweden')),
('CHE', _lazy(u'Switzerland')),
('SYR', _lazy(u'Syrian Arab Republic')),
('TWN', _lazy(u'Taiwan, Province of China')),
('TJK', _lazy(u'Tajikistan')),
('TZA', _lazy(u'Tanzania, United Republic of')),
('THA', _lazy(u'Thailand')),
('TLS', _lazy(u'Timor-Leste')),
('TGO', _lazy(u'Togo')),
('TKL', _lazy(u'Tokelau')),
('TON', _lazy(u'Tonga')),
('TTO', _lazy(u'Trinidad and Tobago')),
('TUN', _lazy(u'Tunisia')),
('TUR', _lazy(u'Turkey')),
('TKM', _lazy(u'Turkmenistan')),
('TCA', _lazy(u'Turks and Caicos Islands')),
('TUV', _lazy(u'Tuvalu')),
('UGA', _lazy(u'Uganda')),
('UKR', _lazy(u'Ukraine')),
('ARE', _lazy(u'United Arab Emirates')),
('GBR', _lazy(u'United Kingdom')),
('USA', _lazy(u'United States')),
('UMI', _lazy(u'United States Minor Outlying Islands')),
('URY', _lazy(u'Uruguay')),
('UZB', _lazy(u'Uzbekistan')),
('VUT', _lazy(u'Vanuatu')),
('VEN', _lazy(u'Venezuela, Bolivarian Republic of')),
('VNM', _lazy(u'Viet Nam')),
('VGB', _lazy(u'Virgin Islands, British')),
('VIR', _lazy(u'Virgin Islands, U.S.')),
('WLF', _lazy(u'Wallis and Futuna')),
('ESH', _lazy(u'Western Sahara')),
('YEM', _lazy(u'Yemen')),
('ZMB', _lazy(u'Zambia')),
('ZWE', _lazy(u'Zimbabwe')),
]
|
ngokevin/zamboni
|
mkt/constants/bango.py
|
Python
|
bsd-3-clause
| 9,752
|
import sublime, sublime_plugin
class MytestCommand(sublime_plugin.TextCommand):
def run(self, edit):
# self.view.insert(edit, 0, "Hello, World! ")
self.view.run_command("show_panel", {"panel": "console"}) # "toggle": 0})
# print self.view.file_name(), "is now the active view"
class SublimeOnSave(sublime_plugin.EventListener):
def on_pre_save(self, view):
print "on_pre_save"
# view.run_command('mytest')
view.run_command("run_multiple_commands", {"commands": [{"command": "show_panel", "args": {"panel": "console"}, "context": "window"}]})
# print "filename is: "+str(view.file_name())
def on_post_save(self, view):
print "on_post_save"
# print "filename is: "+str(view.file_name())
def on_activated(self, view):
print "view activated"
# view.run_command("run_multiple_commands")
# view.run_command("mytest")
# Takes an array of commands (same as those you'd provide to a key binding) with
# an optional context (defaults to view commands) & runs each command in order.
# Valid contexts are 'text', 'window', and 'app' for running a TextCommand,
# WindowCommand, or ApplicationCommand respectively.
#
# The run_multiple_commands.py has been developed by Nilium - see
# http://www.sublimetext.com/forum/viewtopic.php?f=5&t=8677 for a discussion.
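# Example key binding entry (illustrative):
# { "keys": ["ctrl+alt+r"], "command": "run_multiple_commands", "args": { "commands": [
#     { "command": "show_panel", "args": {"panel": "console"}, "context": "window" },
#     { "command": "move_to", "args": {"to": "eof"} }
# ] } }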
class RunMultipleCommandsCommand(sublime_plugin.TextCommand):
def exec_command(self, command):
if not 'command' in command:
raise Exception('No command name provided.')
args = None
if 'args' in command:
args = command['args']
# default context is the view since it's easiest to get the other contexts
# from the view
context = self.view
if 'context' in command:
context_name = command['context']
if context_name == 'window':
context = context.window()
elif context_name == 'app':
context = sublime
elif context_name == 'text':
pass
else:
raise Exception('Invalid command context "'+context_name+'".')
# skip args if not needed
if args is None:
context.run_command(command['command'])
# uncomment the next line, if you want to add a delay to the execution
# sublime.set_timeout( lambda: context.run_command(command['command']), 2000 )
else:
context.run_command(command['command'], args)
# uncomment the next line, if you want to add a delay to the execution
# sublime.set_timeout( lambda: context.run_command(command['command'], args), 2000 )
def run(self, edit, commands = None):
print "running multiple commands"
if commands is None:
return # not an error
for command in commands:
self.exec_command(command)
|
Dancore/SubTrigger
|
SubTrigger.py
|
Python
|
gpl-3.0
| 2,599
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from operator import attrgetter
from flask import current_app
from airflow import DAG
from airflow.api_connexion import security
from airflow.api_connexion.exceptions import BadRequest, NotFound
from airflow.api_connexion.schemas.task_schema import TaskCollection, task_collection_schema, task_schema
from airflow.exceptions import TaskNotFound
from airflow.security import permissions
@security.requires_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
]
)
def get_task(dag_id, task_id):
"""Get simplified representation of a task."""
dag: DAG = current_app.dag_bag.get_dag(dag_id)
if not dag:
raise NotFound("DAG not found")
try:
task = dag.get_task(task_id=task_id)
except TaskNotFound:
raise NotFound("Task not found")
return task_schema.dump(task)
@security.requires_access(
[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
]
)
def get_tasks(dag_id, order_by='task_id'):
"""Get tasks for DAG"""
dag: DAG = current_app.dag_bag.get_dag(dag_id)
if not dag:
raise NotFound("DAG not found")
tasks = dag.tasks
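    # order_by may be prefixed with '-' (e.g. '-task_id') to sort descending; the bare
    # attribute name is handed to attrgetter, so an unknown field raises AttributeError
    # and is turned into a 400 below.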
try:
tasks = sorted(tasks, key=attrgetter(order_by.lstrip('-')), reverse=(order_by[0:1] == '-'))
except AttributeError as err:
raise BadRequest(detail=str(err))
task_collection = TaskCollection(tasks=tasks, total_entries=len(tasks))
return task_collection_schema.dump(task_collection)
|
apache/incubator-airflow
|
airflow/api_connexion/endpoints/task_endpoint.py
|
Python
|
apache-2.0
| 2,401
|
import mock
from .... import base
from pulp.server.db.migrate.models import MigrationModule
from pulp.server import managers
from pulp.server.db.model.event import EventListener
class TestMigration0002(base.PulpServerTests):
@mock.patch('pulp.server.db.model.event.EventListener.get_collection')
def test_update_called(self, mock_get_collection):
module = MigrationModule('pulp.server.db.migrations.0002_rename_http_notifier')._module
module.migrate()
# make sure the correct mongo query is being passed down
mock_get_collection.return_value.update.assert_called_once_with(
{'notifier_type_id': 'rest-api'}, {'$set': {'notifier_type_id': 'http'}}
)
def test_database_integration(self):
# make sure the migration works on a live document in mongo
collection = EventListener.get_collection()
event_listener_id = str(collection.insert({
'notifier_type_id': 'rest-api',
'event_types': ['*'],
'notifier_config': {},
}, safe=True))
event_listener_factory = managers.factory.event_listener_manager()
module = MigrationModule('pulp.server.db.migrations.0002_rename_http_notifier')._module
module.migrate()
event_listener = event_listener_factory.get(event_listener_id)
self.assertEqual(event_listener['notifier_type_id'], 'http')
# cleanup
collection.remove()
|
credativ/pulp
|
server/test/unit/server/db/migrations/test_0002_rename_http_notifier.py
|
Python
|
gpl-2.0
| 1,448
|
import time
import rlp
import trie
import db
import utils
import processblock
import transactions
import logging
import copy
import sys
from repoze.lru import lru_cache
# logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger()
INITIAL_DIFFICULTY = 2 ** 17
GENESIS_PREVHASH = '\00' * 32
GENESIS_COINBASE = "0" * 40
GENESIS_NONCE = utils.sha3(chr(42))
GENESIS_GAS_LIMIT = 10 ** 6
MIN_GAS_LIMIT = 125000
GASLIMIT_EMA_FACTOR = 1024
BLOCK_REWARD = 1500 * utils.denoms.finney
UNCLE_REWARD = 15 * BLOCK_REWARD / 16
NEPHEW_REWARD = BLOCK_REWARD / 32
BLOCK_DIFF_FACTOR = 1024
GENESIS_MIN_GAS_PRICE = 0
BLKLIM_FACTOR_NOM = 6
BLKLIM_FACTOR_DEN = 5
DIFF_ADJUSTMENT_CUTOFF = 5
RECORDING = 1
NONE = 0
VERIFYING = -1
GENESIS_INITIAL_ALLOC = \
{"51ba59315b3a95761d0863b05ccc7a7f54703d99": 2 ** 200, # (G)
"e6716f9544a56c530d868e4bfbacb172315bdead": 2 ** 200, # (J)
"b9c015918bdaba24b4ff057a92a3873d6eb201be": 2 ** 200, # (V)
"1a26338f0d905e295fccb71fa9ea849ffa12aaf4": 2 ** 200, # (A)
"2ef47100e0787b915105fd5e3f4ff6752079d5cb": 2 ** 200, # (M)
"cd2a3d9f938e13cd947ec05abc7fe734df8dd826": 2 ** 200, # (R)
"6c386a4b26f73c802f34673f7248bb118f97424a": 2 ** 200, # (HH)
"e4157b34ea9615cfbde6b4fda419828124b70c78": 2 ** 200, # (CH)
}
block_structure = [
["prevhash", "bin", "\00" * 32],
["uncles_hash", "bin", utils.sha3(rlp.encode([]))],
["coinbase", "addr", GENESIS_COINBASE],
["state_root", "trie_root", trie.BLANK_ROOT],
["tx_list_root", "trie_root", trie.BLANK_ROOT],
["difficulty", "int", INITIAL_DIFFICULTY],
["number", "int", 0],
["min_gas_price", "int", GENESIS_MIN_GAS_PRICE],
["gas_limit", "int", GENESIS_GAS_LIMIT],
["gas_used", "int", 0],
["timestamp", "int", 0],
["extra_data", "bin", ""],
["nonce", "bin", ""],
]
block_structure_rev = {}
for i, (name, typ, default) in enumerate(block_structure):
block_structure_rev[name] = [i, typ, default]
acct_structure = [
["nonce", "int", 0],
["balance", "int", 0],
["storage", "trie_root", trie.BLANK_ROOT],
["code", "hash", ""],
]
acct_structure_rev = {}
for i, (name, typ, default) in enumerate(acct_structure):
acct_structure_rev[name] = [i, typ, default]
def calc_difficulty(parent, timestamp):
offset = parent.difficulty / BLOCK_DIFF_FACTOR
sign = 1 if timestamp - parent.timestamp < DIFF_ADJUSTMENT_CUTOFF else -1
return parent.difficulty + offset * sign
def calc_gaslimit(parent):
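    # Exponential moving average of gas usage. Illustrative numbers: with
    # parent.gas_limit = 1000000 and parent.gas_used = 500000 this gives
    # (1000000 * 1023 + 500000 * 6 / 5) / 1024 = 999609, never dropping below MIN_GAS_LIMIT.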
prior_contribution = parent.gas_limit * (GASLIMIT_EMA_FACTOR - 1)
new_contribution = parent.gas_used * BLKLIM_FACTOR_NOM / BLKLIM_FACTOR_DEN
gl = (prior_contribution + new_contribution) / GASLIMIT_EMA_FACTOR
return max(gl, MIN_GAS_LIMIT)
class UnknownParentException(Exception):
pass
class TransientBlock(object):
"""
Read only, non persisted, not validated representation of a block
"""
def __init__(self, rlpdata):
self.rlpdata = rlpdata
self.header_args, transaction_list, uncles = rlp.decode(rlpdata)
self.hash = utils.sha3(rlp.encode(self.header_args))
self.transaction_list = transaction_list # rlp encoded transactions
self.uncles = uncles
for i, (name, typ, default) in enumerate(block_structure):
setattr(self, name, utils.decoders[typ](self.header_args[i]))
def __repr__(self):
return '<TransientBlock(#%d %s %s)>' %\
(self.number, self.hash.encode('hex')[
:4], self.prevhash.encode('hex')[:4])
def check_header_pow(header):
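    # The nonce is valid when sha3(sha3(rlp(header without nonce)) + nonce), read
    # as a big-endian integer, is below 2**256 / difficulty.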
assert len(header[-1]) == 32
rlp_Hn = rlp.encode(header[:-1])
nonce = header[-1]
diff = utils.decoders['int'](header[block_structure_rev['difficulty'][0]])
h = utils.sha3(utils.sha3(rlp_Hn) + nonce)
return utils.big_endian_to_int(h) < 2 ** 256 / diff
class Block(object):
def __init__(self,
prevhash='\00' * 32,
uncles_hash=block_structure_rev['uncles_hash'][2],
coinbase=block_structure_rev['coinbase'][2],
state_root=trie.BLANK_ROOT,
tx_list_root=trie.BLANK_ROOT,
difficulty=block_structure_rev['difficulty'][2],
number=0,
min_gas_price=block_structure_rev['min_gas_price'][2],
gas_limit=block_structure_rev['gas_limit'][2],
gas_used=0, timestamp=0, extra_data='', nonce='',
transaction_list=[],
uncles=[],
header=None):
self.prevhash = prevhash
self.uncles_hash = uncles_hash
self.coinbase = coinbase
self.difficulty = difficulty
self.number = number
self.min_gas_price = min_gas_price
self.gas_limit = gas_limit
self.gas_used = gas_used
self.timestamp = timestamp
self.extra_data = extra_data
self.nonce = nonce
self.uncles = uncles
self.suicides = []
self.postqueue = []
self.caches = {
'balance': {},
'nonce': {},
'code': {},
'all': {}
}
self.journal = []
self.transactions = trie.Trie(utils.get_db_path(), tx_list_root)
self.transaction_count = 0
self.state = trie.Trie(utils.get_db_path(), state_root)
self.proof_mode = None
self.proof_nodes = []
# If transaction_list is None, then it's a block header imported for
# SPV purposes
if transaction_list is not None:
# support init with transactions only if state is known
assert self.state.root_hash_valid()
for tx_lst_serialized, state_root, gas_used_encoded \
in transaction_list:
self._add_transaction_to_list(
tx_lst_serialized, state_root, gas_used_encoded)
if tx_list_root != self.transactions.root_hash:
raise Exception("Transaction list root hash does not match!")
if not self.is_genesis() and self.nonce and\
not check_header_pow(header or self.list_header()):
raise Exception("PoW check failed")
# make sure we are all on the same db
assert self.state.db.db == self.transactions.db.db
# use de/encoders to check type and validity
for name, typ, d in block_structure:
v = getattr(self, name)
assert utils.decoders[typ](utils.encoders[typ](v)) == v
# Basic consistency verifications
if not self.state.root_hash_valid():
raise Exception(
"State Merkle root not found in database! %r" % self)
if not self.transactions.root_hash_valid():
raise Exception(
"Transactions root not found in database! %r" % self)
if len(self.extra_data) > 1024:
raise Exception("Extra data cannot exceed 1024 bytes")
if self.coinbase == '':
raise Exception("Coinbase cannot be empty address")
def validate_uncles(self):
if utils.sha3(rlp.encode(self.uncles)) != self.uncles_hash:
return False
# Check uncle validity
ancestor_chain = [self]
# Uncle can have a block from 2-7 blocks ago as its parent
for i in [1, 2, 3, 4, 5, 6, 7]:
if ancestor_chain[-1].number > 0:
ancestor_chain.append(ancestor_chain[-1].get_parent())
ineligible = []
# Uncles of this block cannot be direct ancestors and cannot also
# be uncles included 1-6 blocks ago
for ancestor in ancestor_chain[1:]:
ineligible.extend(ancestor.uncles)
ineligible.extend([b.list_header() for b in ancestor_chain])
eligible_ancestor_hashes = [x.hash for x in ancestor_chain[2:]]
for uncle in self.uncles:
if not check_header_pow(uncle):
sys.stderr.write('1\n\n')
return False
# uncle's parent cannot be the block's own parent
prevhash = uncle[block_structure_rev['prevhash'][0]]
if prevhash not in eligible_ancestor_hashes:
logger.debug("%r: Uncle does not have a valid ancestor", self)
sys.stderr.write('2 ' + prevhash.encode('hex') + ' ' + str(map(lambda x: x.encode('hex'), eligible_ancestor_hashes)) + '\n\n')
return False
if uncle in ineligible:
sys.stderr.write('3\n\n')
logger.debug("%r: Duplicate uncle %r", self, utils.sha3(rlp.encode(uncle)).encode('hex'))
return False
ineligible.append(uncle)
return True
def is_genesis(self):
return self.prevhash == GENESIS_PREVHASH and \
self.nonce == GENESIS_NONCE
def check_proof_of_work(self, nonce):
H = self.list_header()
H[-1] = nonce
return check_header_pow(H)
@classmethod
def deserialize_header(cls, header_data):
if isinstance(header_data, (str, unicode)):
header_data = rlp.decode(header_data)
assert len(header_data) == len(block_structure)
kargs = {}
# Deserialize all properties
for i, (name, typ, default) in enumerate(block_structure):
kargs[name] = utils.decoders[typ](header_data[i])
return kargs
@classmethod
def deserialize(cls, rlpdata):
header_args, transaction_list, uncles = rlp.decode(rlpdata)
kargs = cls.deserialize_header(header_args)
kargs['header'] = header_args
kargs['transaction_list'] = transaction_list
kargs['uncles'] = uncles
# if we don't have the state we need to replay transactions
_db = db.DB(utils.get_db_path())
if len(kargs['state_root']) == 32 and kargs['state_root'] in _db:
return Block(**kargs)
elif kargs['prevhash'] == GENESIS_PREVHASH:
return Block(**kargs)
else: # no state, need to replay
try:
parent = get_block(kargs['prevhash'])
except KeyError:
raise UnknownParentException(kargs['prevhash'].encode('hex'))
return parent.deserialize_child(rlpdata)
@classmethod
def init_from_header(cls, rlpdata):
kargs = cls.deserialize_header(rlpdata)
kargs['transaction_list'] = None
kargs['uncles'] = None
return Block(**kargs)
def deserialize_child(self, rlpdata):
"""
deserialization w/ replaying transactions
"""
header_args, transaction_list, uncles = rlp.decode(rlpdata)
assert len(header_args) == len(block_structure)
kargs = dict(transaction_list=transaction_list, uncles=uncles)
# Deserialize all properties
for i, (name, typ, default) in enumerate(block_structure):
kargs[name] = utils.decoders[typ](header_args[i])
block = Block.init_from_parent(self, kargs['coinbase'],
extra_data=kargs['extra_data'],
timestamp=kargs['timestamp'],
uncles=uncles)
# replay transactions
for tx_lst_serialized, _state_root, _gas_used_encoded in \
transaction_list:
tx = transactions.Transaction.create(tx_lst_serialized)
# logger.debug('state:\n%s', utils.dump_state(block.state))
# logger.debug('applying %r', tx)
success, output = processblock.apply_transaction(block, tx)
#block.add_transaction_to_list(tx) # < this is done by processblock
# logger.debug('state:\n%s', utils.dump_state(block.state))
logger.debug('d %s %s', _gas_used_encoded, block.gas_used)
assert utils.decode_int(_gas_used_encoded) == block.gas_used, \
"Gas mismatch (ours %d, theirs %d) on block: %r" % \
                (block.gas_used, utils.decode_int(_gas_used_encoded), block.to_dict(False, True, True))
assert _state_root == block.state.root_hash, \
"State root mismatch (ours %r theirs %r) on block: %r" % \
(block.state.root_hash.encode('hex'),
_state_root.encode('hex'),
block.to_dict(False, True, True))
block.finalize()
block.uncles_hash = kargs['uncles_hash']
block.nonce = kargs['nonce']
block.min_gas_price = kargs['min_gas_price']
# checks
assert block.prevhash == self.hash
assert block.gas_used == kargs['gas_used']
assert block.gas_limit == kargs['gas_limit']
assert block.timestamp == kargs['timestamp']
assert block.difficulty == kargs['difficulty']
assert block.number == kargs['number']
assert block.extra_data == kargs['extra_data']
assert utils.sha3(rlp.encode(block.uncles)) == kargs['uncles_hash']
assert block.tx_list_root == kargs['tx_list_root']
assert block.state.root_hash == kargs['state_root'], (block.state.root_hash, kargs['state_root'])
return block
@classmethod
def hex_deserialize(cls, hexrlpdata):
return cls.deserialize(hexrlpdata.decode('hex'))
def mk_blank_acct(self):
if not hasattr(self, '_blank_acct'):
codehash = ''
self.state.db.put(codehash, '')
self._blank_acct = [utils.encode_int(0),
utils.encode_int(0),
trie.BLANK_ROOT,
codehash]
return self._blank_acct[:]
def get_acct(self, address):
if len(address) == 40:
address = address.decode('hex')
acct = rlp.decode(self.state.get(address)) or self.mk_blank_acct()
return tuple(utils.decoders[t](acct[i])
for i, (n, t, d) in enumerate(acct_structure))
# _get_acct_item(bin or hex, int) -> bin
def _get_acct_item(self, address, param):
''' get account item
:param address: account address, can be binary or hex string
:param param: parameter to get
'''
if param != 'storage' and address in self.caches[param]:
return self.caches[param][address]
return self.get_acct(address)[acct_structure_rev[param][0]]
# _set_acct_item(bin or hex, int, bin)
def _set_acct_item(self, address, param, value):
''' set account item
:param address: account address, can be binary or hex string
:param param: parameter to set
:param value: new value
'''
# logger.debug('set acct %r %r %d', address, param, value)
self.set_and_journal(param, address, value)
self.set_and_journal('all', address, True)
def set_and_journal(self, cache, index, value):
prev = self.caches[cache].get(index, None)
if prev != value:
self.journal.append([cache, index, prev, value])
self.caches[cache][index] = value
# _delta_item(bin or hex, int, int) -> success/fail
def _delta_item(self, address, param, value):
''' add value to account item
:param address: account address, can be binary or hex string
:param param: parameter to increase/decrease
:param value: can be positive or negative
'''
value = self._get_acct_item(address, param) + value
if value < 0:
return False
self._set_acct_item(address, param, value)
return True
def _add_transaction_to_list(self, tx_lst_serialized,
state_root, gas_used_encoded):
# adds encoded data # FIXME: the constructor should get objects
assert isinstance(tx_lst_serialized, list)
data = [tx_lst_serialized, state_root, gas_used_encoded]
self.transactions.update(
rlp.encode(utils.encode_int(self.transaction_count)),
rlp.encode(data))
self.transaction_count += 1
def add_transaction_to_list(self, tx):
tx_lst_serialized = rlp.decode(tx.serialize())
self._add_transaction_to_list(tx_lst_serialized,
self.state_root,
utils.encode_int(self.gas_used))
def _list_transactions(self):
# returns [[tx_lst_serialized, state_root, gas_used_encoded],...]
txlist = []
for i in range(self.transaction_count):
txlist.append(self.get_transaction(i))
return txlist
def get_transaction(self, num):
# returns [tx_lst_serialized, state_root, gas_used_encoded]
return rlp.decode(self.transactions.get(rlp.encode(utils.encode_int(num))))
def get_transactions(self):
return [transactions.Transaction.create(tx) for
tx, s, g in self._list_transactions()]
def get_nonce(self, address):
return self._get_acct_item(address, 'nonce')
def set_nonce(self, address, value):
return self._set_acct_item(address, 'nonce', value)
def increment_nonce(self, address):
return self._delta_item(address, 'nonce', 1)
def decrement_nonce(self, address):
return self._delta_item(address, 'nonce', -1)
def get_balance(self, address):
return self._get_acct_item(address, 'balance')
def set_balance(self, address, value):
self._set_acct_item(address, 'balance', value)
def delta_balance(self, address, value):
return self._delta_item(address, 'balance', value)
def transfer_value(self, from_addr, to_addr, value):
assert value >= 0
if self.delta_balance(from_addr, -value):
return self.delta_balance(to_addr, value)
return False
def get_code(self, address):
return self._get_acct_item(address, 'code')
def set_code(self, address, value):
self._set_acct_item(address, 'code', value)
def get_storage(self, address):
storage_root = self._get_acct_item(address, 'storage')
return trie.Trie(utils.get_db_path(), storage_root)
def get_storage_data(self, address, index):
if 'storage:'+address in self.caches:
if index in self.caches['storage:'+address]:
return self.caches['storage:'+address][index]
t = self.get_storage(address)
t.proof_mode = self.proof_mode
t.proof_nodes = self.proof_nodes
key = utils.zpad(utils.coerce_to_bytes(index), 32)
val = rlp.decode(t.get(key))
if self.proof_mode == RECORDING:
self.proof_nodes.extend(t.proof_nodes)
return utils.big_endian_to_int(val) if val else 0
def set_storage_data(self, address, index, val):
if 'storage:'+address not in self.caches:
self.caches['storage:'+address] = {}
self.set_and_journal('all', address, True)
self.set_and_journal('storage:'+address, index, val)
def commit_state(self):
changes = []
if not len(self.journal):
processblock.pblogger.log('delta', changes=[])
return
for address in self.caches['all']:
acct = rlp.decode(self.state.get(address.decode('hex'))) \
or self.mk_blank_acct()
for i, (key, typ, default) in enumerate(acct_structure):
if key == 'storage':
t = trie.Trie(utils.get_db_path(), acct[i])
t.proof_mode = self.proof_mode
t.proof_nodes = self.proof_nodes
for k, v in self.caches.get('storage:'+address, {}).iteritems():
enckey = utils.zpad(utils.coerce_to_bytes(k), 32)
val = rlp.encode(utils.int_to_big_endian(v))
changes.append(['storage', address, k, v])
if v:
t.update(enckey, val)
else:
t.delete(enckey)
acct[i] = t.root_hash
if self.proof_mode == RECORDING:
self.proof_nodes.extend(t.proof_nodes)
else:
if address in self.caches[key]:
v = self.caches[key].get(address, default)
changes.append([key, address, v])
acct[i] = utils.encoders[acct_structure[i][1]](v)
self.state.update(address.decode('hex'), rlp.encode(acct))
if self.proof_mode == RECORDING:
self.proof_nodes.extend(self.state.proof_nodes)
self.state.proof_nodes = []
if processblock.pblogger.log_state_delta:
processblock.pblogger.log('delta', changes=changes)
self.reset_cache()
def del_account(self, address):
self.commit_state()
if len(address) == 40:
address = address.decode('hex')
self.state.delete(address)
def account_to_dict(self, address, with_storage_root=False,
with_storage=True, for_vmtest=False):
if with_storage_root:
assert len(self.journal) == 0
med_dict = {}
for i, val in enumerate(self.get_acct(address)):
name, typ, default = acct_structure[i]
key = acct_structure[i][0]
if name == 'storage':
strie = trie.Trie(utils.get_db_path(), val)
if with_storage_root:
med_dict['storage_root'] = strie.get_root_hash().encode('hex')
else:
med_dict[key] = self.caches[key].get(address, utils.printers[typ](val))
if with_storage:
med_dict['storage'] = {}
d = strie.to_dict()
subcache = self.caches.get('storage:'+address, {})
subkeys = [utils.zpad(utils.coerce_to_bytes(kk), 32) for kk in subcache.keys()]
for k in d.keys() + subkeys:
v = d.get(k, None)
v2 = subcache.get(utils.big_endian_to_int(k), None)
hexkey = '0x'+utils.zunpad(k).encode('hex')
if v2 is not None:
if v2 != 0:
med_dict['storage'][hexkey] = \
'0x'+utils.int_to_big_endian(v2).encode('hex')
elif v is not None:
med_dict['storage'][hexkey] = '0x'+rlp.decode(v).encode('hex')
return med_dict
def reset_cache(self):
self.caches = {
'all': {},
'balance': {},
'nonce': {},
'code': {},
}
self.journal = []
# Revert computation
def snapshot(self):
return {
'state': self.state.root_hash,
'gas': self.gas_used,
'txs': self.transactions,
'txcount': self.transaction_count,
'postqueue': copy.copy(self.postqueue),
'suicides': self.suicides,
'suicides_size': len(self.suicides),
'journal': self.journal, # pointer to reference, so is not static
'journal_size': len(self.journal)
}
def revert(self, mysnapshot):
self.journal = mysnapshot['journal']
logger.debug('reverting')
while len(self.journal) > mysnapshot['journal_size']:
cache, index, prev, post = self.journal.pop()
logger.debug('%r %r %r %r', cache, index, prev, post)
if prev is not None:
self.caches[cache][index] = prev
else:
del self.caches[cache][index]
self.suicides = mysnapshot['suicides']
while len(self.suicides) > mysnapshot['suicides_size']:
self.suicides.pop()
self.state.root_hash = mysnapshot['state']
self.gas_used = mysnapshot['gas']
self.transactions = mysnapshot['txs']
self.transaction_count = mysnapshot['txcount']
self.postqueue = mysnapshot['postqueue']
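    # Usage sketch (illustrative flow, not lifted from processblock): callers can
    # take a snapshot before applying a transaction and roll back on failure, e.g.
    #   snap = block.snapshot()
    #   try:
    #       processblock.apply_transaction(block, tx)
    #   except Exception:
    #       block.revert(snap)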
def finalize(self):
"""
Apply rewards
We raise the block's coinbase account by Rb, the block reward,
        and the coinbase of each uncle by 7/8 of that.
Rb = 1500 finney
"""
self.delta_balance(self.coinbase,
BLOCK_REWARD + NEPHEW_REWARD * len(self.uncles))
for uncle_rlp in self.uncles:
uncle_data = Block.deserialize_header(uncle_rlp)
self.delta_balance(uncle_data['coinbase'], UNCLE_REWARD)
self.commit_state()
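    # Worked example (a sketch; the reward constants are defined earlier in this
    # module): with two included uncles, the miner's coinbase is credited
    # BLOCK_REWARD + 2 * NEPHEW_REWARD, and each uncle's coinbase is credited
    # UNCLE_REWARD, i.e. 7/8 of BLOCK_REWARD per the docstring above.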
def serialize_header_without_nonce(self):
return rlp.encode(self.list_header(exclude=['nonce']))
def get_state_root(self):
self.commit_state()
return self.state.root_hash
def set_state_root(self, state_root_hash):
self.state = trie.Trie(utils.get_db_path(), state_root_hash)
self.reset_cache()
state_root = property(get_state_root, set_state_root)
def get_tx_list_root(self):
return self.transactions.root_hash
tx_list_root = property(get_tx_list_root)
def list_header(self, exclude=[]):
header = []
for name, typ, default in block_structure:
# print name, typ, default , getattr(self, name)
if name not in exclude:
header.append(utils.encoders[typ](getattr(self, name)))
return header
def serialize(self):
# Serialization method; should act as perfect inverse function of the
# constructor assuming no verification failures
return rlp.encode([self.list_header(),
self._list_transactions(),
self.uncles])
def hex_serialize(self):
return self.serialize().encode('hex')
def serialize_header(self):
return rlp.encode(self.list_header())
def hex_serialize_header(self):
return rlp.encode(self.list_header()).encode('hex')
def to_dict(self, with_state=False, full_transactions=False,
with_storage_roots=False, with_uncles=False):
"""
serializes the block
with_state: include state for all accounts
        full_transactions: include serialized tx (hashes otherwise)
        with_storage_roots: include the storage root of each account
with_uncles: include uncle hashes
"""
b = {}
for name, typ, default in block_structure:
b[name] = utils.printers[typ](getattr(self, name))
txlist = []
for i in range(self.transaction_count):
tx_rlp = self.transactions.get(rlp.encode(utils.encode_int(i)))
tx, msr, gas = rlp.decode(tx_rlp)
if full_transactions:
txjson = transactions.Transaction.create(tx).to_dict()
else:
txjson = utils.sha3(rlp.descend(tx_rlp, 0)).encode('hex') # tx hash
txlist.append({
"tx": txjson,
"medstate": msr.encode('hex'),
"gas": str(utils.decode_int(gas))
})
b["transactions"] = txlist
if with_state:
state_dump = {}
for address, v in self.state.to_dict().iteritems():
state_dump[address.encode('hex')] = \
self.account_to_dict(address, with_storage_roots)
b['state'] = state_dump
if with_uncles:
b['uncles'] = [utils.sha3(rlp.encode(u)).encode('hex') for u in self.uncles]
return b
def _hash(self):
return utils.sha3(self.serialize_header())
@property
def hash(self):
return self._hash()
def hex_hash(self):
return self.hash.encode('hex')
def get_parent(self):
if self.number == 0:
raise UnknownParentException('Genesis block has no parent')
try:
parent = get_block(self.prevhash)
except KeyError:
raise UnknownParentException(self.prevhash.encode('hex'))
#assert parent.state.db.db == self.state.db.db
return parent
def has_parent(self):
try:
self.get_parent()
return True
except UnknownParentException:
return False
def chain_difficulty(self):
# calculate the summarized_difficulty
if self.is_genesis():
return self.difficulty
elif 'difficulty:'+self.hex_hash() in self.state.db:
return utils.decode_int(
self.state.db.get('difficulty:'+self.hex_hash()))
else:
_idx, _typ, _ = block_structure_rev['difficulty']
o = self.difficulty + self.get_parent().chain_difficulty()
o += sum([utils.decoders[_typ](u[_idx]) for u in self.uncles])
self.state.db.put('difficulty:'+self.hex_hash(), utils.encode_int(o))
return o
def __eq__(self, other):
return isinstance(other, (Block, CachedBlock)) and self.hash == other.hash
def __ne__(self, other):
return not self.__eq__(other)
def __gt__(self, other):
return self.number > other.number
def __lt__(self, other):
return self.number < other.number
def __repr__(self):
return '<Block(#%d %s %s)>' % (self.number,
self.hex_hash()[:4],
self.prevhash.encode('hex')[:4])
@classmethod
def init_from_parent(cls, parent, coinbase, extra_data='',
timestamp=int(time.time()), uncles=[]):
return Block(
prevhash=parent.hash,
uncles_hash=utils.sha3(rlp.encode(uncles)),
coinbase=coinbase,
state_root=parent.state.root_hash,
tx_list_root=trie.BLANK_ROOT,
difficulty=calc_difficulty(parent, timestamp),
number=parent.number + 1,
min_gas_price=0,
gas_limit=calc_gaslimit(parent),
gas_used=0,
timestamp=timestamp,
extra_data=extra_data,
nonce='',
transaction_list=[],
uncles=uncles)
def set_proof_mode(self, pm, pmnodes=None):
self.proof_mode = pm
self.state.proof_mode = pm
self.proof_nodes = pmnodes or []
self.state.proof_nodes = pmnodes or []
class CachedBlock(Block):
# note: immutable refers to: do not manipulate!
_hash_cached = None
def _set_acct_item(self): raise NotImplementedError
def _add_transaction_to_list(self): raise NotImplementedError
def set_state_root(self): raise NotImplementedError
def revert(self): raise NotImplementedError
def commit_state(self): pass
def _hash(self):
if not self._hash_cached:
self._hash_cached = Block._hash(self)
return self._hash_cached
@classmethod
def create_cached(cls, blk):
blk.__class__ = CachedBlock
return blk
@lru_cache(500)
def get_block(blockhash):
"""
    Assumption: blocks loaded from the db are not manipulated
-> can be cached including hash
"""
return CachedBlock.create_cached(Block.deserialize(db.DB(utils.get_db_path()).get(blockhash)))
def has_block(blockhash):
return blockhash in db.DB(utils.get_db_path())
def genesis(start_alloc=GENESIS_INITIAL_ALLOC, difficulty=INITIAL_DIFFICULTY):
# https://ethereum.etherpad.mozilla.org/11
block = Block(prevhash=GENESIS_PREVHASH, coinbase=GENESIS_COINBASE,
tx_list_root=trie.BLANK_ROOT,
difficulty=difficulty, nonce=GENESIS_NONCE,
gas_limit=GENESIS_GAS_LIMIT)
for addr, balance in start_alloc.iteritems():
block.set_balance(addr, balance)
block.state.db.commit()
return block
def dump_genesis_block_tests_data():
import json
g = genesis()
data = dict(
genesis_state_root=g.state_root.encode('hex'),
genesis_hash=g.hex_hash(),
genesis_rlp_hex=g.serialize().encode('hex'),
initial_alloc=dict()
)
for addr, balance in GENESIS_INITIAL_ALLOC.iteritems():
data['initial_alloc'][addr] = str(balance)
print json.dumps(data, indent=1)
|
jnnk/pyethereum
|
pyethereum/blocks.py
|
Python
|
mit
| 32,057
|
# Copyright (c) 2014-2015 Cedric Bellegarde <cedric.bellegarde@adishatz.org>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from gi.repository import Gtk, GLib, Gio
from _thread import start_new_thread
from gettext import gettext as _
from lollypop.tunein import TuneIn
from lollypop.define import Lp, ArtSize
from lollypop.art import Art
class TuneinPopover(Gtk.Popover):
"""
        Popover showing TuneIn radios
"""
def __init__(self, radio_manager):
"""
Init Popover
            @param radio_manager as RadioManager
"""
Gtk.Popover.__init__(self)
self._tunein = TuneIn()
self._radio_manager = radio_manager
self._current_url = None
self._previous_urls = []
self._current_items = []
self._stack = Gtk.Stack()
self._stack.set_property('expand', True)
self._stack.show()
builder = Gtk.Builder()
builder.add_from_resource('/org/gnome/Lollypop/TuneinPopover.ui')
builder.connect_signals(self)
widget = builder.get_object('widget')
widget.attach(self._stack, 0, 2, 4, 1)
self._back_btn = builder.get_object('back_btn')
self._home_btn = builder.get_object('home_btn')
self._label = builder.get_object('label')
self._view = Gtk.FlowBox()
self._view.set_selection_mode(Gtk.SelectionMode.NONE)
self._view.set_max_children_per_line(100)
self._view.set_property('row-spacing', 10)
self._view.set_property('expand', True)
self._view.show()
builder.get_object('viewport').add(self._view)
builder.get_object('viewport').set_property('margin', 10)
self._scrolled = builder.get_object('scrolled')
self._spinner = builder.get_object('spinner')
self._not_found = builder.get_object('notfound')
self._stack.add(self._spinner)
self._stack.add(self._not_found)
self._stack.add(self._scrolled)
self._stack.set_visible_child(self._spinner)
self.add(widget)
def populate(self, url=None):
"""
Populate views
@param url as string
"""
if not self._view.get_children():
self._current_url = url
self._clear()
self._back_btn.set_sensitive(False)
self._home_btn.set_sensitive(False)
self._label.set_text(_("Please wait..."))
start_new_thread(self._populate, (url,))
def do_show(self):
"""
Resize popover and set signals callback
"""
size_setting = Lp.settings.get_value('window-size')
if isinstance(size_setting[1], int):
self.set_size_request(700, size_setting[1]*0.7)
else:
self.set_size_request(700, 400)
Gtk.Popover.do_show(self)
#######################
# PRIVATE #
#######################
def _show_not_found(self):
"""
Show not found message
"""
self._label.set_text(_("Can't connect to TuneIn..."))
self._stack.set_visible_child(self._not_found)
self._home_btn.set_sensitive(True)
def _populate(self, url):
"""
Same as populate()
@param url as string
@thread safe
"""
if url is None:
self._current_items = self._tunein.get_items()
else:
self._current_items = self._tunein.get_items(url)
if self._current_items:
self._add_items()
else:
GLib.idle_add(self._show_not_found)
def _add_items(self):
"""
Add current items
@thread safe
"""
for item in self._current_items:
GLib.idle_add(self._add_item, item)
def _add_item(self, item):
"""
Add item
@param item as TuneItem
"""
child = Gtk.Grid()
child.set_property('halign', Gtk.Align.START)
child.show()
if item.TYPE == "audio":
button = Gtk.Button.new_from_icon_name('list-add-symbolic',
Gtk.IconSize.MENU)
button.connect('clicked', self._on_button_clicked, item)
button.set_relief(Gtk.ReliefStyle.NONE)
button.set_tooltip_text(_("Add"))
button.show()
child.add(button)
link = Gtk.LinkButton.new_with_label(item.URL, item.TEXT)
link.set_tooltip_text(_("Play"))
link.connect('activate-link', self._on_activate_link, item)
link.show()
child.add(link)
self._view.add(child)
        # Hide the spinner if it is still the visible child
if self._spinner == self._stack.get_visible_child():
self._stack.set_visible_child(self._scrolled)
self._label.set_text(_("Browse themes and add a new radio"))
if self._current_url is not None:
self._back_btn.set_sensitive(True)
self._home_btn.set_sensitive(True)
def _clear(self):
"""
Clear view
"""
for child in self._view.get_children():
self._view.remove(child)
child.destroy()
def _add_radio(self, item):
"""
Add selected radio
@param item as TuneIn Item
"""
# Get cover art
try:
cache = Art._RADIOS_PATH
s = Gio.File.new_for_uri(item.LOGO)
d = Gio.File.new_for_path(cache+"/%s.png" %
item.TEXT.replace('/', '-'))
s.copy(d, Gio.FileCopyFlags.OVERWRITE, None, None)
except Exception as e:
print("TuneinPopover::_add_radio: %s" % e)
url = item.URL
        # TuneIn embeds the URI in ashx files, so fetch the content if possible
try:
f = Gio.File.new_for_uri(url)
(status, data, tag) = f.load_contents()
if status:
url = data.decode('utf-8')
except Exception as e:
print("TuneinPopover::_add_radio: %s" % e)
self._radio_manager.add(item.TEXT.replace('/', '-'))
self._radio_manager.add_track(item.TEXT.replace('/', '-'),
url)
def _on_back_btn_clicked(self, btn):
"""
Go to previous URL
@param btn as Gtk.Button
"""
url = None
self._current_url = None
if self._previous_urls:
url = self._previous_urls.pop()
self._stack.set_visible_child(self._spinner)
self._clear()
self.populate(url)
def _on_home_btn_clicked(self, btn):
"""
Go to root URL
@param btn as Gtk.Button
"""
self._current_url = None
self._previous_urls = []
self._stack.set_visible_child(self._spinner)
self._clear()
self.populate()
def _on_activate_link(self, link, item):
"""
Update header with new link
@param link as Gtk.LinkButton
@param item as TuneIn Item
"""
if item.TYPE == "link":
self._stack.set_visible_child(self._spinner)
self._clear()
self._scrolled.get_vadjustment().set_value(0.0)
if self._current_url is not None:
self._previous_urls.append(self._current_url)
self.populate(item.URL)
elif item.TYPE == "audio":
for i in self._current_items:
Lp.player.load_external(i.URL, i.TEXT)
Lp.player.play_this_external(item.URL)
            # Only the toolbar will use this artwork, so only cache the small size
if Gio.NetworkMonitor.get_default().get_network_available():
start_new_thread(Lp.art.copy_uri_to_cache, (item.LOGO,
item.TEXT,
ArtSize.SMALL))
return True
def _on_button_clicked(self, button, item):
"""
Play the radio
@param link as Gtk.Button
@param item as TuneIn Item
"""
start_new_thread(self._add_radio, (item,))
self.hide()
|
gigitux/lollypop
|
src/pop_tunein.py
|
Python
|
gpl-3.0
| 8,812
|
#!/usr/bin/env python
import curses
import random
import os
from samplebase import SampleBase
from threading import Thread, Lock
from golbase import GameOfLifeBase, Cell
COLUMNS = 'qwertyuiopasdfgh'
class KeyboardInput(GameOfLifeBase):
def __init__(self, *args, **kwargs):
super(KeyboardInput, self).__init__(*args, **kwargs)
self.animate = False
self.lock = Lock()
self.configure_args()
self.canvas = self.matrix.CreateFrameCanvas()
self.initializeCells()
self.win = None
self.animate_thread = None
# self.block_switch()
self.gosper_gun()
# self.drawCells()
def block_switch(self):
x = 8
y = 15
self.cells[x + 11][y + 6].alive = True
self.cells[x + 13][y + 6].alive = True
self.cells[x + 13][y + 5].alive = True
self.cells[x + 15][y + 4].alive = True
self.cells[x + 15][y + 3].alive = True
self.cells[x + 15][y + 2].alive = True
self.cells[x + 17][y + 3].alive = True
self.cells[x + 17][y + 2].alive = True
self.cells[x + 17][y + 1].alive = True
self.cells[x + 18][y + 2].alive = True
def gosper_gun(self):
x, y = 20, 7
self.cells[x][y + 5].alive = True
self.cells[x + 1][y + 5].alive = True
self.cells[x][y + 6].alive = True
self.cells[x + 1][y + 6].alive = True
self.cells[x + 10][y + 5].alive = True
self.cells[x + 10][y + 6].alive = True
self.cells[x + 10][y + 7].alive = True
self.cells[x + 11][y + 4].alive = True
self.cells[x + 11][y + 8].alive = True
self.cells[x + 12][y + 3].alive = True
self.cells[x + 12][y + 9].alive = True
self.cells[x + 13][y + 3].alive = True
self.cells[x + 13][y + 9].alive = True
self.cells[x + 14][y + 6].alive = True
self.cells[x + 15][y + 4].alive = True
self.cells[x + 15][y + 8].alive = True
self.cells[x + 16][y + 5].alive = True
self.cells[x + 16][y + 6].alive = True
self.cells[x + 16][y + 7].alive = True
self.cells[x + 17][y + 6].alive = True
self.cells[x + 20][y + 5].alive = True
self.cells[x + 20][y + 4].alive = True
self.cells[x + 20][y + 3].alive = True
self.cells[x + 21][y + 5].alive = True
self.cells[x + 21][y + 4].alive = True
self.cells[x + 21][y + 3].alive = True
self.cells[x + 22][y + 2].alive = True
self.cells[x + 22][y + 6].alive = True
self.cells[x + 24][y + 2].alive = True
self.cells[x + 24][y + 1].alive = True
self.cells[x + 24][y + 6].alive = True
self.cells[x + 24][y + 7].alive = True
self.cells[x + 34][y + 4].alive = True
self.cells[x + 34][y + 5].alive = True
self.cells[x + 35][y + 4].alive = True
self.cells[x + 35][y + 5].alive = True
def makeGlider(self, x, y):
self.cells[x + 0][y + 2].alive = True
self.cells[x + 1][y + 2].alive = True
self.cells[x + 2][y + 2].alive = True
self.cells[x + 2][y + 1].alive = True
self.cells[x + 1][y + 0].alive = True
def red_glider(self):
for i in range(0, 60, 5):
for j in range(0, 27, 5):
self.makeGlider(i, j)
def acorn(self):
x = 29
self.cells[x][16].alive = True
self.cells[x + 1][16].alive = True
self.cells[x + 1][14].alive = True
self.cells[x + 3][15].alive = True
self.cells[x + 3][16].alive = True
self.cells[x + 4][16].alive = True
self.cells[x + 5][16].alive = True
def stop(self):
self.animate = False
if self.animate_thread is not None:
self.animate_thread.join()
self.clear_screen()
def clear_screen(self):
self.win.addstr("\nClear: {0} - {1}".format(self.canvas.width, self.canvas.height))
for i in range(self.canvas.width):
for j in range(self.canvas.height):
self.canvas.SetPixel(i, j, 0, 0, 0)
self.canvas = self.matrix.SwapOnVSync(self.canvas)
def get_color(self):
rgb = []
for i in range(3):
rgb.append(random.randint(0, 255))
return tuple(rgb)
def run(self, key=""):
self.drawCells()
self.canvas = self.matrix.SwapOnVSync(self.canvas)
self.evolve()
def main(self, win):
self.animate = True
win.nodelay(True)
self.win = win
        prev_key, key = "", ""
iterations = 0
# for _ in range(75):
# self.evolve()
# self.drawCells()
# self.canvas = self.matrix.SwapOnVSync(self.canvas)
win.clear()
win.addstr("Detected key:")
while True:
try:
key = win.getkey()
win.clear()
win.addstr(str(key))
if key == os.linesep:
self.stop()
break
key = str(key)
if key:
# with self.lock:
# if self.animate:
# self.stop()
# self.animate = True
win.addstr("\nIterations: {0}".format(iterations))
self.run()
iterations += 1
except Exception as e:
# No input
pass
# Main function
if __name__ == "__main__":
keyboard_input = KeyboardInput()
curses.wrapper(keyboard_input.main)
|
yanigisawa/coffee-scale
|
pubsub/animation/gol-keyboard.py
|
Python
|
mit
| 5,570
|
# This file is part of the ISIS IBEX application.
# Copyright (C) 2012-2016 Science & Technology Facilities Council.
# All rights reserved.
#
# This program is distributed in the hope that it will be useful.
# This program and the accompanying materials are made available under the
# terms of the Eclipse Public License v1.0 which accompanies this distribution.
# EXCEPT AS EXPRESSLY SET FORTH IN THE ECLIPSE PUBLIC LICENSE V1.0, THE PROGRAM
# AND ACCOMPANYING MATERIALS ARE PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND. See the Eclipse Public License v1.0 for more details.
#
# You should have received a copy of the Eclipse Public License v1.0
# along with this program; if not, you can obtain a copy from
# https://www.eclipse.org/org/documents/epl-v10.php or
# http://opensource.org/licenses/eclipse-1.0.php
from abc import ABCMeta, abstractmethod
class OnTheFlyPvInterface(metaclass=ABCMeta):
    """ This is an abstract base class to ensure that any class that needs to handle on-the-fly PVs
    implements all the correct methods.
    """
def __init__(self):
self.pvs_to_read = []
self.pvs_to_write = []
def read_pv_exists(self, pv):
""" Checks whether the read PV is handled by this class.
If the read PV is handled by a monitor then this MUST return False
Args:
pv (string): The PV name
Returns:
bool: Whether the PV exists for reading
"""
return pv in self.pvs_to_read
def write_pv_exists(self, pv):
""" Checks whether the write PV is handled by this class.
Args:
pv (string): The PV name
Returns:
bool: Whether the PV exists for writing
"""
return pv in self.pvs_to_write
@abstractmethod
def handle_pv_write(self, pv: str, data: str):
""" Handles the request to write to the PV.
Note: implementations of this method MUST run on a separate thread.
Args:
pv: The PV's name
data: The value to write
"""
pass
@abstractmethod
def handle_pv_read(self, pv):
""" Handles the request to read the PV value
Args:
pv (string): The PV's name
Returns:
object: The value to return to the requesting client
"""
pass
@abstractmethod
def update_monitors(self):
""" Updates any monitors associated with the class.
"""
pass
@abstractmethod
def on_config_change(self, full_init=False):
""" Performs any tasks that need to be carried out on initialisation.
For example: on loading a new configuration.
Args:
full_init (bool): Whether it is a full initialisation
"""
pass
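# Minimal sketch of a concrete implementation (hypothetical PV name and value,
# not part of the BlockServer code base; shown only to illustrate the contract
# defined above):
class ExampleVersionPv(OnTheFlyPvInterface):
    def __init__(self):
        super(ExampleVersionPv, self).__init__()
        # Register the PVs this class answers for.
        self.pvs_to_read = ["EXAMPLE:VERSION"]
        self._version = "0.0.0"

    def handle_pv_write(self, pv: str, data: str):
        # Nothing is writable in this sketch; a real implementation would hand
        # the work off to a separate thread, as required by the docstring above.
        pass

    def handle_pv_read(self, pv):
        return self._version

    def update_monitors(self):
        pass

    def on_config_change(self, full_init=False):
        pass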
|
ISISComputingGroup/EPICS-inst_servers
|
BlockServer/core/on_the_fly_pv_interface.py
|
Python
|
bsd-3-clause
| 2,822
|
from datamodel import Library, Version, Status, VersionCache, CollectionReference, Dependency
from google.appengine.ext import ndb
from test_base import TestBase
class VersionCacheTests(TestBase):
    @ndb.toplevel
    def test_versions_for_key(self):
library_key = ndb.Key(Library, 'a/b')
Version(id='v2.0.0', sha='x', status=Status.ready, parent=library_key).put()
Version(id='v1.0.0', sha='x', status=Status.ready, parent=library_key).put()
Version(id='v3.0.0', sha='x', status=Status.ready, parent=library_key).put()
Version(id='v3.0.X', sha='x', status=Status.ready, parent=library_key).put()
Version(id='v4.0.0', sha='x', status=Status.error, parent=library_key).put()
Version(id='v5.0.0', sha='x', status=Status.pending, parent=library_key).put()
Version(id='xxx', sha='x', status=Status.ready, parent=library_key).put()
versions = yield Library.uncached_versions_for_key_async(library_key)
self.assertEqual(versions, ['v1.0.0', 'v2.0.0', 'v3.0.0'])
@ndb.toplevel
def test_version_cache(self):
library_key = ndb.Key(Library, 'a/b')
Version(id='v2.0.0', sha='x', status=Status.ready, parent=library_key).put()
Version(id='v1.0.0', sha='x', status=Status.ready, parent=library_key).put()
Version(id='v3.0.0', sha='x', status=Status.ready, parent=library_key).put()
Version(id='v3.0.X', sha='x', status=Status.ready, parent=library_key).put()
Version(id='v4.0.0', sha='x', status=Status.error, parent=library_key).put()
Version(id='v5.0.0', sha='x', status=Status.pending, parent=library_key).put()
Version(id='xxx', sha='x', status=Status.ready, parent=library_key).put()
versions = yield Library.versions_for_key_async(library_key)
self.assertEqual(versions, [])
latest_changed = VersionCache.update(library_key)
self.assertTrue(latest_changed)
versions = yield Library.versions_for_key_async(library_key)
self.assertEqual(versions, ['v1.0.0', 'v2.0.0', 'v3.0.0', 'v4.0.0'])
Version(id='v6.0.0', sha='x', status=Status.ready, parent=library_key).put()
latest_changed = VersionCache.update(library_key)
self.assertTrue(latest_changed)
versions = yield Library.versions_for_key_async(library_key)
self.assertEqual(versions, ['v1.0.0', 'v2.0.0', 'v3.0.0', 'v4.0.0', 'v6.0.0'])
class CollectionReferenceTests(TestBase):
@ndb.toplevel
def test_stale_ref_is_removed(self):
# Stale since the collection version doesn't actually exist.
collection_v0 = ndb.Key(Library, 'collection/1', Version, 'v0.5.0')
element_key = ndb.Key(Library, 'ele/ment')
element_v1 = Version(id='v1.0.0', sha='x', status=Status.ready, parent=element_key).put()
ref0 = CollectionReference.ensure(element_key, collection_v0, '^1.0.0')
collections = yield Version.collections_for_key_async(element_v1)
collection_keys = [collection.key for collection in collections]
self.assertIsNone(ref0.get())
self.assertEqual(collection_keys, [])
@ndb.toplevel
def test_latest_matching_collection_version_is_returned(self):
collection_key = ndb.Key(Library, 'collection/1')
collection_v1 = Version(id='v1.0.0', sha='x', status=Status.ready, parent=collection_key).put()
collection_v2 = Version(id='v2.0.0', sha='x', status=Status.ready, parent=collection_key).put()
collection_v3 = Version(id='v3.0.0', sha='x', status=Status.ready, parent=collection_key).put()
element_key = ndb.Key(Library, 'ele/ment')
element_v1 = Version(id='v1.0.0', sha='x', status=Status.ready, parent=element_key).put()
CollectionReference.ensure(element_key, collection_v1, '^1.0.0')
CollectionReference.ensure(element_key, collection_v2, '^1.0.0')
CollectionReference.ensure(element_key, collection_v3, '^2.0.0')
collections = yield Version.collections_for_key_async(element_v1)
collection_keys = [collection.key for collection in collections]
# Only latest matching version of the collection should be present.
self.assertEqual(collection_keys, [
collection_v2,
])
class DependencyTests(TestBase):
def test_from_string(self):
dependency = Dependency.from_string('owner/repo')
self.assertEqual(dependency.owner, 'owner')
self.assertEqual(dependency.repo, 'repo')
self.assertEqual(dependency.version, '*')
dependency = Dependency.from_string('https://github.com/owner/repo.git#master')
self.assertEqual(dependency.owner, 'owner')
self.assertEqual(dependency.repo, 'repo')
self.assertEqual(dependency.version, 'master')
dependency = Dependency.from_string('https://github.com/owner/repo')
self.assertEqual(dependency.owner, 'owner')
self.assertEqual(dependency.repo, 'repo')
self.assertEqual(dependency.version, '*')
class LibraryGithubFromUrl(TestBase):
def test_from_url(self):
self.assertEqual(Library.github_from_url('owner/repo'), ('owner', 'repo'))
self.assertEqual(Library.github_from_url('git+https://github.com/owner/repo.git'), ('owner', 'repo'))
self.assertEqual(Library.github_from_url('git://github.com/owner/repo.git'), ('owner', 'repo'))
|
webcomponents/webcomponents.org
|
src/datamodel_test.py
|
Python
|
apache-2.0
| 5,078
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.api.definitions import port
from neutron_lib.tests.unit.api.definitions import base
class PortDefinitionTestCase(base.DefinitionBaseTestCase):
extension_module = port
extension_attributes = ()
|
openstack/neutron-lib
|
neutron_lib/tests/unit/api/definitions/test_port.py
|
Python
|
apache-2.0
| 793
|
from itertools import product
class Factor (object):
"""
Clase Factor para distribuciones de probabilidad conjuntas que implementa
las operaciones Multiplicación, Reducción, Normalización y Marginalización.
"""
def __init__ (self, variables, probabilidades):
"""
        Creates a new factor with the given list of variables and the
        probabilities corresponding to each row of the factor, ordered
        according to the listed variables and the cardinality of their support.
        :param variables: The list of variables of the factor.
        :param probabilidades: The probabilities of the factor (one entry per
        row).
        """
        # List of key-value pairs:
        # Key: name of the variable, e.g. "X"
        # Value: cardinality of the variable's support, e.g. 3 -> [0, 1, 2]
        self.variables = variables
        # List of numbers:
        # Rows of the factor, ordered in the order in which the variables
        # appear in self.variables
self.probabilidades = probabilidades
def __indice (self, variable):
        # Looks up the index of a variable in the factor's variable list.
        # Raises ValueError if the variable is not found.
i = 0
for (k, _) in self.variables:
if k == variable:
return i
i += 1
        raise ValueError ('The variable %s is not in the factor' % variable)
    @staticmethod
    def multiplicacion (factor1, factor2):
        """
        Multiplies the given factors, FACTOR1 and FACTOR2, and returns the
        result.
        :param factor1: The first factor.
        :param factor2: The second factor.
        """
pass
def reduccion (self, variable, valor):
"""
        Reduces the factor given a variable and the value that variable must
        take.
        :param variable: The variable to reduce on.
        :param valor: The value the variable must take.
        """
indice = self.__indice (variable)
c = self.variables[indice][1]
variables = [(k, v) for (k, v) in self.variables if k != variable]
sop = [v for (k, v) in self.variables]
gaps = 1
m = len (sop)
for i in range (m):
gaps *= sop[m-(i+1)]
if m-(i+1) == indice:
break
        probabilidades = []
        # Select, block by block, the entries where the variable equals `valor`.
        m = gaps // c
        num_bloques = len (self.probabilidades) // gaps
        for j in range (num_bloques):
            for i in range (m):
                probabilidades.append (self.probabilidades[gaps*j + m*valor + i])
return Factor (variables, probabilidades)
def normalizar (self):
"""
        Normalizes the factor.
"""
t = sum (self.probabilidades)
self.probabilidades = [x / t for x in self.probabilidades]
def marginalizar (self, variable):
"""
        Marginalizes a variable out of the factor.
        :param variable: The variable to marginalize.
"""
pass
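if __name__ == '__main__':
    # Minimal usage sketch (illustrative values only): a joint distribution
    # P(A, B) over two binary variables, with rows ordered so that the last
    # listed variable varies fastest: (A=0,B=0), (A=0,B=1), (A=1,B=0), (A=1,B=1).
    p_ab = Factor ([('A', 2), ('B', 2)], [0.3, 0.2, 0.1, 0.4])
    # Reduce on the evidence B = 1, then normalize to obtain P(A | B = 1).
    p_a = p_ab.reduccion ('B', 1)
    p_a.normalizar ()
    print (p_a.probabilidades)   # -> [0.333..., 0.666...]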
|
Gilberto-Lopez/Inteligencia-Artificial
|
Practica08/Factor.py
|
Python
|
lgpl-3.0
| 2,616
|
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Runner of the AQuaDQN agent."""
from absl import app
from absl import flags
import acme
from acme import specs
from acme.agents.jax import dqn
from acme.jax.layouts import local_layout
from acme.utils import loggers
import jax
from aquadem import builder as aquadem_builder
from aquadem import config
from aquadem import networks as aquadem_networks
from aquadem import utils
FLAGS = flags.FLAGS
flags.DEFINE_string('workdir', '/tmp/aquadqn', 'Log directory')
flags.DEFINE_string('env_name', 'door-human-v1', 'What environment to run')
flags.DEFINE_integer('num_demonstrations', None,
'Number of expert demonstrations to use.')
flags.DEFINE_integer('num_steps', 1000000,
'Number of env steps to run training for.')
flags.DEFINE_integer('eval_every', 10000, 'Evaluation frequency.')
flags.DEFINE_integer('seed', 0, 'Seed of the RL agent.')
def main(_):
# Create an environment, grab the spec.
environment = utils.make_environment(task=FLAGS.env_name)
aqua_config = config.AquademConfig()
spec = specs.make_environment_spec(environment)
discretized_spec = aquadem_builder.discretize_spec(spec,
aqua_config.num_actions)
# Create AQuaDem builder.
loss_fn = dqn.losses.MunchausenQLearning(max_abs_reward=100.)
dqn_config = dqn.DQNConfig(
samples_per_insert_tolerance_rate=float('inf'),
min_replay_size=1,
n_step=3,
num_sgd_steps_per_step=8,
learning_rate=1e-4,
samples_per_insert=256)
rl_agent = dqn.DQNBuilder(config=dqn_config, loss_fn=loss_fn)
make_demonstrations = utils.get_make_demonstrations_fn(
FLAGS.env_name, FLAGS.num_demonstrations, FLAGS.seed)
builder = aquadem_builder.AquademBuilder(
rl_agent=rl_agent,
config=aqua_config,
make_demonstrations=make_demonstrations)
# Create networks.
q_network = aquadem_networks.make_q_network(
spec=discretized_spec,)
networks = aquadem_networks.make_action_candidates_network(
spec=spec,
num_actions=aqua_config.num_actions,
discrete_rl_networks=q_network)
exploration_epsilon = 0.01
discrete_policy = dqn.default_behavior_policy(q_network, exploration_epsilon)
behavior_policy = aquadem_builder.get_aquadem_policy(discrete_policy,
networks)
# Create the environment loop used for training.
agent = local_layout.LocalLayout(
seed=FLAGS.seed,
environment_spec=spec,
builder=builder,
networks=networks,
policy_network=behavior_policy,
batch_size=dqn_config.batch_size * dqn_config.num_sgd_steps_per_step,
samples_per_insert=dqn_config.samples_per_insert)
train_logger = loggers.CSVLogger(FLAGS.workdir, label='train')
train_loop = acme.EnvironmentLoop(environment, agent, logger=train_logger)
# Create the evaluation actor and loop.
eval_policy = dqn.default_behavior_policy(q_network, 0.)
eval_policy = aquadem_builder.get_aquadem_policy(eval_policy, networks)
eval_actor = builder.make_actor(
random_key=jax.random.PRNGKey(FLAGS.seed),
policy_network=eval_policy,
variable_source=agent)
eval_env = utils.make_environment(task=FLAGS.env_name, evaluation=True)
eval_logger = loggers.CSVLogger(FLAGS.workdir, label='eval')
eval_loop = acme.EnvironmentLoop(eval_env, eval_actor, logger=eval_logger)
assert FLAGS.num_steps % FLAGS.eval_every == 0
for _ in range(FLAGS.num_steps // FLAGS.eval_every):
eval_loop.run(num_episodes=10)
train_loop.run(num_steps=FLAGS.eval_every)
eval_loop.run(num_episodes=10)
if __name__ == '__main__':
app.run(main)
|
google-research/google-research
|
aquadem/run_aquadqn.py
|
Python
|
apache-2.0
| 4,277
|
__version__="1.5.7.2"
|
darvin/qtdjango
|
src/qtdjango/__init__.py
|
Python
|
gpl-2.0
| 22
|
from devassistant import argument
from devassistant import assistant_base
from devassistant import settings
from devassistant import yaml_assistant_loader
class ExecutableAssistant(assistant_base.AssistantBase):
aliases = []
args = [argument.Argument('deps_only',
settings.DEPS_ONLY_FLAG,
help='Only install dependencies',
required=False,
action='store_true')]
def get_all_names(self):
return [self.name] + self.aliases
class CreatorAssistant(ExecutableAssistant):
def get_subassistants(self):
sa = yaml_assistant_loader.YamlAssistantLoader.get_assistants(superassistants=[self])
return sa
name = 'crt'
aliases = ['create']
fullname = 'Create Project'
description = 'Kickstart new projects easily with DevAssistant.'
class TweakAssistant(ExecutableAssistant):
def get_subassistants(self):
sa = yaml_assistant_loader.YamlAssistantLoader.get_assistants(superassistants=[self])
return sa
name = 'twk'
# TODO: in 1.0.0, remove mod and modify
aliases = ['tweak', 'mod', 'modify']
fullname = 'Tweak Existing Project'
description = 'Tweak existing projects with DevAssistant.'
class PreparerAssistant(ExecutableAssistant):
def get_subassistants(self):
sa = yaml_assistant_loader.YamlAssistantLoader.get_assistants(superassistants=[self])
return sa
name = 'prep'
aliases = ['prepare']
fullname = 'Prepare Environment'
description = 'Prepare environment for upstream projects with DevAssistant.'
class ExtrasAssistant(ExecutableAssistant):
def get_subassistants(self):
sa = yaml_assistant_loader.YamlAssistantLoader.get_assistants(superassistants=[self])
return sa
name = 'extra'
# TODO: in 1.0.0, remove task
aliases = ['extras', 'task']
fullname = 'Extras'
description = 'Perform a custom task not related to a specific project.'
class TopAssistant(assistant_base.AssistantBase):
_assistants = []
def get_subassistants(self):
# cache assistants to always return the same instances
if not self._assistants:
self._assistants = [CreatorAssistant(), TweakAssistant(),
PreparerAssistant(), ExtrasAssistant()]
return self._assistants
|
oskopek/devassistant
|
devassistant/bin.py
|
Python
|
gpl-2.0
| 2,407
|
# -*- coding:utf-8 -*-
# !/usr/bin/env python
#
# Author: promisejohn
# Email: promise.john@gmail.com
#
# Manage.py implements the application management tool
#
from flask.ext.script import Manager, Shell
from flask.ext.migrate import Migrate, MigrateCommand
# Run python scripts/manage.py cmd
import sys
sys.path.append('.')
from prony import app, db # flake8: noqa
manager = Manager(app)
migrate = Migrate(app, db)
manager.add_command('db', MigrateCommand)
# Create the database schema
@manager.command
def initdb():
    db.create_all()
    print 'Database initialized, location: ' + app.config['SQLALCHEMY_DATABASE_URI']
# Drop all tables and data
@manager.command
def dropdb():
    db.drop_all()
    print 'Database dropped.'
# Provide auto-imported objects for the manage.py shell
def _make_context():
return dict(app=app, db=db)
manager.add_command("shell", Shell(make_context=_make_context))
if __name__ == '__main__':
manager.run()
|
promisejohn/storeback
|
scripts/manage.py
|
Python
|
apache-2.0
| 907
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Partially based on AboutMessagePassing in the Ruby Koans
#
from runner.koan import *
class AboutAttributeAccess(Koan):
class TypicalObject(object):
pass
def test_calling_undefined_functions_normally_results_in_errors(self):
typical = self.TypicalObject()
try:
typical.foobar()
except Exception as exception:
self.assertEqual('AttributeError', type(exception).__name__)
self.assertMatch('has no attribute', exception[0])
def test_calling_getattribute_causes_an_attribute_error(self):
typical = self.TypicalObject()
try:
typical.__getattribute__('foobar')
except AttributeError as exception:
self.assertMatch('has no attribute', exception[0])
# THINK ABOUT IT:
#
# If the method __getattribute__() causes the AttributeError, then
# what would happen if we redefine __getattribute__()?
# ------------------------------------------------------------------
class CatchAllAttributeReads(object):
def __getattribute__(self, attr_name):
return "Someone called '" + attr_name + \
"' and it could not be found"
def test_all_attribute_reads_are_caught(self):
catcher = self.CatchAllAttributeReads()
self.assertMatch('Someone called', catcher.foobar)
def test_intercepting_return_values_can_disrupt_the_call_chain(self):
catcher = self.CatchAllAttributeReads()
self.assertMatch("Someone called", catcher.foobaz) # This is fine
try:
catcher.foobaz(1)
except TypeError as ex:
self.assertMatch("'str' object is not callable", ex[0])
# foobaz returns a string. What happens to the '(1)' part?
# Try entering this into a python console to reproduce the issue:
#
# "foobaz"(1)
#
def test_changing_getattribute_will_affect__the_getattr_function(self):
catcher = self.CatchAllAttributeReads()
self.assertMatch("Someone called", getattr(catcher, 'any_attribute'))
# ------------------------------------------------------------------
class WellBehavedFooCatcher(object):
def __getattribute__(self, attr_name):
if attr_name[:3] == "foo":
return "Foo to you too"
else:
return \
super(AboutAttributeAccess.WellBehavedFooCatcher, self). \
__getattribute__(attr_name)
def test_foo_attributes_are_caught(self):
catcher = self.WellBehavedFooCatcher()
self.assertEqual("Foo to you too", catcher.foo_bar)
self.assertEqual("Foo to you too", catcher.foo_baz)
def test_non_foo_messages_are_treated_normally(self):
catcher = self.WellBehavedFooCatcher()
try:
catcher.normal_undefined_attribute
except AttributeError as ex:
self.assertMatch("WellBehavedFooCatcher' object has no attribute", ex[0])
# ------------------------------------------------------------------
global stack_depth
stack_depth = 0
class RecursiveCatcher(object):
def __init__(self):
global stack_depth
stack_depth = 0
self.no_of_getattribute_calls = 0
def __getattribute__(self, attr_name):
#Uncomment for debugging info:
#print 'Debug __getattribute__(' + type(self).__name__ + \
# "." + attr_name + ") dict=" + str(self.__dict__)
# We need something that is outside the scope of this class:
global stack_depth
stack_depth += 1
if stack_depth <= 10: # to prevent a stack overflow
self.no_of_getattribute_calls += 1
# Oops! We just accessed an attribute: no_of_getattribute_calls
# Guess what happens when self.no_of_getattribute_calls is
# accessed?
# Using 'object' directly because using super() here will also
# trigger a __getattribute__() call.
return object.__getattribute__(self, attr_name)
def my_method(self):
pass
def test_getattribute_is_a_bit_overzealous_sometimes(self):
catcher = self.RecursiveCatcher()
catcher.my_method()
global stack_depth
self.assertEqual(11, stack_depth)
# ------------------------------------------------------------------
class MinimalCatcher(object):
class DuffObject(object):
pass
def __init__(self):
self.no_of_getattr_calls = 0
def __getattr__(self, attr_name):
self.no_of_getattr_calls += 1
return self.DuffObject
def my_method(self):
pass
def test_getattr_ignores_known_attributes(self):
catcher = self.MinimalCatcher()
catcher.my_method()
self.assertEqual(0, catcher.no_of_getattr_calls)
def test_getattr_only_catches_unknown_attributes(self):
catcher = self.MinimalCatcher()
catcher.purple_flamingos()
catcher.free_pie()
self.assertEqual("DuffObject",
type(catcher.give_me_duff_or_give_me_death()).__name__)
self.assertEqual(3, catcher.no_of_getattr_calls)
# ------------------------------------------------------------------
class PossessiveSetter(object):
def __setattr__(self, attr_name, value):
new_attr_name = attr_name
if attr_name[-5:] == 'comic':
new_attr_name = "my_" + new_attr_name
elif attr_name[-3:] == 'pie':
new_attr_name = "a_" + new_attr_name
object.__setattr__(self, new_attr_name, value)
def test_setattr_intercepts_attribute_assignments(self):
fanboy = self.PossessiveSetter()
fanboy.comic = 'The Laminator, issue #1'
fanboy.pie = 'blueberry'
self.assertEqual('blueberry', fanboy.a_pie)
prefix = 'my'
self.assertEqual(
"The Laminator, issue #1",
getattr(fanboy, prefix + '_comic'))
# ------------------------------------------------------------------
class ScarySetter(object):
def __init__(self):
self.num_of_coconuts = 9
self._num_of_private_coconuts = 2
def __setattr__(self, attr_name, value):
new_attr_name = attr_name
if attr_name[0] != '_':
new_attr_name = "altered_" + new_attr_name
object.__setattr__(self, new_attr_name, value)
def test_it_modifies_external_attribute_as_expected(self):
setter = self.ScarySetter()
setter.e = "mc hammer"
self.assertEqual("mc hammer", setter.altered_e)
def test_it_mangles_some_internal_attributes(self):
setter = self.ScarySetter()
try:
coconuts = setter.num_of_coconuts
except AttributeError:
self.assertEqual(9, setter.altered_num_of_coconuts)
def test_in_this_case_private_attributes_remain_unmangled(self):
setter = self.ScarySetter()
self.assertEqual(2, setter._num_of_private_coconuts)
|
exu/poligon
|
python/python_koans/python2/koans/about_attribute_access.py
|
Python
|
mit
| 7,261
|
# This is the configuration file for your powerline-shell prompt
# Every time you make a change to this file, run install.py to apply changes
#
# For instructions on how to use the powerline-shell.py script, see the README
# Add, remove or rearrange these segments to customize what you see on the shell
# prompt. Any segment you add must be present in the segments/ directory
SEGMENTS = [
# Set the terminal window title to user@host:dir
# 'set_term_title',
# Show current virtual environment (see http://www.virtualenv.org/)
'virtual_env',
# Show the current user's username as in ordinary prompts
'username',
# Show the machine's hostname. Mostly used when ssh-ing into other machines
'hostname',
# Show a padlock when ssh-ing from another machine
'ssh',
# Show the current directory. If the path is too long, the middle part is
# replaced with ellipsis ('...')
'cwd',
# Show a padlock if the current user has no write access to the current
# directory
'read_only',
# Show the current git branch and status
'git',
# Show the current mercurial branch and status
'hg',
# Show the current svn branch and status
'svn',
# Show the current fossil branch and status
'fossil',
# Show number of running jobs
'jobs',
# Show the last command's exit code if it was non-zero
'exit_code',
# Shows a '#' if the current user is root, '$' otherwise
# Also, changes color if the last command exited with a non-zero error code
'root',
]
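# Example (hypothetical): to show the current time before the exit code, add a
# 'time' entry just above 'exit_code' -- this only works if a segments/time.py
# exists in your powerline-shell checkout.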
# Change the colors used to draw individual segments in your prompt
#THEME = 'default'
THEME = 'solarized-dark'
|
theno/fabsetup
|
fabsetup/fabfile-data/files/home/USERNAME/repos/powerline-shell/config.py
|
Python
|
mit
| 1,604
|
from daversy.utils import *
from daversy.db.object import UniqueKey, UniqueKeyColumn
class UniqueKeyColumnBuilder(object):
""" Represents a builder for a column in a unique key. """
DbClass = UniqueKeyColumn
XmlTag = 'constraint-column'
Query = """
SELECT cols.column_name, c.constraint_name, c.table_name, cols.position
FROM sys.user_constraints c, sys.user_cons_columns cols
WHERE c.constraint_name = cols.constraint_name
AND c.constraint_type = 'U'
ORDER BY c.constraint_name, cols.position
"""
PropertyList = odict(
('COLUMN_NAME', Property('name')),
('CONSTRAINT_NAME', Property('key-name', exclude=True)),
('TABLE_NAME', Property('table-name', exclude=True)),
('POSITION', Property('position', exclude=True))
)
@staticmethod
def addToState(state, column):
table = state.tables.get(column['table-name'])
if table:
key = table.unique_keys.get(column['key-name'])
if key:
key.columns[column.name] = column
class UniqueKeyBuilder(object):
""" Represents a builder for a unique key. """
DbClass = UniqueKey
XmlTag = 'unique-key'
Query = """
SELECT c.constraint_name AS name, c.table_name,
DECODE(c.deferrable, 'DEFERRABLE', lower(c.deferred)) AS defer_type,
DECODE(i.compression, 'ENABLED', i.prefix_length) AS "COMPRESS"
FROM sys.user_constraints c
LEFT JOIN sys.user_indexes i ON c.index_name = i.index_name
WHERE c.constraint_type = 'U'
ORDER BY c.constraint_name
"""
PropertyList = odict(
('NAME', Property('name')),
('DEFER_TYPE', Property('defer-type')),
('COMPRESS', Property('compress')),
('TABLE_NAME', Property('table-name', exclude=True))
)
@staticmethod
def addToState(state, key):
table = state.tables.get(key['table-name'])
if table:
table.unique_keys[key.name] = key
@staticmethod
def sql(key):
definition = "CONSTRAINT %(name)s UNIQUE ( %(columns)s )"
if key['defer-type']:
definition += " DEFERRABLE INITIALLY %(defer-type)s"
columns = ", ".join([column.name for column in key.columns.values()])
return render(definition, key, columns=columns)
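    # Example rendered output (a sketch with hypothetical names): for a key
    # named UK_EMP over columns (DEPT_ID, BADGE_NO) with defer-type 'deferred',
    # sql() would return:
    #   CONSTRAINT UK_EMP UNIQUE ( DEPT_ID, BADGE_NO ) DEFERRABLE INITIALLY deferred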
|
kalyptorisk/daversy
|
src/daversy/db/oracle/unique_key.py
|
Python
|
gpl-2.0
| 2,398
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
import json
from frappe.model.document import Document
from frappe.utils import getdate
class EmployeeAttendanceTool(Document):
pass
@frappe.whitelist()
def get_employees(date, department = None, branch = None, company = None):
attendance_not_marked = []
attendance_marked = []
filters = {"status": "Active", "date_of_joining": ["<=", date]}
for field, value in {'department': department,
'branch': branch, 'company': company}.items():
if value:
filters[field] = value
employee_list = frappe.get_list("Employee", fields=["employee", "employee_name"], filters=filters, order_by="employee_name")
marked_employee = {}
for emp in frappe.get_list("Attendance", fields=["employee", "status"],
filters={"attendance_date": date}):
marked_employee[emp['employee']] = emp['status']
for employee in employee_list:
employee['status'] = marked_employee.get(employee['employee'])
if employee['employee'] not in marked_employee:
attendance_not_marked.append(employee)
else:
attendance_marked.append(employee)
return {
"marked": attendance_marked,
"unmarked": attendance_not_marked
}
@frappe.whitelist()
def mark_employee_attendance(employee_list, status, date, leave_type=None, company=None):
employee_list = json.loads(employee_list)
for employee in employee_list:
if status == "On Leave" and leave_type:
leave_type = leave_type
else:
leave_type = None
if not company:
company = frappe.db.get_value("Employee", employee['employee'], "Company")
attendance=frappe.get_doc(dict(
doctype='Attendance',
employee=employee.get('employee'),
employee_name=employee.get('employee_name'),
attendance_date=getdate(date),
status=status,
leave_type=leave_type,
company=company
))
attendance.insert()
attendance.submit()
|
StrellaGroup/erpnext
|
erpnext/hr/doctype/employee_attendance_tool/employee_attendance_tool.py
|
Python
|
gpl-3.0
| 1,995
|
"""add password field
Revision ID: 18ae56e5a0f
Revises: 43e4e3402b9
Create Date: 2015-06-29 19:31:39.056586
"""
# revision identifiers, used by Alembic.
revision = '18ae56e5a0f'
down_revision = '43e4e3402b9'
from alembic import op
import sqlalchemy as sa
def upgrade():
with op.batch_alter_table('user') as batch_op:
batch_op.add_column(
sa.Column('password', sa.String(length=254), nullable=True))
def downgrade():
with op.batch_alter_table('user') as batch_op:
batch_op.drop_column('password')
|
karlorg/drunken-octo-avenger
|
migrations/versions/18ae56e5a0f_add_password_field.py
|
Python
|
cc0-1.0
| 540
|
#!/usr/bin/env python2
import os
import socket
import base64
import hashlib
import hmac
from struct import pack
from Crypto.Cipher import AES, ARC4
from bithordetest import message, BithordeD, TestConnection
class EncryptedConnection(TestConnection):
def __init__(self, tgt):
TestConnection.__init__(self, tgt)
self.encryptor = None
self.decryptor = None
def fetch(self):
upper = TestConnection.fetch(self)
if self.decryptor:
upper = self.decryptor(upper)
return upper
def push(self, blob):
if self.encryptor:
blob = self.encryptor(blob)
return TestConnection.push(self, blob)
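# Stateful big-endian counter over the IV bytes; PyCrypto calls it once per block
# when running AES in CTR mode (the current value is returned, then incremented).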
class Counter:
def __init__(self, iv):
self._block = [ord(c) for c in iv]
def __call__(self):
res = pack("B" * len(self._block), *self._block)
idx = len(self._block) - 1
while idx >= 0:
current = self._block[idx] + 1
if current == 256:
self._block[idx] = 0
idx -= 1
else:
self._block[idx] = current
break
return res
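# Build the cipher object negotiated in the handshake: AES in CTR mode seeded with the IV,
# or RC4 keyed with HMAC-SHA256 of the shared key and the IV.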
def setupCipher(type, key, iv):
if type == message.AES_CTR:
return AES.new(key, AES.MODE_CTR, counter=Counter(iv))
elif type == message.RC4:
return ARC4.new(hmac.HMAC(key, iv, digestmod=hashlib.sha256).digest())
else:
        raise RuntimeError("Unsupported cipher: %s" % type)
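# Run the protoversion-2 challenge/response handshake: each side proves knowledge of the
# shared key via HMAC-SHA256 over (peer challenge + cipher id + IV), then both directions
# switch to the negotiated ciphers. Returns the peer's advertised name, or None on failure.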
def secure_auth_and_encryption(conn, name, key, cipher, validate_auth=True):
sendIv = os.urandom(16)
challenge = os.urandom(32)
try:
conn.send(
message.HandShake(name=name, protoversion=2, challenge=challenge))
greeting = conn.expect(message.HandShake(protoversion=2))
my_auth = hmac.HMAC(
key, greeting.challenge + chr(cipher) + sendIv, digestmod=hashlib.sha256).digest()
conn.send(message.HandShakeConfirmed(
cipher=cipher, cipheriv=sendIv, authentication=my_auth))
authentication = conn.expect(message.HandShakeConfirmed)
expected_auth = hmac.new(
key, challenge + chr(authentication.cipher) + authentication.cipheriv, hashlib.sha256).digest()
if validate_auth:
assert authentication.authentication == expected_auth
conn.encryptor = setupCipher(cipher, key, sendIv).encrypt
conn.decryptor = setupCipher(
authentication.cipher, key, authentication.cipheriv).decrypt
except (socket.error, StopIteration):
return None
return greeting.name
if __name__ == '__main__':
key = os.urandom(16)
bithorded = BithordeD(config={
'server.name': 'test_server',
'client.tester1': {
'cipher': 'RC4',
'key': base64.b64encode(key),
},
'client.tester2': {
'cipher': 'AES',
'key': base64.b64encode(key),
},
})
# Test auth failure for unknown key
conn = EncryptedConnection(bithorded)
assert secure_auth_and_encryption(
conn, name="anonymous", key=key, cipher=message.RC4) is None
# Test auth failure for wrong key
conn = EncryptedConnection(bithorded)
secure_auth_and_encryption(
conn, name="tester1", key='0' * len(key), cipher=message.RC4, validate_auth=False)
    # With the wrong key the streams are mis-keyed, so the Ping must not survive the round trip.
    # The assert lives outside the try block so the broad except cannot swallow a failure.
    got_ping_back = False
    try:
        conn.send(message.Ping(timeout=2000))
        conn.expect(message.Ping)
        got_ping_back = True
    except Exception:
        pass
    assert not got_ping_back, "We really should not have gotten here"
# Test successful AES upstream / RC4 downstream
conn = EncryptedConnection(bithorded)
assert secure_auth_and_encryption(
conn, name="tester1", key=key, cipher=message.AES_CTR) == 'test_server'
conn.send(message.Ping(timeout=2000))
conn.expect(message.Ping)
conn.close()
# Test successful RC4 upstream / AES downstream
conn = EncryptedConnection(bithorded)
assert secure_auth_and_encryption(
conn, name="tester2", key=key, cipher=message.RC4) == 'test_server'
conn.send(message.Ping(timeout=2000))
conn.expect(message.Ping)
conn.close()
assert bithorded.is_alive()
|
rawler/bithorde
|
tests/proto/encryption.py
|
Python
|
apache-2.0
| 4,148
|
# -*- coding: utf-8 -*-
##
## __init__.py
## Login : <freyes@wampa>
## Started on Sun Jul 5 13:44:11 2009 Felipe Reyes
## $Id$
##
## Copyright (C) 2009 Felipe Reyes
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
|
freyes/hawck
|
hawck/data/__init__.py
|
Python
|
gpl-3.0
| 813
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from time import sleep
from django.contrib.auth.models import User
from django.core.cache import cache
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test.utils import override_settings
from online_status.conf import online_status_settings as config
from online_status.utils import OnlineStatusJSONEncoder
# override settings so we don't have to wait so long during tests
@override_settings(
USERS_ONLINE__TIME_IDLE=2, USERS_ONLINE__TIME_OFFLINE=6,
USERS_ONLINE__ONLY_LOGGED_USERS=True
)
class TestOnlineStatus(TestCase):
def login_as(self, user):
password = user.password
if not password:
password = '123'
user.set_password(password)
user.save()
self.client.login(username=user.username, password=password)
def setUp(self):
self.user1 = User.objects.get_or_create(username='test1')[0]
self.user2 = User.objects.get_or_create(username='test2')[0]
self.user3 = User.objects.get_or_create(username='test3')[0]
def list_len(self, length):
users = cache.get(config.CACHE_USERS)
self.assertEqual(len(users), length)
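    # Anonymous requests must not create cache entries; after login the user is reported
    # online, drops to idle after TIME_IDLE and disappears after TIME_OFFLINE.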
def test_middleware(self):
self.client.get(reverse('online_users_test'))
useronline = cache.get(config.CACHE_PREFIX_USER % self.user1.pk)
self.assertEqual(useronline, None)
users = cache.get(config.CACHE_USERS)
self.assertEqual(users, None)
self.login_as(self.user1)
self.client.get(reverse('online_users_test'))
useronline = cache.get(config.CACHE_PREFIX_USER % self.user1.pk)
self.assertEqual(useronline.user, self.user1)
self.assertEqual(useronline.status, 1)
self.list_len(1)
self.client.logout()
self.login_as(self.user2)
self.client.get(reverse('online_users_test'))
useronline = cache.get(config.CACHE_PREFIX_USER % self.user2.pk)
self.assertEqual(useronline.user, self.user2)
self.assertEqual(useronline.status, 1)
self.list_len(2)
# idle works?
sleep(config.TIME_IDLE + 1)
self.client.get(reverse('online_users_test'))
useronline = cache.get(config.CACHE_PREFIX_USER % self.user1.pk)
self.assertEqual(useronline.user, self.user1)
self.assertEqual(useronline.status, 0)
self.list_len(2)
# offline works?
sleep(config.TIME_OFFLINE + 1)
self.client.get(reverse('online_users_test'))
useronline = cache.get(config.CACHE_PREFIX_USER % self.user1.pk)
self.assertEqual(useronline, None)
self.list_len(1)
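    # The JSON view should return exactly the cached user list, serialized with
    # OnlineStatusJSONEncoder.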
def test_views(self):
response = self.client.get(reverse('online_users'))
self.assertEqual(response.status_code, 200)
online_users = cache.get(config.CACHE_USERS)
self.assertEqual(
response.content,
json.dumps(online_users, cls=OnlineStatusJSONEncoder).encode('utf-8')
)
def test_templatetags(self):
self.client.logout()
self.login_as(self.user1)
response = self.client.get(reverse('online_users_example'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'online_status/example.html')
# am i online?
useronline = cache.get(config.CACHE_PREFIX_USER % self.user1.pk)
self.assertEqual(useronline.user, self.user1)
self.assertEqual(useronline.status, 1)
# is user2 online?
useronline = cache.get(config.CACHE_PREFIX_USER % self.user2.pk)
self.assertEqual(useronline.user, self.user2)
self.assertEqual(useronline.status, 1)
self.list_len(2)
html = """<h1>Status for user "example"</h1>
<p>
offline</p>"""
self.assertContains(response, html, 1, 200)
h1 = """<h1>My status ("test1")</h1>"""
self.assertContains(response, h1, 1, 200)
html = """<h1>My status ("test1")</h1>
<p>
online</p>"""
self.assertContains(response, html, 1, 200)
html = """<h1>Users online</h1>\n\n<dl class="online_users">\n
\t<dt class="user">test2</dt><dd class="status">online</dd>\n
\t<dt class="user">test1</dt><dd class="status">online</dd>\n
</dl>"""
self.assertContains(response, html, 1, 200)
# test idle
sleep(config.TIME_IDLE + 1)
response = self.client.get(reverse('online_users_example'))
html = """<h1>Users online</h1>\n\n<dl class="online_users">\n
\t<dt class="user">test2</dt><dd class="status">idle</dd>\n
\t<dt class="user">test1</dt><dd class="status">online</dd>\n
</dl>"""
self.assertContains(response, html, 1, 200)
# test offline
sleep(config.TIME_OFFLINE + 1)
response = self.client.get(reverse('online_users_example'))
html = """<h1>Users online</h1>\n\n<dl class="online_users">\n
\t<dt class="user">test1</dt><dd class="status">online</dd>\n
</dl>"""
self.assertContains(response, html, 1, 200)
self.client.logout()
sleep(config.TIME_OFFLINE + 1)
response = self.client.get(reverse('online_users_example'))
html = """<h1>Users online</h1>\n\n<dl class="online_users">\n\n</dl>"""
self.assertContains(response, html, 1, 200)
|
hovel/django-online-status
|
online_status/tests.py
|
Python
|
unlicense
| 5,346
|