repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
SCM-NV/qmworks-namd | test/test_coupling.py | Python | mit | 3,774 | 0.00106 | """Test the derivative coupling calculation."""
import os
import shutil
from typing import Sequence
import numpy as np
from assertionlib import assertion
from nanoqm.common import DictConfig, is_data_in_hdf5, retrieve_hdf5_data
from nanoqm.workflows.input_validation import process_input
from nanoqm.workflows.workflow_coupling import workflow_derivative_couplings
from .utilsTest import PATH_TEST, remove_files
def test_fast_couplings(tmp_path):
"""Check the derivative couplings workflow"""
run_derivative_coupling(tmp_path, 'input_fast_test_derivative_couplings.yml')
def test_unrestricted_alphas(tmp_path):
"""Test the derivative coupling for the alphas spin orbitals."""
run_derivative_coupling(tmp_path, 'input_couplings_alphas.yml', "alphas")
def test_unrestricted_betas(tmp_path):
"""Test the derivative coupling for the alphas spin orbitals."""
run_derivative_coupling(tmp_path, 'input_couplings_both.yml', "both")
def run_derivative_coupling(tmp_path: str, input_file: str, orbitals_type: str = "") -> None:
"""Check that the couplings run."""
path_input = PATH_TEST / input_file
config = process_input(path_input, 'derivative_couplings')
config["scratch_path"] = tmp_path
tmp_hdf5 = os.path.join(tmp_path, 'fast_couplings.hdf5')
shutil.copy(config.path_hdf5, tmp_hdf5)
config['path_hdf5'] = tmp_hdf5
config['write_overlaps'] = True
try:
check_results(config, tmp_hdf5, orbitals_type)
# Run the calculation again to test that the data is read from the hdf5
check_results(config, tmp_hdf5, orbitals_type)
finally:
remove_files()
def check_results(config: DictConfig, tmp_hdf5: str, orbitals_type: str) -> None:
"""Check the computed results stored in the HDF5 file."""
if orbitals_type != "both":
hamiltonians, _ = workflow_derivative_couplings(config)
check_couplings(config, tmp_hdf5, orbitals_type)
check_hamiltonians(hamiltonians)
else:
result_alphas, result_betas = wor | kflow_derivative_couplings(config)
| check_couplings(config, tmp_hdf5, "alphas")
check_couplings(config, tmp_hdf5, "betas")
check_hamiltonians(result_alphas[0])
check_hamiltonians(result_betas[0])
def check_couplings(config: DictConfig, tmp_hdf5: str, orbitals_type: str) -> None:
"""Check that the couplings have meaningful values."""
def create_paths(keyword: str) -> list:
return [os.path.join(orbitals_type, f'{keyword}_{x}')
for x in range(len(config.geometries) - 1)]
overlaps = create_paths('overlaps')
couplings = create_paths('coupling')
# Check that couplings and overlaps exists
assertion.truth(is_data_in_hdf5(tmp_hdf5, overlaps))
assertion.truth(is_data_in_hdf5(tmp_hdf5, couplings))
# All the elements are different of inifinity or nan
tensor_couplings = np.stack(retrieve_hdf5_data(tmp_hdf5, couplings))
assertion.truth(np.isfinite(tensor_couplings).all())
# Check that the couplings are anti-symetric
for mtx in tensor_couplings[:]:
assertion(np.allclose(mtx, -mtx.T))
# Check that there are not NaN
assertion.truth(not np.all(np.isnan(tensor_couplings)))
def check_hamiltonians(hamiltonians: Sequence[str]) -> None:
"""Check that the hamiltonians were written correctly."""
energies = np.stack([np.diag(np.loadtxt(ts[1])) for ts in hamiltonians])
couplings = np.stack([np.loadtxt(ts[0]) for ts in hamiltonians])
# check that energies and couplings are finite values
assertion.truth(np.isfinite(energies).all())
assertion.truth(np.isfinite(couplings).all())
# Check that the couplings diagonal is zero
assertion.truth(abs(np.einsum('jii->', couplings)) < 1e-16)
|
GomSpace/libcsp | examples/python_bindings_example_client.py | Python | lgpl-2.1 | 1,900 | 0.003158 | #!/usr/bin/python | 3
# Build required code:
# $ ./examples/buildall.py
#
# Start zmqproxy (only one instance)
# $ ./build/zmqproxy
#
# Run client against server using ZMQ:
# $ LD_LIBRARY_PATH=build PYTHONPATH=build python3 examples/python_bindings_example_client.py -z localhost
#
import os
import time
import sys
import argparse
import libcsp_py3 as libcsp
def getOptions():
parser = argparse.ArgumentParser(description="Parses command | .")
parser.add_argument("-a", "--address", type=int, default=10, help="Local CSP address")
parser.add_argument("-c", "--can", help="Add CAN interface")
parser.add_argument("-z", "--zmq", help="Add ZMQ interface")
parser.add_argument("-s", "--server-address", type=int, default=27, help="Server address")
parser.add_argument("-R", "--routing-table", help="Routing table")
return parser.parse_args(sys.argv[1:])
if __name__ == "__main__":
options = getOptions()
libcsp.init(options.address, "host", "model", "1.2.3", 10, 300)
if options.can:
libcsp.can_socketcan_init(options.can)
if options.zmq:
libcsp.zmqhub_init(options.address, options.zmq)
libcsp.rtable_load("0/0 ZMQHUB")
if options.routing_table:
libcsp.rtable_load(options.routing_table)
libcsp.route_start_task()
time.sleep(0.2) # allow router task startup
print("Connections:")
libcsp.print_connections()
print("Routes:")
libcsp.print_routes()
print("CMP ident:", libcsp.cmp_ident(options.server_address))
print("Ping: %d mS" % libcsp.ping(options.server_address))
# transaction
outbuf = bytearray().fromhex('01')
inbuf = bytearray(1)
print ("Exchange data with server using csp_transaction ...")
libcsp.transaction(0, options.server_address, 10, 1000, outbuf, inbuf)
print (" got reply from server [%s]" % (''.join('{:02x}'.format(x) for x in inbuf)))
|
stormi/tsunami | src/primaires/objet/types/assiette.py | Python | bsd-3-clause | 1,930 | 0.004145 | # -*-coding:Utf-8 -*
# Copyright (c) 2010 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDI | NG NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant le type Assiette."""
from .conteneur_nourriture import ConteneurNourriture
class Assiette(ConteneurNourriture):
"""Type d'objet: assiette
"""
nom_type = "assiette"
| def __init__(self, cle=""):
"""Constructeur de l'objet"""
ConteneurNourriture.__init__(self, cle)
self.poids_max = 12
|
uwosh/uwosh.thememain | uwosh/thememain/interfaces.py | Python | gpl-2.0 | 176 | 0.017045 | from zope.interface import Interface
class ISectionNavigation(Interface):
"""
A Folder can impleme | nt this to specify that it | is a section navigation
folder
""" |
PetePriority/home-assistant | homeassistant/components/keyboard/__init__.py | Python | apache-2.0 | 2,078 | 0 | """
Provides functionality to emulate keyboard presses on host machine.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/keyboard/
"""
import voluptuous as vol
from homeassistant.const import (
SERVICE_MEDIA_NEXT_TRACK, SERVICE_MEDIA_PLAY_PAUSE,
SERVICE_MEDIA_PREVIOUS_TRACK, SERVICE_VOLUME_DOWN, SERVICE_VOLUME_MUTE,
SERVICE_VOLUME_UP)
REQUIREMENTS = ['pyuserinput==0.1.11']
DOMAIN = 'keyboard'
TAP_KEY_SCHEMA = vol.Schema({})
def setup(hass, config):
"""Listen for keyboard events."""
import pykeyboard # pylint: disable=import-error
keyboard = pykeyboard.PyKeyboard()
keyboard.special_key_assignment()
hass.services.register(DOMAIN, SERVICE_VOLUME_UP,
lambda service:
keyboard.tap_key(keyboard.volume_up_key),
schema=TAP_KEY_SCHEMA)
hass.services.register(DOMAIN, SERVICE_VOLUME_DOWN,
lambda service:
| keyboard | .tap_key(keyboard.volume_down_key),
schema=TAP_KEY_SCHEMA)
hass.services.register(DOMAIN, SERVICE_VOLUME_MUTE,
lambda service:
keyboard.tap_key(keyboard.volume_mute_key),
schema=TAP_KEY_SCHEMA)
hass.services.register(DOMAIN, SERVICE_MEDIA_PLAY_PAUSE,
lambda service:
keyboard.tap_key(keyboard.media_play_pause_key),
schema=TAP_KEY_SCHEMA)
hass.services.register(DOMAIN, SERVICE_MEDIA_NEXT_TRACK,
lambda service:
keyboard.tap_key(keyboard.media_next_track_key),
schema=TAP_KEY_SCHEMA)
hass.services.register(DOMAIN, SERVICE_MEDIA_PREVIOUS_TRACK,
lambda service:
keyboard.tap_key(keyboard.media_prev_track_key),
schema=TAP_KEY_SCHEMA)
return True
|
anhstudios/swganh | data/scripts/templates/object/static/structure/general/shared_streetlamp_medium_style_01.py | Python | mit | 464 | 0.047414 | #### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
resu | lt = Static()
result.template = "object/static/structure/general/shared_streetlamp_medium_style_01.iff"
result.attribute_template_id = -1
result.stfName("obj_n","unknown_object")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return re | sult |
KirtoXX/Security_Camera | ssd_mobilenet/object_detection/trainer.py | Python | apache-2.0 | 12,596 | 0.004843 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Detection model trainer.
This file provides a generic training method that can be used to train a
DetectionModel.
"""
import functools
import tensorflow as tf
from object_detection.builders import optimizer_builder
from object_detection.builders import preprocessor_builder
from object_detection.core import batcher
from object_detection.core import preprocessor
from object_detection.core import standard_fields as fields
from object_detection.utils | import ops as util_ops
from object_detection.utils import variables_helper
from d | eployment import model_deploy
slim = tf.contrib.slim
def _create_input_queue(batch_size_per_clone, create_tensor_dict_fn,
batch_queue_capacity, num_batch_queue_threads,
prefetch_queue_capacity, data_augmentation_options):
"""Sets up reader, prefetcher and returns input queue.
Args:
batch_size_per_clone: batch size to use per clone.
create_tensor_dict_fn: function to create tensor dictionary.
batch_queue_capacity: maximum number of elements to store within a queue.
num_batch_queue_threads: number of threads to use for batching.
prefetch_queue_capacity: maximum capacity of the queue used to prefetch
assembled batches.
data_augmentation_options: a list of tuples, where each tuple contains a
data augmentation function and a dictionary containing arguments and their
values (see preprocessor.py).
Returns:
input queue: a batcher.BatchQueue object holding enqueued tensor_dicts
(which hold images, boxes and targets). To get a batch of tensor_dicts,
call input_queue.Dequeue().
"""
tensor_dict = create_tensor_dict_fn()
tensor_dict[fields.InputDataFields.image] = tf.expand_dims(
tensor_dict[fields.InputDataFields.image], 0)
images = tensor_dict[fields.InputDataFields.image]
float_images = tf.to_float(images)
tensor_dict[fields.InputDataFields.image] = float_images
if data_augmentation_options:
tensor_dict = preprocessor.preprocess(tensor_dict,
data_augmentation_options)
input_queue = batcher.BatchQueue(
tensor_dict,
batch_size=batch_size_per_clone,
batch_queue_capacity=batch_queue_capacity,
num_batch_queue_threads=num_batch_queue_threads,
prefetch_queue_capacity=prefetch_queue_capacity)
return input_queue
def _get_inputs(input_queue, num_classes):
"""Dequeue batch and construct inputs to object detection model.
Args:
input_queue: BatchQueue object holding enqueued tensor_dicts.
num_classes: Number of classes.
Returns:
images: a list of 3-D float tensor of images.
locations_list: a list of tensors of shape [num_boxes, 4]
containing the corners of the groundtruth boxes.
classes_list: a list of padded one-hot tensors containing target classes.
masks_list: a list of 3-D float tensors of shape [num_boxes, image_height,
image_width] containing instance masks for objects if present in the
input_queue. Else returns None.
"""
read_data_list = input_queue.dequeue()
label_id_offset = 1
def extract_images_and_targets(read_data):
image = read_data[fields.InputDataFields.image]
location_gt = read_data[fields.InputDataFields.groundtruth_boxes]
classes_gt = tf.cast(read_data[fields.InputDataFields.groundtruth_classes],
tf.int32)
classes_gt -= label_id_offset
classes_gt = util_ops.padded_one_hot_encoding(indices=classes_gt,
depth=num_classes, left_pad=0)
masks_gt = read_data.get(fields.InputDataFields.groundtruth_instance_masks)
return image, location_gt, classes_gt, masks_gt
return zip(*map(extract_images_and_targets, read_data_list))
def _create_losses(input_queue, create_model_fn):
"""Creates loss function for a DetectionModel.
Args:
input_queue: BatchQueue object holding enqueued tensor_dicts.
create_model_fn: A function to create the DetectionModel.
"""
detection_model = create_model_fn()
(images, groundtruth_boxes_list, groundtruth_classes_list,
groundtruth_masks_list
) = _get_inputs(input_queue, detection_model.num_classes)
images = [detection_model.preprocess(image) for image in images]
images = tf.concat(images, 0)
if any(mask is None for mask in groundtruth_masks_list):
groundtruth_masks_list = None
detection_model.provide_groundtruth(groundtruth_boxes_list,
groundtruth_classes_list,
groundtruth_masks_list)
prediction_dict = detection_model.detectFaces(images)
losses_dict = detection_model.loss(prediction_dict)
for loss_tensor in losses_dict.values():
tf.losses.add_loss(loss_tensor)
def train(create_tensor_dict_fn, create_model_fn, train_config, master, task,
num_clones, worker_replicas, clone_on_cpu, ps_tasks, worker_job_name,
is_chief, train_dir):
"""Training function for detection models.
Args:
create_tensor_dict_fn: a function to create a tensor input dictionary.
create_model_fn: a function that creates a DetectionModel and generates
losses.
train_config: a train_pb2.TrainConfig protobuf.
master: BNS name of the TensorFlow master to use.
task: The task id of this training instance.
num_clones: The number of clones to run per machine.
worker_replicas: The number of work replicas to train with.
clone_on_cpu: True if clones should be forced to run on CPU.
ps_tasks: Number of parameter server tasks.
worker_job_name: Name of the worker job.
is_chief: Whether this replica is the chief replica.
train_dir: Directory to write checkpoints and training summaries to.
"""
detection_model = create_model_fn()
data_augmentation_options = [
preprocessor_builder.build(step)
for step in train_config.data_augmentation_options]
with tf.Graph().as_default():
# Build a configuration specifying multi-GPU and multi-replicas.
deploy_config = model_deploy.DeploymentConfig(
num_clones=num_clones,
clone_on_cpu=clone_on_cpu,
replica_id=task,
num_replicas=worker_replicas,
num_ps_tasks=ps_tasks,
worker_job_name=worker_job_name)
# Place the global step on the device storing the variables.
with tf.device(deploy_config.variables_device()):
global_step = slim.create_global_step()
with tf.device(deploy_config.inputs_device()):
input_queue = _create_input_queue(train_config.batch_size // num_clones,
create_tensor_dict_fn,
train_config.batch_queue_capacity,
train_config.num_batch_queue_threads,
train_config.prefetch_queue_capacity,
data_augmentation_options)
# Gather initial summaries.
summaries = set(tf.get_collection(tf.GraphKeys.SUMMARIES))
global_summaries = set([])
model_fn = functools.partial(_create_losses,
create_model_fn=create_model_fn)
clones = model_deploy.create_clones(deploy_config, model_fn, [input_queue])
first_clone_scope = clones[0].scope
# Gather update_ops from the first clone. These contain, for example,
# the updates for the batch_norm variables created by model_fn.
|
tensorflow/gan | tensorflow_gan/examples/progressive_gan/networks.py | Python | apache-2.0 | 15,202 | 0.005328 | # coding=utf-8
# Copyright 2022 The TensorFlow GAN Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# python2 python3
"""Generator and discriminator for a progressive GAN model.
See https://arxiv.org/abs/1710.10196 for details about the model.
See https://github.com/tkarras/progressive_growing_of_gans for the original
theano implementation.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import tensorflow.compat.v1 as tf
from tensorflow_gan.examples.progressive_gan import layers
class ResolutionSchedule(object):
"""Image resolution upscaling schedule."""
def __init__(self, start_resolutions=(4, 4), scale_base=2, num_resolutions=4):
"""Initializer.
Args:
start_resolutions: An tuple of integers of HxW format for start image
resolutions. Defaults to (4, 4).
scale_base: An integer of resolution base multiplier. Defaults to 2.
num_resolutions: An integer of how many progressive resolutions (including
`start_resolutions`). Defaults to 4.
"""
self._start_resolutions = start_resolutions
self._scale_base = scale_base
self._num_resolutions = num_resolutions
@property
def start_resolutions(self):
return tuple(self._start_resolutions)
@property
def scale_base(self):
return self._scale_base
@property
def num_resolutions(self):
return self._num_resolutions
@property
def final_resolutions(self):
"""Returns the final resolutions."""
return tuple([
r * self._scale_base**(self._num_resolutions - 1)
for r in self._start_resolutions
])
def scale_factor(self, block_id):
"""Returns the scale factor for network block `block_id`."""
if block_id < 1 or block_id > self._num_resolutions:
raise ValueError('`block_id` must be in [1, {}]'.format(
self._num_resolutions))
return self._scale_base**(self._num_resolutions - block_id)
def block_name(block_id):
"""Returns the scope name for the network block `block_id`."""
return 'progressive_gan_block_{}'.format(block_id)
def min_total_num_images(stable_stage_num_images, transition_stage_num_images,
num_blocks):
"""Returns the minimum total number of images.
Computes the minimum total number of images required to reach the desired
`resolution`.
Args:
stable_stage_num_images: Number of images in the stable stage.
transition_stage_num_images: Number of images in the transition stage.
num_blocks: Number of network blocks.
Returns:
An integer of the minimum total number of images.
"""
return (num_blocks * stable_stage_num_images +
(num_blocks - 1) * transition_stage_num_images)
def compute_progress(current_image_id, stable_stage_num_images,
transition_stage_num_images, num_blocks):
"""Computes the training progress.
The training alternates between stable phase and transition phase.
The `progress` indicates the training progress, i.e. the training is at
- a stable phase p if progress = p
- a transition stage between p and p + 1 if progress = p + fraction
where p = 0,1,2.,...
Note the max value of progress is `num_blocks` - 1.
In terms of LOD (of the original implementation):
progress = `num_blocks` - 1 - LOD
Args:
current_image_id: An scalar integer `Tensor` of the current image id, count
from 0.
stable_stage_num_images: An integer representing the number of images in
each stable stage.
transition_stage_num_images: An integer representing the number of images in
each transition stage.
num_blocks: Number of network blocks.
Returns:
A scalar float `Tensor` of the training progress.
"""
# Note when current_image_id >= min_total_num_i | mages - 1 (which means we
# are already at the highest resolution), we want to keep progress constant.
# Therefore, cap current_image_id here.
capped_current_image_id = tf.minimum(
current_image_id,
min_total_num_images(stable_stage_num_images, transition_stage_num_images,
| num_blocks) - 1)
stage_num_images = stable_stage_num_images + transition_stage_num_images
progress_integer = tf.math.floordiv(capped_current_image_id, stage_num_images)
progress_fraction = tf.maximum(
0.0,
tf.cast(
tf.math.mod(capped_current_image_id, stage_num_images) -
stable_stage_num_images,
dtype=tf.float32) /
tf.cast(transition_stage_num_images, dtype=tf.float32))
return tf.cast(progress_integer, dtype=tf.float32) + progress_fraction
def _generator_alpha(block_id, progress):
"""Returns the block output parameter for the generator network.
The generator has N blocks with `block_id` = 1,2,...,N. Each block
block_id outputs a fake data output(block_id). The generator output is a
linear combination of all block outputs, i.e.
SUM_block_id(output(block_id) * alpha(block_id, progress)) where
alpha(block_id, progress) = _generator_alpha(block_id, progress). Note it
garantees that SUM_block_id(alpha(block_id, progress)) = 1 for any progress.
With a fixed block_id, the plot of alpha(block_id, progress) against progress
is a 'triangle' with its peak at (block_id - 1, 1).
Args:
block_id: An integer of generator block id.
progress: A scalar float `Tensor` of training progress.
Returns:
A scalar float `Tensor` of block output parameter.
"""
return tf.maximum(0.0,
tf.minimum(progress - (block_id - 2), block_id - progress))
def _discriminator_alpha(block_id, progress):
"""Returns the block input parameter for discriminator network.
The discriminator has N blocks with `block_id` = 1,2,...,N. Each block
block_id accepts an
- input(block_id) transformed from the real data and
- the output of block block_id + 1, i.e. output(block_id + 1)
The final input is a linear combination of them,
i.e. alpha * input(block_id) + (1 - alpha) * output(block_id + 1)
where alpha = _discriminator_alpha(block_id, progress).
With a fixed block_id, alpha(block_id, progress) stays to be 1
when progress <= block_id - 1, then linear decays to 0 when
block_id - 1 < progress <= block_id, and finally stays at 0
when progress > block_id.
Args:
block_id: An integer of generator block id.
progress: A scalar float `Tensor` of training progress.
Returns:
A scalar float `Tensor` of block input parameter.
"""
return tf.clip_by_value(block_id - progress, 0.0, 1.0)
def blend_images(x, progress, resolution_schedule, num_blocks):
"""Blends images of different resolutions according to `progress`.
When training `progress` is at a stable stage for resolution r, returns
image `x` downscaled to resolution r and then upscaled to `final_resolutions`,
call it x'(r).
Otherwise when training `progress` is at a transition stage from resolution
r to 2r, returns a linear combination of x'(r) and x'(2r).
Args:
x: An image `Tensor` of NHWC format with resolution `final_resolutions`.
progress: A scalar float `Tensor` of training progress.
resolution_schedule: An object of `ResolutionSchedule`.
num_blocks: An integer of number of blocks.
Returns:
An image `Tensor` which is a blend of images of different resolutions.
"""
x_blend = []
for block_id in range(1, num_blocks + 1):
alpha = _generator_alpha(block_id, progress)
scale = resolution_schedule.scale_factor(block_id)
x_blend.append(alpha * layers.upscale(layers.downscale(x, scale), scale))
return tf.add_n(x_blend)
def num_filters(block_id, fmap_base=4096, fmap_d |
reanahub/reana-client | tests/test_version.py | Python | mit | 444 | 0 | # -*- coding: utf-8 -*-
#
# This file is part of REANA.
# Copyright (C) 2017, 2018, 2020, 2021 CERN.
#
# REANA is free software; you can redistribute it and/or modify it
# under the terms of the | MIT License; see LICENSE file for more details.
"""REANA client tests."""
from __future__ import absolute_import, print_function
def test_version():
"""Test version import."""
from reana_client import __version | __
assert __version__
|
MiracleWong/PythonBasic | LiaoXueFeng/buildin/use_urllib.py | Python | mit | 340 | 0.002941 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'MiracleWong'
from urllib import request
with request.urlopen('http://news-at.zh | ihu.com/api/4/news/latest') as f:
data = f.read()
print('Status:', f.status, f.reason)
for k, v in f.getheaders():
print('%s: %s' % (k, v))
print('Data:', data.decode('utf- | 8')) |
aidin36/beneath-a-binary-sky | src/world/square_iterator.py | Python | gpl-3.0 | 3,416 | 0.001464 | # This file is part of Beneath a Binary Sky.
# Copyright (C) 2016, Aidin Gharibnavaz <aidin@aidinhut.com>
#
# Beneath a Binary Sky is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Beneath a Binary Sky is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICU | LAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with B | eneath a Binary Sky. If not, see
# <http://www.gnu.org/licenses/>.
import math
class SquareInterator:
'''Iterates squares around a point.
Note that first returning point is the center point itself.
example usage:
for x, y in SquareIterator((4, 3), (100, 100))
'''
ITERATION_DIRECTIONS = [(1, 0), (0, 1), (-1, 0), (0, -1)]
def __init__(self, center_point, map_size, max_radios=None):
'''
@param center_point: Point to iterate around.
@param map_size: Size of the current map (world).
@keyword max_radios: If provided, it iterates to this maximum distance
from the center. For example, if center is on Y 3, and max_radios is
2, it will goes up to Y 5.
'''
self._center_x = center_point[0]
self._center_y = center_point[1]
self._map_size_x = map_size[0]
self._map_size_y = map_size[1]
self._max_raios = max_radios
def __iter__(self):
return next(self)
def __next__(self):
# First point is the center itself.
yield (self._center_x, self._center_y)
# The walking algorithm:
# It iterates points around the center, in a shape of square.
# First, it finds the upper left corner of the square. Then, it moves to the right.
# After reaching the right edge, it moves down. Then, left, then, up.
# After that, it increase the size of the square's side by one, and iterates again.
# How many squares to walk in each row? e.g. Square's side size.
length = 0
while True:
square_found = False
length += 2
corner_x = self._center_x - math.floor(length / 2)
corner_y = self._center_y - math.floor(length / 2)
current_x = corner_x
current_y = corner_y
for direction in SquareInterator.ITERATION_DIRECTIONS:
for i in range(length):
current_x += direction[0]
current_y += direction[1]
if (current_x < 0 or current_x > self._map_size_x - 1 or
current_y < 0 or current_y > self._map_size_y - 1):
# Out of map.
continue
square_found = True
yield (current_x, current_y)
if not square_found:
# If nothing found after a complete loop (e.g. we iterate all possible points.)
raise StopIteration()
if self._max_raios is not None and (length / 2) >= self._max_raios:
# We iterated to the maximum requested radios.
raise StopIteration()
|
Qwlouse/Findeco | node_storage/tests/test_nodepath_cache.py | Python | gpl-3.0 | 3,574 | 0.002518 | #!/usr/bin/python
# coding=utf-8
# region License
# Findeco is dually licensed under GPLv3 or later and MPLv2.
#
################################################################################
# Copyright (c) 2012 Klaus Greff <klaus.greff@gmx.net>
# This file is part of Findeco.
#
# Findeco is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 3 of the License, or (at your option) any later
# version.
#
# Findeco is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# Findeco. If not, see <http://www.gnu.org/licenses/>.
################################################################################
#
################################################################################
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#endregion #####################################################################
from __future__ import division, print_function, unicode_literals
from django.test import TestCase
from ..models import PathCache, Node
from node_storage.factory import create_slot, create_structureNode, create_argument
from n | ode_storage.path_helpers import get_root_node
class NodePathCacheTest(TestCase):
def setUp(self):
self.root = get_root_node()
def test_root_node_has_path(self):
r = PathCache.objects.get(path='').node
self.assertEqual(r, self.root)
p = PathCache.objects.get(node=self.root).path
self.assert | Equal(p, '')
def test_append_child_slot_adds_to_path_cache(self):
slot = create_slot('Foo')
self.root.append_child(slot)
self.assertEqual(slot, PathCache.objects.get(path='Foo').node)
self.assertEqual('Foo', PathCache.objects.get(node=slot).path)
def test_append_child_structure_node_adds_to_path_cache(self):
slot = create_slot('Foo')
self.root.append_child(slot)
sn = create_structureNode("Foobarbaz")
slot.append_child(sn)
self.assertEqual(sn, PathCache.objects.get(path='Foo.1').node)
self.assertEqual('Foo.1', PathCache.objects.get(node=sn).path)
def test_append_child_slot_adds_all_paths(self):
slot = create_slot('Foo')
self.root.append_child(slot)
sn1 = create_structureNode("Foobarbaz1")
slot.append_child(sn1)
sn2 = create_structureNode("Foobarbaz2")
slot.append_child(sn2)
slot_t = create_slot('Ba')
sn1.append_child(slot_t)
sn2.append_child(slot_t)
sn_test = create_structureNode("Barbaren")
slot_t.append_child(sn_test)
self.assertEqual(sn_test, PathCache.objects.get(path='Foo.1/Ba.1').node)
self.assertEqual(sn_test, PathCache.objects.get(path='Foo.2/Ba.1').node)
def test_creating_an_argument_adds_to_path_cache(self):
slot = create_slot('Foo')
self.root.append_child(slot)
sn = create_structureNode("Foobarbaz1")
slot.append_child(sn)
a = create_argument(sn, arg_type='con')
node_a = Node.objects.get(id=a.id)
self.assertEqual(node_a, PathCache.objects.get(path='Foo.1.con.1').node)
|
vlebedynskyi/Automator | test/test_tools/test_http_utils.py | Python | apache-2.0 | 4,938 | 0.000203 | """
Copyright 2014 Vitalii Lebedynskyi
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from unittest import TestCase
from tools.http_utils import UrlBuilder
class TestHttpBuilder(TestCase):
def test_constructor_http(self):
builder = UrlBuilder("http://google.com")
self.assertEqual(builder.url_scheme, "http://")
self.assertEqual(builder.base, "google.com")
def test_constructor_https(self):
builder = UrlBuilder("https://google.com")
self.assertEqual(builder.url_scheme, "https://")
self.assertEqual(builder.base, "google.com")
builder = UrlBuilder("https://google.com/")
self.assertEqual(builder.url_scheme, "https://")
self.assertEqual(builder.base, "google.com")
def test_append_path(self):
builder = UrlBuilder("http://google.com")
builder.append_path("test/path/test")
self.assertEqual(builder.path_parts, ["test", "path", "test"])
builder.append_path("/one/two/test/")
self.assertEqual(builder.path_parts, ["test", "path", "test",
"one", "two", "test"])
def test_set_arguments(self):
args = {"empty": None, "not_empty": "value", "int": 12, None: None}
builder = UrlBuilder("http://google.com")
builder.add_arguments(args=args)
builder_arguments = builder.arguments
self.assertIn("not_empty", builder_arguments)
self.assertIn("int", builder_arguments)
self.assertNotIn("empty", builder_arguments)
self.assertNotIn(None, builder_arguments)
self.assertEqual(builder_arguments["not_empty"], "value")
self.assertEqual(builder_arguments["int"], 12)
def test_set_scheme(self):
builder = UrlBuilder("http://google.com")
builder.scheme("http://")
self.assertEqual(builder.url_scheme, "http://")
builder.scheme("https://")
self.assertEqual(builder.url_scheme, "https://")
builder.scheme("file://")
self.assertEqual(builder.url_scheme, "file://")
def test_build_with_args(self):
url = "http://google.com"
args = {"key": "value", "key_int": 12, "empty": None}
path = "/Some/test///Path//"
builder = UrlBuilder(url)
builder.add_arguments(args)
builder.append_path(path)
builder.scheme("https://")
| final_url = "https://google.com/Some/test/Path?key=value&key_int=12"
final_url2 = "https://google.com/Some/test/Path?key_int=12&key=value"
result = builder.build()
try:
self.assertEqual(result, final_url)
except AssertionError:
self.assertEqual(result, final_url2)
def test_build_without_args(self):
url = "http://google.com"
path = "/Some/test///Path//"
buil | der = UrlBuilder(url)
builder.append_path(path)
builder.scheme("https://")
final_url = "https://google.com/Some/test/Path"
self.assertEqual(builder.build(), final_url)
def test_build_without_path(self):
url = "http://google.com"
args = {"key": "value", "key_int": 12, "empty": None}
builder = UrlBuilder(url)
builder.add_arguments(args)
builder.scheme("https://")
final_url = "https://google.com?key=value&key_int=12"
final_url2 = "https://google.com?key_int=12&key=value"
result = builder.build()
try:
self.assertEqual(result, final_url)
except AssertionError:
self.assertEqual(result, final_url2)
def test_gen_path(self):
url = "http://google.com"
path = "//test/test//test//"
builder = UrlBuilder(url)
builder.append_path(path)
builder.append_path("more/more2")
expected_result = "test/test/test/more/more2"
self.assertEqual(builder.gen_path(), expected_result)
def test_gen_args(self):
args = {"encoded": "normal", "not_encoded": "http://Hello"}
url = "http://google.com"
builder = UrlBuilder(url)
builder.add_arguments(args)
result_args = "encoded=normal¬_encoded=http%3A%2F%2FHello"
result_args2 = "not_encoded=http%3A%2F%2FHello&encoded=normal"
generated_args = builder.gen_args()
try:
self.assertEqual(generated_args, result_args)
except AssertionError:
self.assertEqual(generated_args, result_args2) |
Darthkpo/xtt | openpyxl/cell/read_only.py | Python | mit | 3,850 | 0.001818 | from __future__ import absolute_import
# Copyright (c) 2010-2015 openpyxl
from openpyxl.compat import unicode
from openpyxl.cell import Cell
from openpyxl.utils.datetime import from_excel
from openpyxl.styles import is_date_format, Style
from openpyxl.styles.numbers import BUILTIN_FORMATS
from openpyxl.styles.styleable import StyleableObject
class ReadOnlyCell(object):
__slots__ = ('parent', 'row', 'column', '_value', 'data_type', '_style_id')
def __init__(self, sheet, row, column, value, data_type='n', style_id=None):
self.parent = sheet
self._value = None
self.row = row
self.column = column
self.data_type = data_type
self.value = value
self._style_id = style_id
def __eq__(self, other):
for a in self.__slots__:
if getattr(self, a) != getattr(other, a):
return
return True
def __ne__(self, other):
return not self.__eq__(other)
@property
def shared_strings(self):
return self.parent.shared_strings
@property
def base_date(self):
return self.parent.base_date
@property
def coordinate(self):
if self.row is None or self.column is None:
raise AttributeError("Empty cells have no coordinates")
return "{1}{0}".format(self.row, self.column)
@property
def style_id(self):
if not self._style_id:
return
return self.parent.parent._cell_styles[self._style_id]
@property
def number_format(self):
if not self.style_id:
return
_id = self.style_id.number_format
if _id < 164:
return BUILTIN_FORMATS.get(_id, "General")
else:
return self.parent.parent._number_formats[_id - 164]
@property
def font(self):
_id = self.style_id.font
return self.parent.parent._fonts[_id]
@property
def fill(self):
_id = self.style_id.fill
return self.parent.parent._fills[_id]
@property
def border(self):
_id = self.style_id.border
return self.parent.parent._borders[_id]
@property
def alignment(self):
_id = self.style_id.alignment
return self.parent.parent._alignments[_id]
@property
def protection(self):
_id = self.style_id.protection
return self.parent.parent._protections[_id]
@property
def is_date(self):
return self.data_type == 'n' and is_date_format(self.number_format)
@property
def internal_value(self):
return self._value
@property
def value(self):
if self._value is None:
return
if self.data_type == 'n':
if is_date_format(self.number_format):
return from_excel(self._value, self.base_date)
return self._value
if self.data_type == 'b':
return self._value == '1'
elif self.data_type in(Cell.TYPE_INLINE, Cell.TYPE_FORMULA_CACHE_STRING):
return unicode(self._value)
elif self.data_type == 's':
return unicode(self.shared_strings[int(self._value)])
return self._value
@value.setter
def value(self, value):
if self._value is not None:
| raise AttributeError("Cell is read only")
if value is None:
self.data_type = 'n'
elif self.data_type == 'n':
try:
value = int(value)
except ValueError:
value = float(value)
self._value = value
@property
def style(self):
return Style(font=self.font, alignment=self.alignment,
fill=self.fill, number_format=self.number_format, border=self.border,
| protection=self.protection)
EMPTY_CELL = ReadOnlyCell(None, None, None, None)
|
brainwane/zulip | zilencer/management/commands/print_initial_password.py | Python | apache-2.0 | 1,088 | 0.002757 | from argparse import ArgumentParser
from typing import Any
from zerver.lib.init | ial_password import initial_password
from zerver.lib.management import ZulipBaseCommand
from zerver.lib.users import get_api_key
class Command(ZulipBaseComm | and):
help = "Print the initial password and API key for accounts as created by populate_db"
fmt = '%-30s %-16s %-32s'
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('emails', metavar='<email>', type=str, nargs='*',
help="email of user to show password and API key for")
self.add_realm_args(parser)
def handle(self, *args: Any, **options: str) -> None:
realm = self.get_realm(options)
print(self.fmt % ('email', 'password', 'API key'))
for email in options['emails']:
if '@' not in email:
print(f'ERROR: {email} does not look like an email address')
continue
user = self.get_user(email, realm)
print(self.fmt % (email, initial_password(email), get_api_key(user)))
|
rbessick5/PrairieLearn | exampleCourse/questions/examplePLDrawingSimpleTutorial/server.py | Python | agpl-3.0 | 4,964 | 0.006245 | import random
import math
import numpy as np
def generate(data):
canvas_width = 300
canvas_height = 300
data["params"]["canvas_width"] = canvas_width
data["params"]["canvas_height"] = canvas_height
# Creating all variables, even knowing that just a few would be used in this Example
# Could improve later
# ------------------------------------------
# point
# ------------------------------------------
x0 = 40
y0 = 40
interval = np.linspace(100,200,11)
xP = random.choice(interval)
data["params"]["xP"] = xP
yP = random.choice(interval)
data["params"]["xP"] = xP
data["params"]["yP"] = yP
data["params"]["x0"] = x0
data["params"]["y0"] = canvas_height - y0
# ------------------------------------------
xA = 40
xD = 260
xB = random.randint(xA+20,xD/2)
xC = random.randint(xB + 30,xD-20)
y = 160
w = xC-xB
xE = xB + w/2
data["params"]["xA"] = xA
data["params"]["xB"] = xB
data["params"]["xC"] = xC
data["params"]["xD"] = xD
data["params"]["xE"] = xE
data["params"]["y"] = y
data["params"]["w"] = w
# ------------------------------------------
# vector
# ------------------------------------------
allangles = [0,30,45,60,90]
alpha = random.choice(allangles) if random.choice([0,1]) else -random.choice(allangles)
data["params"]["alpha"] = alpha
alphas = -alpha
# ------------------------------------------
# arc vector
# ------------------------------------------
if random.choice([0,1]):
data["params"]["arc_orientation"] = " | true"
arc_orientation_text = "clockwise"
else:
dat | a["params"]["arc_orientation"] = "false"
arc_orientation_text = "counter-clockwise"
# ------------------------------------------
# distributed load
# ------------------------------------------
wmin = 20
wmax = 60
angle = random.choice([0,180])
if random.choice([0,1]):
comp = "larger "
if angle == 0:
data["params"]["w1"] = wmax
data["params"]["w2"] = wmin
else:
data["params"]["w1"] = wmin
data["params"]["w2"] = wmax
else:
comp = "smaller "
if angle == 0:
data["params"]["w1"] = wmin
data["params"]["w2"] = wmax
else:
data["params"]["w1"] = wmax
data["params"]["w2"] = wmin
data["params"]["theta"] = angle
orientation = "downwards" if angle == 0 else "upwards"
# ------------------------------------------
# torque
# ------------------------------------------
if random.choice([0,1]):
torque_sign = "positive"
data["params"]["gamma"] = 0
else:
torque_sign = "negative"
data["params"]["gamma"] = 180
for i in range(0,7):
data["params"]["visible" + str(i)] = "false"
question = random.choice([1,2,3,4,5,6])
text = ""
if (question == 1):
text = "Add a point to position $(" + str(xP - x0) + "," + str(canvas_height - yP - y0) + ")$"
text += " w.r.t. the origin $O$. Each square side has length 20."
data["params"]["visible1"] = 'true'
elif (question == 2):
text = "Add a vector at $B$ with orientation $\\theta = $" + str(alphas)
data["params"]["visible2"] = 'true'
data["params"]["visible0"] = 'true'
elif (question == 3):
text = "Add a " + str(arc_orientation_text) + " moment at $C$."
data["params"]["visible3"] = 'true'
data["params"]["visible0"] = 'true'
elif (question == 4):
text = "Apply an uniform distributed load to the beam below, on the interval from $B$ to $C$."
text += " The load should point " + str(orientation) + "."
data["params"]["visible4"] = 'true'
data["params"]["visible0"] = 'true'
elif (question == 5):
text = "Apply a triangular distributed load to the beam below, on the interval from $B$ to $C$."
text += " The load should point " + str(orientation) + "."
text += " The magnitude (in absolute value) in $B$ is " + str(comp) + "than in $C$."
data["params"]["visible5"] = 'true'
data["params"]["visible0"] = 'true'
elif (question == 6):
text = "Apply a " + str(torque_sign) + " torque at $C$."
data["params"]["visible6"] = 'true'
data["params"]["visible0"] = 'true'
data["params"]["text"] = text
return data
|
oleg-cherednik/hackerrank | Python/Introduction/Python - Division/solution.py | Python | apache-2.0 | 120 | 0.008333 | #!/bin/pytho | n3
if __name__ == '__main__':
a = int(input())
b = i | nt(input())
print(a // b)
print(a / b) |
lifulong/account-manager | src/core/file_store.py | Python | gpl-2.0 | 4,172 | 0.040029 | #!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
"""
from exception import *
class base_storage:
def __init__(self, usr=None, usr_key=None):
self.usr_key = None
self.usr = None
self.records = []
if usr is None:
return
self.load_info_from_file()
if self.usr != usr:
raise UsrError
if self.usr_key != usr_key:
raise PasswdError
def new_user(self, usr, usr_key):
"""
create or register new user to file storage
"""
if self.usr is not None:
raise LoginError, "Login In Usr Can Not Create New Usr,You Should Logout First."
self.usr = usr
self.usr_key = usr_key
self.flush_all()
def load_info_from_file(self, filename="passwd"):
"""
load and parse usr-passwd and usr account info
"""
with open(filename) as f:
for line in f:
line = line.strip('\n')
if line is "" or line.startswith("#") or line.startswith('"""'):
continue
if self.parse_manager_usr_info(line):
continue
else:
record = self.parse_manager_record(line)
self.records.append(record)
if self.usr is None or self.usr_key is None:
raise UsrError
def parse_manager_usr_info(self, info_str):
"""
parse account-manager usr info to usr and passwd
"""
info_list = info_str.split(":")
if len(info_list) is not 2:
return False
else:
if info_list[0] == "usr":
self.usr = info_list[1]
elif info_list[0] == "key":
self.usr_key = info_list[1]
if len(self.usr_key) is not 64:
raise ValueError
else:
return False
return True
def parse_manager_record(self, info_str):
"""
parse one record string to record tuple
"""
info_list = info_str.split(":")
if len(info_list) is not 6:
return None
return info_list[0], info_list[1], info_list[2], info_list[3], info_list[4], info_list[5]
def get_usr_info(self, usr=None):
"""Export interface
"""
return self.usr, self.usr_key
def get_usr_key(self, usr=None):
"""Export interface
"""
return self.usr_key
def get_records(self):
"""Export interface
"""
return self.records
def flush_one_re | cord(self, record):
"""
append one record to record file
"""
with open("passwd", "a+") as f:
f.write("{0}:{1}:{2}:{3}:{4}:{5}\n".format(record[0], record[1], record[2], record[3], record[4], record[5]))
def flush_all(self):
"""
flus | h usr&passwd and account record info to record file
"""
with open("passwd", "w+") as f:
if self.usr is not None:
f.write("usr:{0}\n".format(self.usr))
if self.usr_key is not None:
f.write("key:{0}\n".format(self.usr_key))
f.write("#{0}\t:\t{1}\t:\t{2}\t:\t{3}\t:\t{4}\t:\t{5}\n".
format("Ower", "Account", "Alias", "Email", "Mobile", "Passwd"))
for record in self.records:
f.write("{0}:{1}:{2}:{3}:{4}:{5}\n".format(record[0], record[1], record[2], record[3], record[4], record[5]))
def set_usr_info(self, info):
"""Export interface
set usr&key to account info storage
"""
if type(info) is not tuple:
raise TypeError
if len(info) is not 2:
raise ValueError
self.usr = info[0]
self.usr_key = info[1]
self.flush_all()
def set_key(self, key):
"""Export interface
set usr key to account info storage
"""
if self.usr is None:
raise UsrError, "Usr Is None."
if type(key) is not str:
raise TypeError
if key is None:
raise ValueError
self.usr_key = key
self.flush_all()
def put_record(self, record):
"""Export interface
"""
if record is not tuple:
raise TypeError
if len(record) is not 6:
raise ValueError
self.records.append(record)
self.flush_all()
#Check repeat
def append_record(self, record):
"""Export interface
"""
if type(record) is not tuple:
raise TypeError
if len(record) is not 6:
raise ValueError
self.records.append(record)
self.flush_one_record(record)
def put_records(self, records):
pass
def append_records(self, records):
if type(records) is not list:
raise TypeError
for record in records:
if type(record) is not tuple:
raise TypeError
if len(record) is not 6:
raise ValueError
self.records.append(record)
self.flush_one_record(record)
if __name__ == '__main__' :
pass
|
HaebinShin/tensorflow | tensorflow/python/kernel_tests/sparse_ops_test.py | Python | apache-2.0 | 27,450 | 0.00969 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Python ops defined in sparse_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import sparse_ops
from tensorflow.python.platform import googletest
# TODO(zongheng): it'd be great to factor out this function and various random
# SparseTensor gen funcs.
def _sparsify(x, thresh=0.5, index_dtype=np.int64):
x[x < thresh] = 0
non_zero = np.where(x)
x_indices = np.vstack(non_zero).astype(index_dtype).T
x_values = x[non_zero]
x_shape = x.shape
return ops.SparseTensor(
indices=x_indices, values=x_values, shape=x_shape), len(x_values)
class SparseToIndicatorTest(test_util.TensorFlowTestCase):
def _SparseTensor_5x6(self, dtype):
ind = np.array([
[0, 0],
[1, 0], [1, 3], [1, 4],
[3, 2], [3, 3]])
val = np.array([0, 10, 13, 14, 32, 33])
shape = np.array([5, 6])
return ops.SparseTensor(
constant_op.constant(ind, dtypes.int64),
constant_op.constant(val, dtype),
constant_op.constant(shape, dtypes.int64))
def _SparseTensor_2x3x4(self, dtype):
# Includes two entries with the form [1, 1, x] : 150.
ind = np.array([
[0, 0, 1],
[0, 1, 0],
[0, 1, 2],
[1, 0, 3],
[1, 1, 0],
[1, 1, 1],
[1, 1, 2],
[1, 2, 2]])
val = np.array([1, 10, 12, 103, 150, 149, 150, 122])
shape = np.array([2, 3, 4])
return ops.SparseTensor(
constant_op.constant(ind, dtypes.int64),
constant_op.constant(val, dtype),
constant_op.constant(shape, dtypes.int64))
def testInt32(self):
with self.test_session(use_gpu=False):
sp_input = self._SparseTensor_5x6(dtypes.int32)
output = spar | se_ops.sparse_to_indicator(sp_input, 50).eval()
expected_output = np.zeros((5, 50), dtype=np.bool)
expected_trues = ((0, 0), (1, 10), (1, 13), (1, 14), (3, 32), (3, 33))
for expected_true in expected_trues:
expected_output[expected_true] = True
self.assertAllEqual(output, expected_output)
def testInt64(self):
with | self.test_session(use_gpu=False):
sp_input = self._SparseTensor_5x6(dtypes.int64)
output = sparse_ops.sparse_to_indicator(sp_input, 50).eval()
expected_output = np.zeros((5, 50), dtype=np.bool)
expected_trues = [(0, 0), (1, 10), (1, 13), (1, 14), (3, 32), (3, 33)]
for expected_true in expected_trues:
expected_output[expected_true] = True
self.assertAllEqual(output, expected_output)
def testHigherRank(self):
with self.test_session(use_gpu=False):
sp_input = self._SparseTensor_2x3x4(dtypes.int64)
output = sparse_ops.sparse_to_indicator(sp_input, 200).eval()
expected_output = np.zeros((2, 3, 200), dtype=np.bool)
expected_trues = [(0, 0, 1), (0, 1, 10), (0, 1, 12),
(1, 0, 103), (1, 1, 149), (1, 1, 150),
(1, 2, 122)]
for expected_true in expected_trues:
expected_output[expected_true] = True
self.assertAllEqual(output, expected_output)
class SparseMergeTest(test_util.TensorFlowTestCase):
def _SparseTensor_3x50(self, indices_dtype, values_dtype):
ind = np.array([
[0, 0],
[1, 0], [1, 1], [1, 2],
[2, 0], [2, 1]])
# NB: these are not sorted
indices = np.array([0, 13, 10, 14, 32, 33])
values = np.array([-3, 4, 1, 1, 5, 9])
shape = np.array([3, 3])
indices = ops.SparseTensor(
constant_op.constant(ind, dtypes.int64),
constant_op.constant(indices, indices_dtype),
constant_op.constant(shape, dtypes.int64))
values = ops.SparseTensor(
constant_op.constant(ind, dtypes.int64),
constant_op.constant(values, values_dtype),
constant_op.constant(shape, dtypes.int64))
return indices, values
def testInt32AndFloat32(self):
vocab_size = 50
with self.test_session(use_gpu=False) as sess:
indices, values = self._SparseTensor_3x50(dtypes.int32, dtypes.float32)
sp_output = sparse_ops.sparse_merge(indices, values, vocab_size)
output = sess.run(sp_output)
self.assertAllEqual(
output.indices,
[[0, 0], [1, 10], [1, 13], [1, 14], [2, 32], [2, 33]])
self.assertAllEqual(
output.values,
[-3, 1, 4, 1, 5, 9])
self.assertAllEqual(
output.shape,
[3, vocab_size])
def testInt64AndFloat32(self):
vocab_size = 50
with self.test_session(use_gpu=False) as sess:
indices, values = self._SparseTensor_3x50(dtypes.int64, dtypes.float32)
sp_output = sparse_ops.sparse_merge(indices, values, vocab_size)
output = sess.run(sp_output)
self.assertAllEqual(
output.indices,
[[0, 0], [1, 10], [1, 13], [1, 14], [2, 32], [2, 33]])
self.assertAllEqual(
output.values,
[-3, 1, 4, 1, 5, 9])
self.assertAllEqual(
output.shape,
[3, vocab_size])
def testInt64AndFloat64(self):
vocab_size = 50
with self.test_session(use_gpu=False) as sess:
indices, values = self._SparseTensor_3x50(dtypes.int64, dtypes.float64)
sp_output = sparse_ops.sparse_merge(indices, values, vocab_size)
output = sess.run(sp_output)
self.assertAllEqual(
output.indices,
[[0, 0], [1, 10], [1, 13], [1, 14], [2, 32], [2, 33]])
self.assertAllEqual(
output.values,
[-3, 1, 4, 1, 5, 9])
self.assertAllEqual(
output.shape,
[3, vocab_size])
class SparseRetainTest(test_util.TensorFlowTestCase):
def _SparseTensor_5x6(self):
ind = np.array([
[0, 0],
[1, 0], [1, 3], [1, 4],
[3, 2], [3, 3]])
val = np.array([0, 10, 13, 14, 32, 33])
shape = np.array([5, 6])
return ops.SparseTensor(
constant_op.constant(ind, dtypes.int64),
constant_op.constant(val, dtypes.int32),
constant_op.constant(shape, dtypes.int64))
def testBasic(self):
with self.test_session(use_gpu=False) as sess:
sp_input = self._SparseTensor_5x6()
to_retain = np.array([1, 0, 0, 1, 1, 0], dtype=np.bool)
sp_output = sparse_ops.sparse_retain(sp_input, to_retain)
output = sess.run(sp_output)
self.assertAllEqual(output.indices, [[0, 0], [1, 4], [3, 2]])
self.assertAllEqual(output.values, [0, 14, 32])
self.assertAllEqual(output.shape, [5, 6])
def testRetainNone(self):
with self.test_session(use_gpu=False) as sess:
sp_input = self._SparseTensor_5x6()
to_retain = np.zeros((6,), dtype=np.bool)
sp_output = sparse_ops.sparse_retain(sp_input, to_retain)
output = sess.run(sp_output)
self.assertAllEqual(output.indices, np.array([]).reshape((0, 2)))
self.assertAllEqual(output.values, [])
self.assertAllEqual(output.shape, [5, 6])
def testMismatchedRetainShape(self):
with self.test_session(use_gpu=False):
sp_input = self._SparseTensor_5x6()
to_retain = np.array([1, 0, 0, 1, 0], dtype=np.bool)
with self.assertRaises(ValueError):
sparse_ops.sparse_retain(sp_input, to_retain)
class SparseResetShapeTe |
orione7/plugin.video.streamondemand-pureita | core/cloudflare(old).py | Python | gpl-3.0 | 6,228 | 0.013327 | # -*- coding: utf-8 -*-
# ------------------------------------------------------------
# pelisalacarta 4
# Copyright 2015 tvalacarta@gmail.com
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#
# Distributed under the terms of GNU General Public License v3 (GPLv3)
# http://www.gnu.org/licenses/gpl-3.0.html
# ------------------------------------------------------------
# This file is part of pelisalacarta 4.
#
# pelisalacarta 4 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pelisalacarta 4 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pelisalacarta 4. If not, see <http://www.gnu.org/licenses/>.
# --------------------------------------------------------------------------------
# Cloudflare decoder
# --------------------------------------------------------------------------------
import urlparse
import re
import time
from core import logger
import urllib
class Cloudflare:
def __init__(self, response):
self.timeout = 5
self.domain = urlparse.urlparse(response["url"])[1]
self.protocol = urlparse.urlparse(response["url"])[0]
self.js_data = {}
self.header_data = {}
if not "var s,t,o,p,b,r,e,a,k,i,n,g,f" in response["data"] or "chk_jschl" in response["url"]:
return
try:
self.js_data["auth_url"] = re.compile('<form id="challenge-form" action="([^"]+)" method="get">').findall(response["data"])[0]
self.js_data["params"] = {}
self.js_data["params"]["jschl_vc"] = re.compile('<input type="hidden" name="jschl_vc" value="([^"]+)"/>').findall(response["data"])[0]
self.js_data["params"]["pass"] = re.compile('<input type="hidden" name="pass" value="([^"]+)"/>').findall(response["data"])[0]
var, self.js_data["value"] = re.compile('var s,t,o,p,b,r,e,a,k,i,n,g,f[^:]+"([^"]+)":([^\n]+)};', re.DOTALL).findall(response["data"])[0]
self.js_data["op"] = re.compile(var + "([\+|\-|\*|\/])=([^;]+)", re.MULTILINE).findall(response["data"])
self.js_data["wait"] = int(re.compile("\}, ([\d]+)\);", re.MULTILINE).findall(response["data"])[0]) / 1000
except:
logger.debug("Metodo #1 (javascript): NO disponible")
self.js_data = {}
if "refresh" in response["headers"]:
try:
self.header_data["wait"] = int(response["headers"]["refresh"].split(";")[0])
self.header_data["auth_url"] = response["headers"]["refresh"].split("=")[1].split("?")[0]
self.header_data["params"] = {}
self.header_data["params"]["pass"] = response["headers"]["refresh"].split("=")[2]
except:
logger.debug("Metodo #2 (headers): NO disponible")
self.header_data = {}
@property
def wait_time(self):
if self.js_data.get("wait", 0):
return self.js_data["wait"]
else:
return self.header_data.get("wait", 0)
@property
def is_cloudflare(self):
return self.header_data.get("wait", 0) > 0 or self.js_data.get("wait", 0) > 0
def get_url(self):
#Metodo #1 (javascript)
if self.js_data.get("wait", 0):
jschl_answer = self.decode(self.js_data["value"])
for op, v in self.js_data["op"]:
jschl_answer = eval(str(jschl_answer) + op + str(self.decode(v)))
self.js_data["params"]["jschl_answer"] = jschl_answer + len(self.domain)
response = "%s://%s%s?%s" % (self.protocol, self.domain, self.js_data["auth_url"], urllib.urlencode(self.js_data["params"]))
time.sleep(self.js_data["wait"])
return response
#Metodo #2 (headers)
if self.header_data.get("wait", 0):
response = "%s://%s%s?%s" % (self.protocol, self.domain, self.header_data["auth_url"], urllib.urlencode(self.header_data["params"]))
time.sleep(self.header_data["wait"])
| return response
def decode(self, data):
t = time.time()
timeout = False
while not timeout:
data = re.sub("\[\]", "''", data)
data = re.sub("!\+''", "+1", data)
data = re.sub("!''", "0", data)
data = re.sub("!0", "1", | data)
if "(" in data:
x, y = data.rfind("("), data.find(")", data.rfind("(")) +1
part = data[x+1:y-1]
else:
x=0
y = len(data)
part = data
val = ""
if not part.startswith("+"): part = "+" + part
for i, ch in enumerate(part):
if ch == "+":
if not part[i+1] == "'":
if val == "": val = 0
if type(val) == str:
val = val + self.get_number(part, i+1)
else:
val = val + int(self.get_number(part, i+1))
else:
val = str(val)
val = val + self.get_number(part, i+1) or "0"
if type(val) == str: val = "'%s'" % val
data = data[0:x] + str(val) + data[y:]
timeout = time.time() - t > self.timeout
if not "+" in data and not "(" in data and not ")" in data:
return int(self.get_number(data))
def get_number(self, str, start=0):
ret = ""
for chr in str[start:]:
try:
int(chr)
except:
if ret: break
else:
ret += chr
return ret
|
jhesketh/zookeepr | zkpylons/tests/functional/test_template.py | Python | gpl-2.0 | 2,064 | 0.002907 | from zkpylons.tests.functional import *
class TestTemplateController(ControllerTest):
"""Tests the Template controller and wiki integration."""
# Implementation
def setUp(self):
super(TestTemplateController, self).setUp()
self.logged_in = False
# create a user
self.p = model.core.Person(email_address='testguy@example.org',
password='p4ssw0rd')
self.p.activated = True
self.p.handle = 'Foomongler'
self.dbsession.save(self.p)
self.dbsession.flush()
self.pid = self.p.id
def tearDown(self):
# clean up
self._login()
resp = self.app.get('/NobodyExpectsTheSpanishInquisition', status=200)
if resp.body.find('bright side of life') > -1:
f = resp.forms[0]
f['action'] = 'DeletePage'
resp = f.submit()
resp.mustcontain('Really delete this page?')
f = resp.forms[0]
f.submit('delete')
self.dbsession.delete(self.dbsession.query(model.Person).get(self.p.id))
self.dbsession.flush()
super(TestTemplateController, self).tearDown()
def assertNotLoggedIn(self):
# confirm we aren't logged in
resp = self.app.get(url_for(controller='/NobodyExpectsTheSpanishInquisition'), status=200)
self.assertEquals(-1, resp.body.find('Foomongler'))
def _login(self):
if not self.logged_in:
# log in
resp = self.app.get(url_for(controller='person',
action='signin', id=None))
f = resp.form
f['email_address'] = 'testguy@example.org'
f['password'] = 'p4ssw0rd'
resp = f.submit()
self.logged_in = True
def test_clean_html(self):
resp = self.app.get('/NobodyExpectsTheSpanishInquisition', status=200)
| self.assertEquals(1, resp.body.count('<html'))
self.assertEquals(1, resp.body.count('<body'))
self.assertEquals(1, resp.body.count | ('<head'))
|
luoguizhou/gooderp_addons | app_odoo_customize/models/ir_ui_view.py | Python | agpl-3.0 | 576 | 0.006944 | # -*- coding: utf-8 -*-
import logging
from openerp import api, fields, models, _
_logger = logging.getLogger(__name__)
class View(models.Model):
_inherit = 'ir.ui.view'
@api.model
def render_ | template(self, template, values=None, engine='ir.qweb'):
if template in ['web.login', 'web.webclient_bootstrap']: |
if not values:
values = {}
values["title"] = self.env['ir.config_parameter'].get_param("app_system_name", "odooApp")
return super(View, self).render_template(template, values=values, engine=engine) |
jelly/calibre | src/calibre/gui2/convert/lrf_output.py | Python | gpl-3.0 | 1,262 | 0.006339 | #!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
from __future__ import with_statement
__license__ = 'GPL v3'
__copyright__ = '2009, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
from calibre.gui2.convert.lrf_output_ui import Ui_Form
from calibre.gui2.convert import Widg | et
font_family_model = None
class PluginWidget(Widget, Ui_Form):
    """Conversion-dialog widget exposing the LRF output options."""

    TITLE = _('LRF output')
    HELP = _('Options specific to')+' LRF '+_('output')
    COMMIT_NAME = 'lrf_output'
    ICON = I('mimetypes/lrf.png')

    def __init__(self, parent, get_option, get_help, db=None, book_id=None):
        # Option list for the LRF output plugin.  The original list carried
        # 'minimum_indent' twice; the duplicate has been removed.
        Widget.__init__(self, parent,
                ['wordspace', 'header', 'header_format',
                 'minimum_indent', 'serif_family',
                 'render_tables_as_images', 'sans_family', 'mono_family',
                 'text_size_multiplier_for_rendered_tables', 'autorotation',
                 'header_separation']
                )
        self.db, self.book_id = db, book_id
        self.initialize_options(get_option, get_help, db, book_id)
        # Toggle twice: fires the changed-signal handlers (so dependent
        # widgets sync their enabled state) while ending in the saved state.
        self.opt_header.toggle(), self.opt_header.toggle()
        self.opt_render_tables_as_images.toggle()
        self.opt_render_tables_as_images.toggle()
JoeyAndres/moodle-parallel-phpunit | utility.py | Python | gpl-2.0 | 7,806 | 0.005124 | """
utility.py
This is where functions that belongs to a reasonably size module.
This is also the place for functions that uses "os.system" which are
inherently UGLY.
"""
import time
import os
import subprocess
import re
import xml.etree.ElementTree as ET
# Local
import config
import const
"""
@type function|lambda
@param fn_or_lambda function or lambda to be executed and timed.
@type *args
@param *args Python's *args, (aka varargs). Dereferences a tuple if only args.
@return integer (s)
"""
def execute_and_time(fn_or_lambda, *args):
    """Invoke *fn_or_lambda* with *args* and return the elapsed wall-clock seconds."""
    started = time.time()
    fn_or_lambda(*args)
    return time.time() - started
"""
@type function|lambda
@param fn_or_lambda function or lambda to be executed and timed.
@type *args
@param *args Python's *args, (aka varargs). Dereferences a tuple if only args.
@return (integer (s), return value of fn_or_lambda)
"""
def execute_and_time_with_return(fn_or_lambda, *args):
    """Invoke the callable and return a tuple (elapsed_seconds, return_value)."""
    started = time.time()
    result = fn_or_lambda(*args)
    return (time.time() - started, result)
"""
"""
def extract_testsuites_from_phpunitxml(phpunitxml=config.moodle_phpunitxml_file):
    """Return the names of <testsuite> entries that have directory children.

    Reads Moodle's phpunit.xml; the testsuites container is the second
    child of the document root.
    """
    root = ET.parse(phpunitxml).getroot()
    testsuites_node = root[1]
    # A testsuite with child nodes (directories) is a runnable suite.
    return [node.get('name') for node in testsuites_node if len(node) > 0]
"""
Clears the contents of the file.
@type string
@param file_path Path to the file to clear the contents of.
"""
def empty_file(file_path):
    """Truncate *file_path* to zero length, creating it if missing.

    The previous `os.system('echo "" > ...')` spawned a shell and left a
    single newline in the file; opening in 'w' mode truly empties it.
    """
    with open(file_path, 'w'):
        pass
"""
Merges all the source_files to the dest_file.
@type List
@param source_files List of path of files to merge.
@type string
@param dest_file The path of the file in which all the source_files are merged.
"""
def merge_files(source_files, dest_file):
    """Concatenate *source_files*, in order, into *dest_file*.

    *dest_file* is created (or truncated) first.  The previous shell-based
    version (`echo "" > dest` followed by `cat >>`) prepended a spurious
    newline to the destination; this version writes the sources' bytes only.
    """
    with open(dest_file, 'wb') as dest:
        for source_file in source_files:
            with open(source_file, 'rb') as src:
                dest.write(src.read())
"""
@type List
@param files List containing the path of files to remove.
"""
def rm_files(files):
    """Remove each file in *files*.

    Uses os.remove instead of shelling out to `rm` (avoids quoting/injection
    issues).  Like plain `rm`, a missing file is reported-and-skipped rather
    than aborting the batch.
    """
    for path in files:
        try:
            os.remove(path)
        except OSError:
            # mirror `rm`'s behaviour of continuing past missing files
            pass
"""
@type list
@param directories List of directories to create.
"""
def mkdirs(dirs):
    """Create each directory in *dirs*, including parents (``mkdir -p``).

    Uses os.makedirs instead of shelling out; an already-existing directory
    is a no-op, matching the old `mkdir -p` behaviour.
    """
    for d in dirs:
        if not os.path.isdir(d):
            os.makedirs(d)
|
def build_container(container_name, docker_file, docker_file_directory):
    """Build a docker image via the build-container.sh helper script."""
    cmd = "{0}/build-container.sh {1} {2} {3}".format(
        config.bash_files,
        container_name,
        docker_file,
        docker_file_directory)
    os.system(cmd)
"""
Starts the docker container.
@type string
@param container_name Name of the container to start.
"""
def start_container(container_name):
    """Start the named docker container via the start-container.sh script."""
    cmd = "{0}/start-container.sh {1}".format(
        config.bash_files,
        container_name)
    os.system(cmd)
"""
Remove docker container if it exist. Nothing happens otherwise.
@type string
@param container_name Name of the container to remove.
"""
def remove_container(container_name):
    """Remove the named docker container (no-op if it does not exist)."""
    cmd = "{0}/remove-container.sh {1}".format(
        config.bash_files,
        container_name)
    os.system(cmd)
"""
Creates the container from a given image.
@type string
@param image_name Name of the image that is built already.
@type string
@param container_name Name of the container to create.
@type string
@param moodle_directory Path to the moodle directory. Defaults to the one in
config.
"""
def create_container(image_name,
                     container_name,
                     moodle_directory=config.moodle_directory):
    """Create a container from *image_name*, mounting the moodle directory.

    The extra -v option mounts a per-container phpunit dataroot at
    /phpu_moodledata inside the container.
    """
    extra_options = "\" -v {0}:/phpu_moodledata\"".format(
        config.container_phpunit_dataroot_template.format(container_name))
    cmd = "{0}/create-moodle-parallel-phpunit-container.sh {1} {2} {3} {4}".format(
        config.bash_files,
        image_name,
        container_name,
        moodle_directory,
        extra_options)
    os.system(cmd)
"""
Blocks when the given container name is loading.
@type string
@param container_name Name of the container to watch and block while
its database function.
"""
def block_while_initializing_db(container_name):
    """Block until the container's database has finished starting up."""
    os.system("{0}/block-until-db-starts.sh {1}".format(
        config.bash_files,
        container_name))
"""
Executes the given testsuite(s) in the given container and appends
its result in the result_file.
@type string
@param container_name Name of the container to run the unit test.
@type list
@param testsuites List of testsuite(s) to run.
@type string
@param result_file Path to the result file where the test results are appended.
@return True if no error, otherwise False.
"""
def run_phpunit_test(container_name, testsuites, result_file):
    """Run the given testsuite(s) in *container_name*, appending to *result_file*.

    @type container_name: string
    @type testsuites: list
    @type result_file: string
    @return: True if no failure was reported, False if one was, None when
             *testsuites* is empty (nothing happens — preserves the original
             behaviour).
    """
    # Original code compared `len(testsuites) is 1` — identity comparison on
    # an int, which only happens to work for CPython's small-int cache.  The
    # two near-identical branches are also unified here.
    if not testsuites:
        # Otherwise nothing happens.
        return None
    if len(testsuites) == 1:
        suites_arg = testsuites[0]
    else:
        # multiple suites are passed as one quoted, space-separated argument
        suites_arg = "\"{0}\"".format(" ".join(testsuites))
    cmd = "{0}/test-moodle-parallel-phpunit.sh {1} {2} {3}".format(
        config.bash_files,
        container_name,
        suites_arg,
        result_file)
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
    (out, err) = proc.communicate()
    # phpunit prints "There was N failure..." on failures
    match = re.search("There was [0-9]+ failure", out)
    return match is None
"""
Initialize the phpunit db of a given container. Specifically, executes:
php admin/tool/phpunit/cli/init.php
@type string
@param container_name Name of the container to initialize db.
"""
def initialize_phpunit_db(container_name):
    """Initialize the container's phpunit db (php admin/tool/phpunit/cli/init.php)."""
    cmd = "{0}/initialize-moodle-parallel-phpunit-db.sh {1}".format(
        config.bash_files,
        container_name)
    os.system(cmd)
"""
Backup the phpunit db to a backup_file.
@type string
@param container_name Name of the container to backup the db of.
"""
def backup_phpunit_db(container_name, backup_file=config.backup_file):
    """Dump the container's phpunit database to *backup_file*."""
    cmd = "{0}/backup-postgresql.sh {1} {2}".format(
        config.bash_files,
        container_name,
        backup_file)
    os.system(cmd)
"""
Restore the phpunit db.
@type string
@param container_name Name of the container to restore db.
"""
def restore_phpunit_db(container_name):
    """Restore the container's phpunit database from the default backup."""
    cmd = "{0}/restore-postgresql.sh {1}".format(
        config.bash_files,
        container_name)
    os.system(cmd)
def copy_dir(src_dir, dest_dir):
    """Recursively copy *src_dir* onto *dest_dir*.

    -T copies src's contents onto dest itself (GNU cp), -R recurses.
    """
    os.system("cp -TR {0} {1}".format(src_dir, dest_dir))
def copy_file_to_dir(src_file, dest_dir):
    """Copy a single file into *dest_dir*."""
    os.system("cp {0} {1}".format(src_file, dest_dir))
"""
@type *
@param arg The arg to examine.
@type list
@param arg_list The list in which arg belongs.
@type lambda|function
@param handler The handler to the next argument if any.
@type string
@param msg The message to display in error.
"""
def handle_option(option, arg_list, handler, msg="", *args):
    """Dispatch *handler* when *option* occurs in *arg_list*.

    The handler receives the index of the option's value (the position
    right after the option itself).  Returns const.ERROR if the handler
    raised IndexError (i.e. the option's value was missing), const.OK
    otherwise.
    """
    if option in arg_list:
        arg_index = arg_list.index(option) + 1
        try:
            handler(option, arg_list, arg_index, *args)
        except IndexError:
            # option was given as the last argument, so its value is missing
            print msg
            return const.ERROR
    return const.OK
|
hugovk/congress-legislators | scripts/sweep.py | Python | cc0-1.0 | 1,432 | 0.007682 | #!/usr/bin/env python
from utils import load_data, save_data
def run():
    """Sweep stale (out-of-office) legislators from committee membership
    and social media data files."""
    # load in members, orient by bioguide ID
    print("Loading current legislators...")
    current = load_data("legislators-current.yaml")
    current_bioguide = { }
    for m in current:
        if "bioguide" in m["id"]:
            current_bioguide[m["id"]["bioguide"]] = m

    # remove out-of-office people from current committee membership
    print("Sweeping committee membership...")
    membership_current = load_data("committee-membership-current.yaml")
    for committee_id in list(membership_current.keys()):
        # Iterate over a COPY of the member list: the original code removed
        # elements from the list it was iterating, which silently skips the
        # element immediately following each removal.
        for member in list(membership_current[committee_id]):
            if member["bioguide"] not in current_bioguide:
                print("\t[%s] Ding ding ding! (%s)" % (member["bioguide"], member["name"]))
                membership_current[committee_id].remove(member)
    save_data(membership_current, "committee-membership-current.yaml")

    # remove out-of-office people from social media info
    print("Sweeping social media accounts...")
    socialmedia_current = load_data("legislators-social-media.yaml")
    for member in list(socialmedia_current):
        if member["id"]["bioguide"] not in current_bioguide:
            print("\t[%s] Ding ding ding! (%s)" % (member["id"]["bioguide"], member["social"]))
            socialmedia_current.remove(member)
    save_data(socialmedia_current, "legislators-social-media.yaml")
if __name__ == '__main__':
    # allow running the sweep directly as a standalone script
    run()
|
hes19073/hesweewx | bin/weewx/reportengine.py | Python | gpl-3.0 | 33,225 | 0.001625 | #
# Copyright (c) 2009-2020 Tom Keffer <tkeffer@gmail.com>
#
# See the file LICENSE.txt for your full rights.
#
"""Engine for generating reports"""
from __future__ import absolute_import
# System imports:
import datetime
import ftplib
import glob
import logging
import os.path
import threading
import time
import traceback
# 3rd party imports
from six.moves import zip
import configobj
# WeeWX imports:
import weeutil.config
import weeutil.logger
import weeutil.weeutil
import weewx.defaults
import weewx.manager
from weeutil.weeutil import to_bool
log = logging.getLogger(__name__)
# spans of valid values for each CRON like field
MINUTES = (0, 59)
HOURS = (0, 23)
DOM = (1, 31)
MONTHS = (1, 12)
DOW = (0, 6)
# valid day names for DOW field
DAY_NAMES = ('sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat')
# valid month names for month field
MONTH_NAMES = ('jan', 'feb', 'mar', 'apr', 'may', 'jun',
'jul', 'aug', 'sep', 'oct', 'nov', 'dec')
# map month names to month number
MONTH_NAME_MAP = list(zip(('jan', 'feb', 'mar', 'apr',
'may', 'jun', 'jul', 'aug',
'sep', 'oct', 'nov', 'dec'), list(range(1, 13))))
# map day names to day number
DAY_NAME_MAP = list(zip(('sun', 'mon', 'tue', 'wed',
'thu', 'fri', 'sat'), list(range(7))))
# map CRON like nicknames to equivalent CRON like line
NICKNAME_MAP = {
"@yearly": "0 0 1 1 *",
"@anually": "0 0 1 1 *",
"@monthly": "0 0 1 * *",
"@weekly": "0 0 * * 0",
"@daily": "0 0 * * *",
"@hourly": "0 * * * *"
}
# list of valid spans for CRON like fields
SPANS = (MINUTES, HOURS, DOM, MONTHS, DOW)
# list of valid names for CRON lik efields
NAMES = ((), (), (), MONTH_NAMES, DAY_NAMES)
# list of name maps for CRON like fields
MAPS = ((), (), (), MONTH_NAME_MAP, DAY_NAME_MAP)
# =============================================================================
# Class StdReportEngine
# =============================================================================
class StdReportEngine(threading.Thread):
"""Reporting engine for weewx.
This engine runs zero or more reports. Each report uses a skin. A skin
has its own configuration file specifying things such as which 'generators'
should be run, which templates are to be used, what units are to be used,
etc..
A 'generator' is a class inheriting from class ReportGenerator, that
produces the parts of the report, such as image plots, HTML files.
StdReportEngine inherits from threading.Thread, so it will be run in a
separate thread.
See below for examples of generators.
"""
    def __init__(self, config_dict, stn_info, record=None, gen_ts=None, first_run=True):
        """Initializer for the report engine.

        config_dict: The configuration dictionary.

        stn_info: An instance of weewx.station.StationInfo, with static
        station information.

        record: The current archive record [Optional; default is None]

        gen_ts: The timestamp for which the output is to be current
        [Optional; default is the last time in the database]

        first_run: True if this is the first time the report engine has been
        run.  If this is the case, then any 'one time' events should be done.
        """
        # Named thread so log lines from report generation are attributable.
        threading.Thread.__init__(self, name="ReportThread")
        self.config_dict = config_dict
        self.stn_info = stn_info
        self.record = record
        self.gen_ts = gen_ts
        self.first_run = first_run
def run(self):
"""This is where the actual work gets done.
Runs through the list of reports. """
if self.gen_ts:
log.debug("Running reports for time %s",
weeutil.weeutil.timestamp_to_string(self.gen_ts))
else:
log.debug("Running reports for latest time in the database.")
# Iterate over each requested report
for report in self.config_dict['StdReport'].sections:
# Ignore the [[Defaults]] section
if report == 'Defaults':
continue
# See if this report is disabled
enabled = to_bool(self.config_dict['StdReport'][report].get('enable', True))
if not enabled:
log.debug("Report '%s' not enabled. Skipping.", report)
continue
log.debug("Running report '%s'", report)
# Fetch and build the skin_dict:
try:
skin_dict = se | lf._build_skin_dict(report)
except SyntaxError as e:
log.error("Syntax error: %s", e)
log.error(" **** Report ignored")
continue
# Default action is to run the report. Only reason to not run it is
# if we have a valid report report_timing and it did not trigger.
if self.record:
# StdReport called us not wee_reports so look for a report_timing
# entry if we have one.
| timing_line = skin_dict.get('report_timing')
if timing_line:
# Get a ReportTiming object.
timing = ReportTiming(timing_line)
if timing.is_valid:
# Get timestamp and interval so we can check if the
# report timing is triggered.
_ts = self.record['dateTime']
_interval = self.record['interval'] * 60
# Is our report timing triggered? timing.is_triggered
# returns True if triggered, False if not triggered
# and None if an invalid report timing line.
if timing.is_triggered(_ts, _ts - _interval) is False:
# report timing was valid but not triggered so do
# not run the report.
log.debug("Report '%s' skipped due to report_timing setting", report)
continue
else:
log.debug("Invalid report_timing setting for report '%s', "
"running report anyway", report)
log.debug(" **** %s", timing.validation_error)
if 'Generators' in skin_dict and 'generator_list' in skin_dict['Generators']:
for generator in weeutil.weeutil.option_as_list(skin_dict['Generators']['generator_list']):
try:
# Instantiate an instance of the class.
obj = weeutil.weeutil.get_object(generator)(
self.config_dict,
skin_dict,
self.gen_ts,
self.first_run,
self.stn_info,
self.record)
except Exception as e:
log.error("Unable to instantiate generator '%s'", generator)
log.error(" **** %s", e)
weeutil.logger.log_traceback(log.error, " **** ")
log.error(" **** Generator ignored")
traceback.print_exc()
continue
try:
# Call its start() method
obj.start()
except Exception as e:
# Caught unrecoverable error. Log it, continue on to the
# next generator.
log.error("Caught unrecoverable exception in generator '%s'", generator)
log.error(" **** %s", e)
weeutil.logger.log_traceback(log.error, " **** ")
log.error(" **** Generator terminated")
traceback.print_exc()
continue
finally:
obj.finalize()
else:
log.debug("No generators specified |
CAMI-challenge/evaluation | binning/igregor/algbioi/eval/cami.py | Python | mit | 3,516 | 0.001138 | #!/usr/bin/env python
"""
Copyright (C) 2014 Ivan Gregor
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Contains functionality that enables the use of the evaluation scripts for the CAMI challenge.
"""
import os
from algbioi.com import csv
from algbioi.com import fasta
def concatenate(directory, outputFile):
    """Concatenate all FASTA files from *directory* into one FASTA file.

    Each input file becomes a single pseudo-sequence named after the file
    (part before the first '.'); the file's individual sequences are joined
    with 200 'N' spacers so downstream tools do not create chimeric genes
    across contig boundaries.
    """
    out = csv.OutFileBuffer(outputFile)
    for f in os.listdir(directory):
        path = os.path.join(directory, f)
        name = f.split('.')[0]
        seqIdToSeq = fasta.fastaFileToDict(path)
        out.writeText('>' + str(name) + '\n')
        for seqId, seq in seqIdToSeq.iteritems():
            out.writeText(str(seq) + 200*'N' + '\n')
    out.close()
def outToCami(ppspOutFile):
    """
    Creates a cami output file, in format:

    #CAMI Format for Binning
    @Task:Binning
    @Version:1.0
    @ContestantID:CONTESTANTID
    @SampleID:SAMPLEID
    @Referencebased:T
    @Assemblybased:T
    @ReplicateInfo:T
    @@SEQUENCEID TAXID BINID
    read1201 123 123
    read1202 123 123
    read1203 131564 131564
    read1204 562 562.1
    read1205 562 562.2
    """
    out = csv.OutFileBuffer(ppspOutFile + '.cami')
    out.writeText("""#CAMI Format for Binning
@Task:Binning
@Version:1.0
@ContestantID:CONTESTANTID
@SampleID:SAMPLEID
@Referencebased:T
@Assemblybased:T
@ReplicateInfo:T
@@SEQUENCEID TAXID BINID
""")
    for line in open(ppspOutFile):
        # Take the first two tab-separated columns and ignore any extras.
        # The old `split('\t', 2)` produced up to THREE parts, so a line
        # with 3+ columns raised ValueError on the two-name unpacking.
        # This mirrors readCami() below.
        name, taxonId = line.strip('\n').split('\t')[0:2]
        out.writeText("%s\t%s\t%s\n" % (name, taxonId, taxonId))
    out.close()
def createCamiOut(fileDir):
    """
    Convert the PPS output file found in *fileDir* to the CAMI format.

    @param fileDir: directory where the output files are located
    """
    # Expect exactly one '*.pOUT' file; warn otherwise, but still convert
    # whatever was found.
    outList = getPPSPOutPathList(fileDir)
    if len(outList) != 1:
        print('Unusual number of the output "pOUT" files detected: %s' % len(outList))
    for path in outList:
        outToCami(path)
def getPPSPOutPathList(outDir):
    """Return paths of PPS output files in *outDir*.

    A PPS output file ends in '.pOUT' but not '.PP.pOUT'.
    """
    paths = []
    for entry in os.listdir(outDir):
        parts = entry.split('.')
        if parts[-1] == 'pOUT' and parts[-2] != 'PP':
            paths.append(os.path.join(outDir, entry))
    return paths
def readAssignments(assignmentFile):
    """
    Reads an assignment file, either in the cami format or in the PPS output (out) format

    @rtype: dict
    @return: mapping(name->taxonId)
    """
    # Dispatch on the extension: '*.cami' files use the CAMI format,
    # everything else is assumed to be PPS output.
    if os.path.basename(assignmentFile).split('.')[-1] == 'cami':
        return readCami(assignmentFile)
    else:
        return csv.predToDict(assignmentFile)
def readCami(camiAssignFile):
    """
    Reads a file in the cami format

    @rtype: dict
    @return: mapping(name->taxonId) from the first two tab-separated columns
    """
    ret = {}
    for line in open(camiAssignFile):
        line = line.strip()
        # skip comment ('#'), header ('@') and empty lines
        if not (line.startswith('#') or line.startswith('@') or len(line) == 0):
            name, taxonId = line.split('\t')[0:2]
            ret[name] = taxonId
        else:
            # echo skipped header/comment lines for inspection
            print line
    return ret
|
zsoltdudas/lis-tempest | tempest/api/network/admin/test_agent_management.py | Python | apache-2.0 | 3,750 | 0 | # Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.network import base
from tempest.common import tempest_fixtures as fixtures
from tempest import test
class AgentManagementTestJSON(base.BaseAdminNetworkTest):
    """API tests for the neutron 'agent' extension (admin credentials)."""

    @classmethod
    def skip_checks(cls):
        super(AgentManagementTestJSON, cls).skip_checks()
        if not test.is_extension_enabled('agent', 'network'):
            msg = "agent extension not enabled."
            raise cls.skipException(msg)

    @classmethod
    def resource_setup(cls):
        super(AgentManagementTestJSON, cls).resource_setup()
        body = cls.admin_agents_client.list_agents()
        agents = body['agents']
        # keep one agent around for the per-test assertions below
        cls.agent = agents[0]

    @test.idempotent_id('9c80f04d-11f3-44a4-8738-ed2f879b0ff4')
    def test_list_agent(self):
        body = self.admin_agents_client.list_agents()
        agents = body['agents']
        # Hearthbeats must be excluded from comparison
        self.agent.pop('heartbeat_timestamp', None)
        self.agent.pop('configurations', None)
        for agent in agents:
            agent.pop('heartbeat_timestamp', None)
            agent.pop('configurations', None)
        self.assertIn(self.agent, agents)

    @test.idempotent_id('e335be47-b9a1-46fd-be30-0874c0b751e6')
    def test_list_agents_non_admin(self):
        # a non-admin tenant must not see any agents
        body = self.agents_client.list_agents()
        self.assertEqual(len(body["agents"]), 0)

    @test.idempotent_id('869bc8e8-0fda-4a30-9b71-f8a7cf58ca9f')
    def test_show_agent(self):
        body = self.admin_agents_client.show_agent(self.agent['id'])
        agent = body['agent']
        self.assertEqual(agent['id'], self.agent['id'])

    @test.idempotent_id('371dfc5b-55b9-4cb5-ac82-c40eadaac941')
    def test_update_agent_status(self):
        origin_status = self.agent['admin_state_up']
        # Try to update the 'admin_state_up' to the original
        # one to avoid the negative effect.
        agent_status = {'admin_state_up': origin_status}
        body = self.admin_agents_client.update_agent(agent_id=self.agent['id'],
                                                     agent=agent_status)
        updated_status = body['agent']['admin_state_up']
        self.assertEqual(origin_status, updated_status)

    @test.idempotent_id('68a94a14-1243-46e6-83bf-157627e31556')
    def test_update_agent_description(self):
        # serialize description updates across tests sharing the agent
        self.useFixture(fixtures.LockFixture('agent_description'))
        description = 'description for update agent.'
        agent_description = {'description': description}
        body = self.admin_agents_client.update_agent(agent_id=self.agent['id'],
                                                     agent=agent_description)
        self.addCleanup(self._restore_agent)
        updated_description = body['agent']['description']
        self.assertEqual(updated_description, description)

    def _restore_agent(self):
        """Restore the agent description after update test"""
        description = self.agent['description'] or ''
        origin_agent = {'description': description}
        self.admin_agents_client.update_agent(agent_id=self.agent['id'],
                                              agent=origin_agent)
|
ankit318/appengine-mapreduce | python/test/mapreduce/model_test.py | Python | apache-2.0 | 13,422 | 0.00447 | #!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Disable "Invalid method name"
# pylint: disable=g-bad-name
import datetime
import os
import types
import unittest
import urlparse
from google.appengine.ext import db
from google.appengine.ext import testbed
from mapreduce import hooks
from mapreduce import model
from google.appengine.ext.webapp import mock_webapp
class TestHandler(object):
    """Test handler class."""

    def __call__(self, entity):
        # no-op: only the handler's resolvability is under test
        pass

    def process(self, entity):
        # no-op: referenced by name in instance-method handler specs
        pass
class TestHandlerWithArgs(object):
    """Test handler with argument in constructor.

    Used to verify that handlers requiring constructor arguments are
    rejected with TypeError.
    """

    def __init__(self, arg_unused):
        """Constructor."""
        pass

    def process(self, entity):
        """Empty process function."""
        pass
class TestHooks(hooks.Hooks):
    """Test hooks class."""
    # Intentionally empty: only the class identity/name is used by the tests.
    pass
def test_handler_function(entity):
    """Empty test handler function."""
    # referenced by name in function-handler specs
    pass
class HugeTaskTest(unittest.TestCase):
    """HugeTask tests.

    Other tests are in end_to_end_test.py
    """

    def testIncorrectPayloadVersion(self):
        # Requests with a missing or outdated payload-version header must be
        # rejected with DeprecationWarning.
        request = mock_webapp.MockRequest()
        self.assertRaises(DeprecationWarning,
                          model.HugeTask.decode_payload,
                          request)
        request.headers[model.HugeTask.PAYLOAD_VERSION_HEADER] = "0"
        self.assertRaises(DeprecationWarning,
                          model.HugeTask.decode_payload,
                          request)
class TestReader(object):
    """Stub input reader class, resolved by name in MapperSpec tests."""
    pass
class TestWriter(object):
    """Stub output writer class, resolved by name in MapperSpec tests."""
    pass
class MapperSpecTest(unittest.TestCase):
    """Tests model.MapperSpec."""

    ENTITY_KIND = "__main__.TestEntity"
    TEST_HANDLER = __name__ + "." + TestHandler.__name__
    TEST_READER = __name__ + "." + TestReader.__name__
    TEST_WRITER = __name__ + "." + TestWriter.__name__

    def setUp(self):
        # baseline JSON representation shared by most test cases
        self.default_json = {
            "mapper_handler_spec": self.TEST_HANDLER,
            "mapper_input_reader": self.TEST_READER,
            "mapper_params": {"entity_kind": self.ENTITY_KIND},
            "mapper_shard_count": 8}

    def testToJson(self):
        # MapperSpec serializes to the expected JSON dict, with and without
        # an output writer.
        mapper_spec = model.MapperSpec(
            self.TEST_HANDLER,
            self.TEST_READER,
            {"entity_kind": self.ENTITY_KIND},
            8)
        self.assertEquals(self.default_json,
                          mapper_spec.to_json())
        mapper_spec = model.MapperSpec(
            self.TEST_HANDLER,
            self.TEST_READER,
            {"entity_kind": self.ENTITY_KIND},
            8,
            output_writer_spec=self.TEST_WRITER)
        d = dict(self.default_json)
        d["mapper_output_writer"] = self.TEST_WRITER
        self.assertEquals(d, mapper_spec.to_json())

    def testFromJson(self):
        # MapperSpec round-trips from JSON, resolving reader/handler classes.
        ms = model.MapperSpec.from_json(self.default_json)
        self.assertEquals(self.TEST_READER, ms.input_reader_spec)
        self.assertEquals(TestReader, ms.input_reader_class())
        self.assertEquals(self.default_json["mapper_input_reader"],
                          ms.input_reader_spec)
        self.assertEquals(self.TEST_HANDLER, ms.handler_spec)
        self.assertTrue(isinstance(ms.get_handler(), TestHandler))
        self.assertTrue(isinstance(ms.handler, TestHandler))
        self.assertEquals(8, ms.shard_count)
        d = dict(self.default_json)
        d["mapper_output_writer"] = self.TEST_WRITER
        ms = model.MapperSpec.from_json(d)
        self.assertEquals(self.TEST_WRITER, ms.output_writer_spec)
        self.assertEquals(TestWriter, ms.output_writer_class())

    def specForHandler(self, handler_spec):
        # helper: build a MapperSpec whose handler spec is *handler_spec*
        self.default_json["mapper_handler_spec"] = handler_spec
        return model.MapperSpec.from_json(self.default_json)

    def testClassHandler(self):
        """Test class name as handler spec."""
        mapper_spec = self.specForHandler(
            __name__ + "." + TestHandler.__name__)
        self.assertTrue(TestHandler,
                        type(mapper_spec.handler))

    def testInstanceMethodHandler(self):
        """Test instance method as handler spec."""
        mapper_spec = self.specForHandler(
            __name__ + "." + TestHandler.__name__ + ".process")
        self.assertEquals(types.MethodType,
                          type(mapper_spec.handler))
        # call it
        mapper_spec.handler(0)

    def testFunctionHandler(self):
        """Test function name as handler spec."""
        mapper_spec = self.specForHandler(
            __name__ + "." + test_handler_function.__name__)
        self.assertEquals(types.FunctionType,
                          type(mapper_spec.handler))
        # call it
        mapper_spec.handler(0)

    def testHandlerWithConstructorArgs(self):
        """Test class with constructor args as a handler."""
        mapper_spec = self.specForHandler(
            __name__ + "." + TestHandlerWithArgs.__name__)
        self.assertRaises(TypeError, mapper_spec.get_handler)

    def testMethodHandlerWithConstructorArgs(self):
        """Test method from a class with constructor args as a handler."""
        mapper_spec = self.specForHandler(
            __name__ + "." + TestHandlerWithArgs.__name__ + ".process")
        self.assertRaises(TypeError, mapper_spec.get_handler)
class MapreduceSpecTest(unittest.TestCase):
"""Tests model.MapreduceSpec."""
def testToJson(self):
"""Test to_json method."""
mapper_spec_dict = {"mapper_handler_spec": "TestHandler",
"mapper_input_reader": "TestInputReader",
"mapper_params": {"entity_kind": "bar"},
"mapper_shard_count": 8}
mapreduce_spec = model.MapreduceSpec("my job",
"mr0",
mapper_spec_dict,
{"extra": "value"},
__name__+"."+TestHooks.__name__)
self.assertEquals(
{"name": "my job",
"mapreduce_id": "mr0",
"mapper_spec": mapper_spec_dict,
"params": {"extra": "value"},
"hooks_class_name": __name__+"."+TestHooks.__name__,
},
mapreduce_spec.to_json())
def testFromJsonWithoutOptionalArgs(self):
"""Test from_json method without params and hooks_class_name present."""
mapper_spec_dict = {"mapper_handler_spec": "TestHandler",
"mapper_input_reader": "TestInputReader",
"mapper_params": {"entity_kind": "bar"},
"mapper_shard_count": 8}
mapreduce_spec = model.MapreduceSpec.from_json(
{"mapper_spec": mapper_spec_dict,
"mapreduce_id": "mr0",
"name": "my job",
})
self.assertEquals("my job", mapreduce_spec.name)
self.assertEquals("mr0", mapreduce_spec.mapreduce_id)
self.assertEquals(mapper_spec_dict, mapreduce_spec.mapper.to_json())
self.assertEquals("TestHandler", mapreduce_spec.mapper.handler_spec)
self.assertEquals(None, mapreduce_spec.params)
self.assertEquals(None, mapreduce_spec.hooks_class_name)
def testFromJsonWithOptionalArgs(self):
"""Test from_json method with params and hooks_class_name present."""
mapper_spec_dict = {"mapper_handler_spec": "TestHandler",
"mapper_input_reader": "TestInputReader",
"mapper_params": {"entity_kind": "bar"},
"mapper_shard_count": 8}
mapreduce_spec = model.MapreduceSpec.from_json(
{"mapper_spec": mapper_spec_dict,
"mapreduce_id": "mr0",
"name": "my job",
"params": {"extra": "value"},
"hooks_class_name": __name__+"."+TestHooks.__name__
})
self.assertEquals("my job", mapreduce_spec.name)
self.assertEquals("mr0", mapreduce_spec.mapreduce_id)
self.assertEquals(mapper_spec_dict, mapreduce_spec.mapper.to_json())
self.assertEquals("TestHandler", mapreduce_spec.mapper.handler_spec)
self.assertEquals({ |
Homegateway/SDTTool | sdtv3/SDT3PrintPlain.py | Python | apache-2.0 | 8,166 | 0.038207 | # SDT3PrintPlain.py
#
# Print SDT3 to Plain text
from .SDT3Classes import *
hideDetails = False
# tabulator level
tab = 0
def incTab():
    """Increase the global indentation level used by newLine()."""
    global tab
    tab += 1
def decTab():
    """Decrease the global indentation level, never going below zero."""
    global tab
    if (tab > 0):
        tab -= 1
def newLine():
    """Return a newline followed by the current indentation (one tab per level)."""
    global tab
    return '\n' + '\t' * tab
#
# Print functions
#
def print3DomainPlain(domain, options):
    """Render a whole SDT3 domain (includes, module classes, devices) as plain text."""
    global hideDetails
    # cache the option in a module global so the nested print helpers see it
    hideDetails = options['hideDetails']
    result = 'Domain [id="' + domain.id + '"]'
    incTab()
    if (domain.doc != None):
        result += newLine() + printDoc(domain.doc)
    for include in domain.includes:
        result += newLine() + printInclude(include)
    for module in domain.modules:
        result += newLine() + printModuleClass(module)
    for device in domain.devices:
        result += newLine() + printDevice(device)
    decTab()
    return result
def printInclude(include):
    """Render an Include element as a single plain-text line."""
    return 'Include [parse="{0}" href="{1}"]'.format(include.parse, include.href)
#
# Device, SubDevice
#
def printDevice(device):
    """Render a Device with its properties, modules and sub-devices."""
    global hideDetails
    result = 'Device [id="' + device.id + '"]'
    incTab()
    if (device.doc != None and hideDetails == False):
        result += newLine() + printDoc(device.doc)
    # properties are detail information and are suppressed by hideDetails
    if (hideDetails == False):
        for prop in device.properties:
            result += newLine() + printProperty(prop)
    # modules and sub-devices are always listed; printModule itself decides
    # how much detail to emit based on hideDetails
    for module in device.modules:
        result += newLine() + printModule(module)
    for subDevice in device.subDevices:
        result += newLine() + printSubDevice(subDevice)
    decTab()
    return result
def printSubDevice(subDevice):
    """Render a SubDevice with its properties and modules."""
    global hideDetails
    result = 'SubDevice [id="' + subDevice.id + '"]'
    incTab()
    if (subDevice.doc != None and hideDetails == False):
        result += newLine() + printDoc(subDevice.doc)
    # properties are detail information and are suppressed by hideDetails
    if (hideDetails == False):
        for prop in subDevice.properties:
            result += newLine() + printProperty(prop)
    # modules are always listed; printModule applies hideDetails itself
    for module in subDevice.modules:
        result += newLine() + printModule(module)
    decTab()
    return result
#
# Property
#
def printProperty(prop):
    """Render a Property element (name/value/optional attributes, doc, type)."""
    result = 'Property ['
    incTab()
    if (prop.name != None):
        result += 'name="' + prop.name + '"'
    if (prop.value != None):
        result += ' value="' + prop.value + '"'
    if (prop.optional != None):
        result += ' optional="' + prop.optional + '"'
    result += ']'
    if (prop.doc):
        result += newLine() + printDoc(prop.doc)
    if (prop.type):
        result += newLine() + printSimpleTypeProperty(prop.type)
    decTab()
    return result
#
# Print Module, ModuleClass
#
def printModule(module):
    """Render a Module element.

    Fix: the closing quote after the name attribute was missing, producing
    e.g. 'Module [name="foo]' instead of 'Module [name="foo"]' (compare
    printDevice/printAction, which close the quote).
    """
    result = 'Module [name="' + module.name + '"'
    if (module.optional != None):
        result += ' optional="' + module.optional + '"'
    result += ']'
    if (hideDetails == False):
        result += printModuleDetails(module)
    return result
def printModuleClass(moduleClass):
    """Render a ModuleClass element.

    Fix: the closing quote after the name attribute was missing (same
    defect as printModule; compare printDevice/printAction).
    """
    result = 'ModuleClass [name="' + moduleClass.name + '"'
    if (moduleClass.optional != None):
        result += ' optional="' + moduleClass.optional + '"'
    result += ']'
    if (hideDetails == False):
        result += printModuleDetails(moduleClass)
    return result
def printModuleDetails(module):
    """Render the detail section shared by Module and ModuleClass."""
    incTab()
    result = ''
    if (module.doc != None):
        result += newLine() + printDoc(module.doc)
    if (module.extends != None):
        result += newLine() + printExtends(module.extends)
    # properties, actions, data points and events are suppressed by hideDetails
    if (hideDetails == False):
        for prop in module.properties:
            result += newLine() + printProperty(prop)
        for action in module.actions:
            result += newLine() + printAction(action)
        for data in module.data:
            result += newLine() + printDataPoint(data)
        for event in module.events:
            result += newLine() + printEvent(event)
    decTab()
    return result
def printExtends(extends):
return 'Extends [domain="' + extends.domain + '" class="' + extends.clazz + '"]'
#
# Action, Argument
#
def printAction(action):
result = 'Action [name="' + action.name + '"'
if (action.optional != None):
result += ' optional="' + action.optional + '"'
result += ']'
incTab()
if (action.doc != None):
result += newLine() + printDoc(action.doc)
for argument in action.args:
result += newLine() + printArgument(argument)
if (action.type != None):
result += newLine() + printDataType(action.type)
decTab()
return result
def printArgument(argument):
result = 'Arg ['
if (argument.name != None):
result += 'name="' + argument.name + '"'
result += ']'
incTab()
if (argument.doc != None):
result += newLine() + printDoc(argument.doc)
if (argument.type):
result += newLine() + printDataType(argument.type)
decTab()
return result
#
# Event
#
def printEvent(event):
result = 'Event [name="' + event.name
if (event.optional != None):
result += ' optional="' + event.optional + '"'
result += ']'
incTab()
if (event.doc != None):
result += newLine() + printDoc(event.doc)
for dataPoint in event.data:
result += newLine() + printDataPoint(dataPoint)
decTab()
return result
#
# DataPoint
#
def printDataPoint(datapoint):
result = 'DataPoint [name="' + datapoint.name + '"'
if (datapoint.optional != None):
result += ' optional="' + datapoint.optional + '"'
if (datapoint.writable != None):
result += ' writable="' + datapoint.writable + '"'
if (datapoint.readable != None):
result += ' readable="' + datapoint.readable + '"'
if (datapoint.eventable != None):
result += ' eventable="' + datapoint.eventable + '"'
result += ']'
incTab()
if (datapoint.doc != None):
result += newLine() + printDoc(datapoint.doc)
if (datapoint.type != None):
result += newLine() + printDataType(datapoint.type)
decTab()
return result
#
# DataTypes
#
def printDataType(dataType):
if (isinstance(dataType.type, SDT3SimpleType)):
result = printSimpleType(dataType)
elif (isinstance(dataType.type, SDT3StructType)):
result = printStructType(dataType)
elif (isinstance(dataType.type, SDT3ArrayType)):
result = printArrayType(dataType)
return result
def printSimpleType(dataType):
simpleType = dataType.type
result = 'SimpleType'
result += printDataTypeAttributes(dataType)
if (len(result) > 0):
result += ' '
result += '[type="' + simpleType.type + '"]'
if (dataType.doc != None):
incTab()
result += newLine() + printDoc(dataType.doc)
decTab()
incTab()
for constraint in dataType.constraints:
result += newLine() + printConstraint(constraint)
decTab()
return result
def printSimpleTypeProperty(simpleType):
result = 'SimpleType'
if (len(result) > 0):
result += ' '
result += '[type="' + simpleType.type + '"]'
return result
def printStructType(dataType):
result = 'Struct'
result += printDataTypeAttributes(dataType)
incTab()
for element in dataType.type.structElements:
result += newLine() + printDataType(element)
decTab()
if (dataType.doc != None):
incTab()
result += newLine() + printDoc(dataType.doc)
decTab()
incTab()
for constraint in dataType.constraints:
result += newLine() + printConstraint(constraint)
decTab()
return result
def printArrayType(dataType):
arrayType = dataType.type
result = 'Array'
result += printDataTypeAttributes(dataType)
if (arrayType.arrayType != None):
incTab()
result += newLine() + printDataType(arrayType.arrayType)
decTab()
if (dataType.doc != None):
incTab()
result += newLine() + printDoc(dataType.doc)
decTab()
incTab()
for constraint in dataType.constraints:
result += newLine() + printConstraint(constraint)
decTab()
return result
def printDataTypeAttributes(dataType):
result = ''
if (dataType.name != None):
result += 'name="' + dataType.name + '"'
if (dataType.unitOfMeasure != None):
if (len(result) > 0):
result += ' '
result += 'unitOfMeasure="' + dataType.unitOfMeasure + '"'
if (len(result) > 0):
result = ' [' + result + ']'
return result
def printConstraint(constraint):
result = 'Constraint'
attr = ''
if (constraint.name != None):
attr += 'name="' + constraint.name + '"'
if (constraint.type != None):
if (len(attr) > 0):
attr += ' '
attr += 'type="' + constraint.type + '"'
if (constraint.value != None):
if (len(attr) > 0):
attr += ' '
attr += 'value="' + constraint.value + '"'
if (len(attr) > 0):
result += ' [' + attr + ']'
if (constraint.doc != None):
incTab()
result += newLine() + printDoc(constraint.doc)
decTab()
return result
#
# Doc
#
def printDoc(doc):
incTab()
result = 'Doc: ' + doc.content.strip()
decTab()
return result
|
wojtask/CormenPy | test/test_chapter05/test_textbook5_3.py | Python | gpl-3.0 | 589 | 0 | from unittest import TestCase
from hamcrest import *
from array_util impor | t get_random_array
from chapter05.textbook5_3 import permute_by_sorting, randomize_in_place
class TestTextbook5_3(TestCase):
def test_permute_by_sorting(self):
array, elements = get_random_array()
array = permute_by_sorting(array)
assert_that(array.elements, contains_inanyorder(*elements))
def test_randomize_in_place(self):
array, elements = get_random_array()
randomize_in_place(array)
assert_that(array.elements, contains_in | anyorder(*elements))
|
penoud/GrovePi | Software/Python/grove_barometer_sensors/barometric_sensor_bmp085/grove_barometer_example_BMP085.py | Python | mit | 1,813 | 0.00717 | #!/usr/bin/env python
#
# GrovePi Example for using the Grove Barometer module (http://www.seeedstudio.com/depot/Grove-Barometer-HighAccuracy-p-1865.html)
#
# The GrovePi connects the Raspberry Pi and Grove sensors. You can learn more about GrovePi here: http://www.dexterindustries.com/GrovePi
#
# Have a question about this example? Ask on the forums here: http://www.dexterindustries.com/forum/?forum=grovepi
#
'''
## License
The MIT License (MIT)
GrovePi for the Raspberry Pi: an open source platform for connecting Grove Sensors to the Raspberry Pi.
Copyright (C) 2015 Dexter Industries
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Softwa | re is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO T | HE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
import grove_barometer_lib
b = grove_barometer_lib.barometer()
while True():
print ("Temp:",b.temperature," Pressure:",b.pressure," Altitude:",b.altitude)
b.update()
time.sleep(.1)
|
kappataumu/cookiecutter-django | hooks/pre_gen_project.py | Python | bsd-3-clause | 184 | 0.01087 | project_slug = '{{ cookiecutter.project_slug }}'
if hasattr(project_slug, 'isidentifier'):
assert project_slug.isidentifie | r(), 'Project slug should be valid P | ython identifier!'
|
iand/pynappl | old/contentbox_add.py | Python | gpl-2.0 | 936 | 0.03312 | """contentbox_add.py
Supports adding to the contentbox"""
import httplib2
from errors import *
class ContentboxAddCommand(object):
h = httplib2.Http()
def __init__(self, store_uri=None, data=None, type="application/x-unknown"):
| self.store_uri = (store_uri is not None and store_uri.endswith("/")) and store_uri[:-1] or store_uri
self.data = data
self.type = type
def set_store_uri( | self, store_uri):
self.store_uri = store_uri.endswith("/") and store_uri[:-1] or store_uri
def set_data(self, data):
self.data = data
def set_type(self, type):
self.type = type
def login(self, un, pw):
self.h.add_credentials(un, pw)
def execute(self):
uri = self.store_uri + "/items"
headers, data = self.h.request(uri, "POST", body=self.data, headers={"content-type" : self.type})
if int(headers["status"]) in range(200, 300):
pass
else:
raise CouldNotSendRequest("POST '%s': got status %s" % (uri, headers["status"]))
|
googleads/google-ads-python | google/ads/googleads/v8/services/services/merchant_center_link_service/client.py | Python | apache-2.0 | 26,385 | 0.001251 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from distutils import util
import os
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
from google.ads.googleads.v8.resources.types import merchant_center_link
from google.ads.googleads.v8.services.types import merchant_center_link_service
from .transports.base import (
MerchantCenterLinkServiceTransport,
DEFAULT_CLIENT_INFO,
)
from .transports.grpc import MerchantCenterLinkServiceGrpcTransport
class MerchantCenterLinkServiceClientMeta(type):
"""Metaclass for the MerchantCenterLinkService client.
This provides class-level methods for building and retrieving
support objects (e.g. transport) without polluting the client instance
objects.
"""
_transport_registry = (
OrderedDict()
) # type: Dict[str, Type[MerchantCenterLinkServiceTransport]]
_transport_registry["grpc"] = MerchantCenterLinkServiceGrpcTransport
def get_transport_class(
cls, label: str = None,
) -> Type[MerchantCenterLinkServiceTransport]:
"""Return an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
"""
# If a specific transport is requested, return that one.
if label:
return cls._transport_registry[label]
# No transport is requested; return the default (that is, the first one
# in the dictionary).
return next(iter(cls._transport_registry.values()))
class MerchantCenterLinkServiceClient(
metaclass=MerchantCenterLinkServiceClientMeta
):
"""This service allows management of links between Google Ads
and Google Merchant Center.
"""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
"""Convert api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
DEFAULT_ENDPOINT = "googleads.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
MerchantCenterLinkServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(
info
)
kwargs | ["credentials"] = credentials
return cls(*args, **kwargs)
@classmeth | od
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
MerchantCenterLinkServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(
filename
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@property
def transport(self) -> MerchantCenterLinkServiceTransport:
"""Return the transport used by the client instance.
Returns:
MerchantCenterLinkServiceTransport: The transport used by the client instance.
"""
return self._transport
@staticmethod
def merchant_center_link_path(
customer_id: str, merchant_center_id: str,
) -> str:
"""Return a fully-qualified merchant_center_link string."""
return "customers/{customer_id}/merchantCenterLinks/{merchant_center_id}".format(
customer_id=customer_id, merchant_center_id=merchant_center_id,
)
@staticmethod
def parse_merchant_center_link_path(path: str) -> Dict[str, str]:
"""Parse a merchant_center_link path into its component segments."""
m = re.match(
r"^customers/(?P<customer_id>.+?)/merchantCenterLinks/(?P<merchant_center_id>.+?)$",
path,
)
return m.groupdict() if m else {}
@staticmethod
def common_billing_account_path(billing_account: str,) -> str:
"""Return a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str, str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(folder: str,) -> str:
"""Return a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder,)
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str, str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(organization: str,) -> str:
"""Return a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization,)
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str, str]:
"""Parse a organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_projec |
jberci/resolwe | resolwe/flow/serializers/contributor.py | Python | apache-2.0 | 1,499 | 0.000667 | """Resolwe contributor serializer."""
from django.contrib import auth
from rest_framework import serializers
from rest_framework.exceptions import ValidationError
from rest_framework.fields import empty
from resolwe.rest.serializers import SelectiveFieldMixin
class ContributorSerializer(SelectiveFieldMixin, serializers.ModelSerializer):
"""Serializer for contributor User objects."""
class Meta:
"""Serializer configuration."""
# The model needs to be determined when instantiating the serializer
# class | as the applications may not yet be ready at this point.
model = None
fields = (
'f | irst_name',
'id',
'last_name',
'username',
)
def __init__(self, instance=None, data=empty, **kwargs):
"""Initialize attributes."""
# Use the correct User model.
if self.Meta.model is None:
self.Meta.model = auth.get_user_model()
super().__init__(instance, data, **kwargs)
def to_internal_value(self, data):
"""Format the internal value."""
# When setting the contributor, it may be passed as an integer.
if isinstance(data, dict) and isinstance(data.get('id', None), int):
data = data['id']
elif isinstance(data, int):
pass
else:
raise ValidationError("Contributor must be an integer or a dictionary with key 'id'")
return self.Meta.model.objects.get(pk=data)
|
Sokrates80/air-py | aplink/messages/ap_imu.py | Python | mit | 2,582 | 0.001936 | """
airPy is a flight controller based on pyboard and written in micropython.
The MIT License (MIT)
Copyright (c) 2016 Fabrizio Scimia, fabrizio.scimia@gmail.com
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright n | otice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAM | AGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import struct
# TODO split this message into two: IMU and MOTORS
class ImuStatus:
def __init__(self, h_builder, attitude):
"""
This message is used to carry IMU and MOTOR related information:
IMU: 6 float in total (3 for Pitch,Roll,Yaw angles and 3 for the related angular velocities)
MOTORS: 4 float in total (1 for each motor PWM value)
:param h_builder: HeaderBuilder object
:param attitude: AttitudeController object
"""
self.attitude_controller = attitude
self.header_builder = h_builder
self.QCI = 0
self.MESSAGE_TYPE_ID = 30
self.floatList = self.attitude_controller.get_attitude_status()
self.shortlist = self.attitude_controller.get_pulse_widths()
self.PAYLOAD_IMU = struct.pack('%sf' % len(self.floatList), *self.floatList)
self.PAYLOAD_MOTORS = struct.pack('%sH' % len(self.shortlist), *self.shortlist)
self.PAYLOAD = bytearray(self.PAYLOAD_IMU) + bytearray(self.PAYLOAD_MOTORS)
self.PAYLOAD_LENGTH = len(self.PAYLOAD)
self.EOF = bytearray([self.PAYLOAD[0] & 255])
self.FAIL_SAFE = (self.attitude_controller.get_rc_controller()).get_link_status()
self.header = bytearray(h_builder.get_header(self))
self.message = self.header + self.PAYLOAD + self.EOF
def get_bytes(self):
return self.message
|
dkdeconti/PAINS-train | training_methods/clustering/dbscan_PAINS_pca.py | Python | mit | 1,174 | 0 | __author__ = 'ddeconti'
import FileHandler
import numpy
import sys
from bokeh.plotting import figure, output_file, show, VBox, HBox
from rdkit import DataStructs
from sklearn.cluster import DBSCAN
from sklearn.decomposition.pca import PCA
def train_pca(pains_fps, num_components=3):
'''
Dimensional reduction of fps bit vectors to principal componen | ts
:param pains_fps:
:return: pca reduced fingerprints bit vectors
'''
np_fps = []
for fp in pains_fps:
arr = numpy.zeros((1,))
DataStructs.ConvertToNumpyArray(fp, arr)
np_fps.append(arr)
pca = PCA(n_components=num_components)
pca.fit(np_fps)
fps_reduced = pca | .transform(np_fps)
return fps_reduced
def train_dbscan(pains_fps):
db = DBSCAN(eps=1, min_samples=10).fit(pains_fps)
print db.labels_
pass
def main(sa):
pains_filename = sa[0]
pains_fps = FileHandler.SlnFile(pains_filename).get_fingerprint_list()
reduced_pains_fps = train_pca(pains_fps, num_components=2)
train_dbscan(reduced_pains_fps)
p = figure(x_axis_label="PC1",
y_axis_label="PC2")
if __name__ == "__main__":
main(sys.argv[1:])
|
nka11/KaraCos-Desktop | py/utils/client/ssh.py | Python | gpl-3.0 | 4,477 | 0.006478 | """
KaraCos-Desktop - web platform engine client for desktop users - http://karacos.org/
Copyright (C) 2009-2010 Nicolas Karageuzian
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Created on 26 juil. 2010
@author: KaragNi
"""
import paramiko
import socket
from paramiko.ssh_exception import SSHException, BadHostKeyException
from paramiko.transport import Transport
from paramiko.resource import ResourceManager
import getpass
from utils.client.http import Connection,HttpProxyConnection
from utils import tunnel
import ssl
SSH_PORT = 22
import desktop
class SSHClient(paramiko.SSHClient):
"""
Subclassing paramiko sshclient to add proxy via feature
"""
def connect(self, hostname, port=SSH_PORT, username=None, password=None, pkey=None,
key_filename=None, timeout=None, allow_agent=True, look_for_keys=True):
"""
"""
sock = None
if not desktop.Config()['proxy']:
for (family, socktype, proto, canonname, sockaddr) in socket.getaddrinfo(hostname, port, socket.AF_UNSPEC, socket.SOCK_STREAM):
if socktype == socket.SOCK_STREAM:
af = family
addr = sockaddr
break
else:
raise SSHException('No suitable address family for %s' % hostname)
sock = socket.socket(af, socket.SOCK_STREAM)
if timeout is not None:
try:
sock.settimeout(timeout)
except:
pass
sock.connect(addr)
else:
proxy_connexion = HttpProxyConnection((hostname,port),[desktop.Config()['proxy']['proxy'], desktop.Config()['proxy']['proxy_port']])
#conn = HttpProxyConnection((hostname,port),(desktop.Config()['proxy']['proxy'], desktop.Config()['proxy']['proxy_port']))
| proxy_connexion.establish()
sock = proxy_connexion.socket
if sock == None:
raise Exception("Erreur RESEAU")
#self.sock = ssl.wrap_socket(sock)
#sock = ssl.wrap_socket(proxy_connexion.socket,do_handshake_on_connect=False)
#sock = proxy_connexion.socket
#sock.connect((hostname,port))
t = s | elf._transport = Transport(sock)
t.banner_timeout = 180
if self._log_channel is not None:
t.set_log_channel(self._log_channel)
t.start_client()
ResourceManager.register(self, t)
server_key = t.get_remote_server_key()
keytype = server_key.get_name()
if port == SSH_PORT:
server_hostkey_name = hostname
else:
server_hostkey_name = "[%s]:%d" % (hostname, port)
our_server_key = self._system_host_keys.get(server_hostkey_name, {}).get(keytype, None)
if our_server_key is None:
our_server_key = self._host_keys.get(server_hostkey_name, {}).get(keytype, None)
if our_server_key is None:
# will raise exception if the key is rejected; let that fall out
self._policy.missing_host_key(self, server_hostkey_name, server_key)
# if the callback returns, assume the key is ok
our_server_key = server_key
if server_key != our_server_key:
raise BadHostKeyException(hostname, server_key, our_server_key)
if username is None:
username = getpass.getuser()
if key_filename is None:
key_filenames = []
elif isinstance(key_filename, (str, unicode)):
key_filenames = [ key_filename ]
else:
key_filenames = key_filename
self._auth(username, password, pkey, key_filenames, allow_agent, look_for_keys) |
zhangg/trove | trove/tests/unittests/secgroups/test_security_group.py | Python | apache-2.0 | 6,773 | 0 | # Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from mock import Mock
from mock import patch
from novaclient import exceptions as nova_exceptions
from trove.common import cfg
from trove.common import exception
import trove.common.remote
from trove.extensions.security_group import models as sec_mod
from trove.instance import models as inst_model
from trove.tests.fakes import nova
from trove.tests.unittests import trove_testtools
CONF = cfg.CONF
"""
Unit tests for testing the exceptions raised by Security Groups
"""
class Security_Group_Exceptions_Test(trove_testtools.TestCase):
def setUp(self):
super(Security_Group_Exceptions_Test, self).setUp()
self.createNovaClient = trove.common.remote.create_nova_client
self.context = trove_testtools.TroveTestContext(self)
self.FakeClient = nova.fake_create_nova_client(self.context)
fException = Mock(
side_effect=lambda *args, **kwargs: self._raise(
nova_exceptions.ClientException("Test")))
self.FakeClient.security_groups.create = fException
self.FakeClient.security_groups.delete = fException
self.FakeClient.security_group_rules.create = fException
self.FakeClient.security_group_rules.delete = fException
trove.common.remote.create_nova_client = (
lambda c, r: self._return_mocked_nova_client(c))
def tearDown(self):
super(Security_Group_Exceptions_Test, self).tearDown()
trove.common.remote.create_nova_client = self.createNovaClient
def _return_mocked_nova_client(self, context):
return self.FakeClient
def _raise(self, ex):
raise ex
@patch('trove.network.nova.LOG')
def test_failed_to_create_security_group(self, mock_logging):
self.assertRaises(exception.SecurityGroupCreationError,
sec_mod.RemoteSecurityGroup.create,
"TestName",
"TestDescription",
self.context,
region_name=CONF.os_region_name)
@patch('trove.network.nova.LOG')
def test_failed_to_delete_security_group(self, mock_logging):
self.assertRaises(exception.SecurityGroupDeletionError,
sec_mod.RemoteSecurityGroup.delete,
1, self.context,
region_name=CONF.os_region_name)
@patch('trove.network.nova.LOG')
def test_failed_to_create_security_group_rule(self, mock_logging):
self.assertRaises(exception.SecurityGroupRuleCreationError,
sec_mod.RemoteSecurityGroup.add_rule,
1, "tcp", 3306, 3306, "0.0.0.0/0", self.context,
region_name=CONF.os_region_name)
@patch('trove.network | .nova.LOG')
def test_failed_to_delete_security_group_rule(self, mock_logging):
self.assertRaises(exception.SecurityGroupRuleDeletionError,
| sec_mod.RemoteSecurityGroup.delete_rule,
1, self.context,
region_name=CONF.os_region_name)
class fake_RemoteSecGr(object):
def data(self):
self.id = uuid.uuid4()
return {'id': self.id}
def delete(self, context, region_name):
pass
class fake_SecGr_Association(object):
def get_security_group(self):
return fake_RemoteSecGr()
def delete(self):
pass
class SecurityGroupDeleteTest(trove_testtools.TestCase):
def setUp(self):
super(SecurityGroupDeleteTest, self).setUp()
self.inst_model_conf_patch = patch.object(inst_model, 'CONF')
self.inst_model_conf_mock = self.inst_model_conf_patch.start()
self.addCleanup(self.inst_model_conf_patch.stop)
self.context = trove_testtools.TroveTestContext(self)
self.original_find_by = (
sec_mod.SecurityGroupInstanceAssociation.find_by)
self.original_delete = sec_mod.SecurityGroupInstanceAssociation.delete
self.fException = Mock(
side_effect=lambda *args, **kwargs: self._raise(
exception.ModelNotFoundError()))
def tearDown(self):
super(SecurityGroupDeleteTest, self).tearDown()
(sec_mod.SecurityGroupInstanceAssociation.
find_by) = self.original_find_by
(sec_mod.SecurityGroupInstanceAssociation.
delete) = self.original_delete
def _raise(self, ex):
raise ex
def test_failed_to_get_assoc_on_delete(self):
sec_mod.SecurityGroupInstanceAssociation.find_by = self.fException
self.assertIsNone(
sec_mod.SecurityGroup.delete_for_instance(
uuid.uuid4(), self.context, CONF.os_region_name))
def test_get_security_group_from_assoc_with_db_exception(self):
fException = Mock(
side_effect=lambda *args, **kwargs: self._raise(
nova_exceptions.ClientException('TEST')))
i_id = uuid.uuid4()
class new_fake_RemoteSecGrAssoc(object):
def get_security_group(self):
return None
def delete(self):
return fException
sec_mod.SecurityGroupInstanceAssociation.find_by = Mock(
return_value=new_fake_RemoteSecGrAssoc())
self.assertIsNone(
sec_mod.SecurityGroup.delete_for_instance(
i_id, self.context, CONF.os_region_name))
def test_delete_secgr_assoc_with_db_exception(self):
i_id = uuid.uuid4()
sec_mod.SecurityGroupInstanceAssociation.find_by = Mock(
return_value=fake_SecGr_Association())
sec_mod.SecurityGroupInstanceAssociation.delete = self.fException
self.assertIsNotNone(sec_mod.SecurityGroupInstanceAssociation.find_by(
i_id, deleted=False).get_security_group())
self.assertTrue(hasattr(sec_mod.SecurityGroupInstanceAssociation.
find_by(i_id, deleted=False).
get_security_group(), 'delete'))
self.assertIsNone(
sec_mod.SecurityGroup.delete_for_instance(
i_id, self.context, CONF.os_region_name))
|
wwright2/dcim3-angstrom1 | sources/bitbake/lib/bb/data.py | Python | mit | 15,316 | 0.006399 | # ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Data' implementations
Functions for interacting with the data structure used by the
BitBake build tools.
The expandKeys and update_data are the most expensive
operations. At night the cookie monster came by and
suggested 'give me cookies on setting the variables and
things will work out'. Taking this suggestion into account
applying the skills from the not yet passed 'Entwurf und
Analyse von Algorithmen' lecture and the cookie
monster seems to be right. We will track setVar more carefully
to have faster update_data and expandKeys operations.
This is a trade-off between speed and memory again but
the speed is more critical here.
"""
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2005 Holger Hans Peter Freyther
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
import sys, os, re
if sys.argv[0][-5:] == "pydoc":
path = os.path.dirname(os.path.dirname(sys.argv[1]))
else:
path = os.path.dirname(os.path.dirname(sys.argv[0]))
sys.path.insert(0, path)
from itertools import groupby
from bb import data_smart
from bb import codeparser
import bb
logger = data_smart.logger
_dict_type = data_smart.DataSmart
def init():
"""Return a new object representing the Bitbake data"""
| return _dict_type()
def init_db(parent = None):
"""Return a new object representing the Bitbake data,
optionally based on an existing object"""
if parent is not None:
return parent.createCopy()
else:
return _dict_type()
def createCopy(source):
    """Link the source set to the destination.

    If one does not find the value in the destination set,
    search will go on to the source set to get the value.
    Values from source are copy-on-write, i.e. any attempt to
    modify one of them will end up putting the modified value
    in the destination set.
    """
    # Delegates entirely to the data object's own copy-on-write copy.
    return source.createCopy()
# Thin module-level wrappers: each delegates directly to the corresponding
# method on the DataSmart instance 'd'. Kept for API compatibility.
def initVar(var, d):
    """Non-destructive var init for data structure"""
    d.initVar(var)
def setVar(var, value, d):
    """Set a variable to a given value"""
    d.setVar(var, value)
def getVar(var, d, exp = 0):
    """Gets the value of a variable; 'exp' requests expansion of the value."""
    return d.getVar(var, exp)
def renameVar(key, newkey, d):
    """Renames a variable from key to newkey"""
    d.renameVar(key, newkey)
def delVar(var, d):
    """Removes a variable from the data set"""
    d.delVar(var)
def appendVar(var, value, d):
    """Append additional value to a variable"""
    d.appendVar(var, value)
def setVarFlag(var, flag, flagvalue, d):
    """Set a flag for a given variable to a given value"""
    d.setVarFlag(var, flag, flagvalue)
def getVarFlag(var, flag, d):
    """Gets given flag from given var"""
    return d.getVarFlag(var, flag)
def delVarFlag(var, flag, d):
    """Removes a given flag from the variable's flags"""
    d.delVarFlag(var, flag)
def setVarFlags(var, flags, d):
    """Set the flags for a given variable

    Note:
        setVarFlags will not clear previous
        flags. Think of this method as
        addVarFlags
    """
    d.setVarFlags(var, flags)
def getVarFlags(var, d):
    """Gets a variable's flags"""
    return d.getVarFlags(var)
def delVarFlags(var, d):
    """Removes a variable's flags"""
    d.delVarFlags(var)
def keys(d):
    """Return a list of keys in d"""
    return d.keys()
# Patterns for ${VAR} references (no nesting) and ${@python} expressions.
# NOTE(review): neither pattern is referenced in this chunk -- expansion is
# delegated to the data store below; possibly kept for API compat -- confirm.
__expand_var_regexp__ = re.compile(r"\${[^{}]+}")
__expand_python_regexp__ = re.compile(r"\${@.+?}")
def expand(s, d, varname = None):
    """Variable expansion using the data store"""
    return d.expand(s, varname)
def expandKeys(alterdata, readdata = None):
    """Expand variable *names* (keys) in ``alterdata``.

    Every key containing a ``${...}`` reference is expanded using
    ``readdata`` (defaults to ``alterdata``) and the variable is renamed
    to its expanded form. A warning is emitted when the expanded key
    already holds a value that will be shadowed by the rename.
    """
    if readdata is None:  # was `== None`; identity check is correct here
        readdata = alterdata

    # First pass: collect key -> expanded-key renames.
    todolist = {}
    for key in alterdata:
        if '${' not in key:
            continue
        ekey = expand(key, readdata)
        if key == ekey:
            continue
        todolist[key] = ekey

    # Second pass, kept separate from the first for performance: it
    # maximises the usefulness of the expand cache.
    for key, ekey in todolist.items():
        newval = alterdata.getVar(ekey, 0)
        if newval:
            # `newval` is truthy (so not None); only `val` needs checking.
            val = alterdata.getVar(key, 0)
            if val is not None:
                bb.warn("Variable key %s (%s) replaces original key %s (%s)." % (key, val, ekey, newval))
        alterdata.renameVar(key, ekey)
def inheritFromOS(d, savedenv, permitted):
    """Inherit variables from the initial environment.

    Copies each variable in ``savedenv`` whose name appears in
    ``permitted`` into ``d``; variables listed by
    ``bb.utils.preserved_envvars_exported()`` are additionally flagged
    for shell export.
    """
    exportlist = bb.utils.preserved_envvars_exported()
    for s in savedenv.keys():
        if s in permitted:
            try:
                # The value is fetched expanded (True) from the saved env.
                d.setVar(s, getVar(s, savedenv, True), op = 'from env')
                if s in exportlist:
                    d.setVarFlag(s, "export", True, op = 'auto env export')
            except TypeError:
                # NOTE(review): TypeError is silently ignored here;
                # presumably guards against unexpandable values -- confirm.
                pass
def emit_var(var, o=sys.__stdout__, d = init(), all=False):
    """Emit a variable to be sourced by a shell.

    Writes shell text for ``var`` to stream ``o``. Returns 1 when a
    function definition was emitted, 0 otherwise.
    NOTE(review): ``d = init()`` is a mutable default evaluated once at
    import time, and ``all`` shadows the builtin -- confirm intentional.
    """
    # Python functions are never emitted to the shell.
    if getVarFlag(var, "python", d):
        return 0
    export = getVarFlag(var, "export", d)
    unexport = getVarFlag(var, "unexport", d)
    func = getVarFlag(var, "func", d)
    # Without all=True, only exported/unexported vars and functions count.
    if not all and not export and not unexport and not func:
        return 0
    try:
        if all:
            oval = getVar(var, d, 0)   # unexpanded, for history output only
        val = getVar(var, d, 1)        # expanded value to emit
    except (KeyboardInterrupt, bb.build.FuncFailed):
        raise
    except Exception as exc:
        # Expansion failures become a shell comment rather than an error.
        o.write('# expansion of %s threw %s: %s\n' % (var, exc.__class__.__name__, str(exc)))
        return 0
    if all:
        d.varhistory.emit(var, oval, val, o)
    # Names containing shell-hostile characters are skipped unless all=True.
    if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all:
        return 0
    varExpanded = expand(var, d)
    if unexport:
        o.write('unset %s\n' % varExpanded)
        return 0
    if val is None:
        return 0
    val = str(val)
    # Re-emit exported bash functions (shellshock-style env encoding).
    if varExpanded.startswith("BASH_FUNC_"):
        varExpanded = varExpanded[10:-2]
        val = val[3:] # Strip off "() "
        o.write("%s() %s\n" % (varExpanded, val))
        o.write("export -f %s\n" % (varExpanded))
        return 1
    if func:
        # NOTE: should probably check for unbalanced {} within the var
        o.write("%s() {\n%s\n}\n" % (varExpanded, val))
        return 1
    if export:
        o.write('export ')
    # if we're going to output this within doublequotes,
    # to a shell, we need to escape the quotes in the var
    alter = re.sub('"', '\\"', val)
    alter = re.sub('\n', ' \\\n', alter)
    alter = re.sub('\\$', '\\\\$', alter)
    o.write('%s="%s"\n' % (varExpanded, alter))
    return 0
def emit_env(o=sys.__stdout__, d = init(), all=False):
    """Emits all items in the data store in a format such that it can be sourced by a shell."""
    # NOTE(review): mutable default `d = init()` evaluated once -- confirm.
    isfunc = lambda key: bool(d.getVarFlag(key, "func"))
    # Sorting by the func flag groups plain variables before functions,
    # so functions can reference previously-emitted variables.
    keys = sorted((key for key in d.keys() if not key.startswith("__")), key=isfunc)
    grouped = groupby(keys, isfunc)
    for isfunc, keys in grouped:
        for key in keys:
            # emit_var returns 1 for functions; follow those with a blank line.
            emit_var(key, o, d, all and not isfunc) and o.write('\n')
def exported_keys(d):
    """Yield every non-internal variable name flagged for shell export."""
    for key in d.keys():
        if key.startswith('__'):
            continue
        if d.getVarFlag(key, 'export') and not d.getVarFlag(key, 'unexport'):
            yield key
def exported_vars(d):
for key in exported_keys(d):
try:
value = d.getVar(key, True)
except Exception:
pass
if value is not None:
|
escattone/kuma | kuma/users/tests/test_auth.py | Python | mpl-2.0 | 720 | 0 | from django.test import RequestFactory
from kuma.users.auth import logout_url
# TODO: Check which new tests are needed.
def test_logout_url(settings):
    """logout_url: "/" by default, honours ?next=, LOGOUT_REDIRECT_URL,
    and the 'oidc_login_next' session key (highest precedence)."""
    def make_request(path):
        req = RequestFactory().get(path)
        req.session = {}
        return req

    assert logout_url(make_request("/some/path")) == "/"
    assert logout_url(make_request("/some/path?next=/docs")) == "/docs"

    settings.LOGOUT_REDIRECT_URL = "/loggedout"
    request = make_request("/some/path")
    assert logout_url(request) == "/loggedout"

    request.session["oidc_login_next"] = "/original"
    assert logout_url(request) == "/original"
|
ahaberlie/MetPy | src/metpy/_vendor/xarray.py | Python | bsd-3-clause | 2,816 | 0.001065 | # Copyright (c) 2019 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Vendor core functionality used from xarray.
This code has been reproduced with modification under the terms of the Apache License, Version
2.0 (notice included below).
Copyright 2014-2019, xarray Developers
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
def expanded_indexer(key, ndim):
    """Expand an indexer to a tuple with length ndim.

    Given a key for indexing an ndarray, return an equivalent key which is
    a tuple with length equal to the number of dimensions. The expansion
    replaces the first `Ellipsis` with the right number of full slices and
    pads the key with full slices up to the requested dimensionality.
    """
    # numpy treats non-tuple keys equivalent to tuples of length 1
    if not isinstance(key, tuple):
        key = (key,)
    expanded = []
    # handling Ellipsis right is a little tricky, see:
    # http://docs.scipy.org/doc/numpy/reference/arrays.indexing.html#advanced-indexing
    seen_ellipsis = False
    for item in key:
        if item is not Ellipsis:
            expanded.append(item)
        elif seen_ellipsis:
            # only the first Ellipsis expands; later ones act as ':'
            expanded.append(slice(None))
        else:
            expanded.extend([slice(None)] * (ndim + 1 - len(key)))
            seen_ellipsis = True
    if len(expanded) > ndim:
        raise IndexError('too many indices')
    expanded.extend([slice(None)] * (ndim - len(expanded)))
    return tuple(expanded)
def is_dict_like(value):
    """Check if value is dict-like."""
    return all(hasattr(value, attr) for attr in ('keys', '__getitem__'))
def either_dict_or_kwargs(pos_kwargs, kw_kwargs, func_name):
    """Ensure dict-like argument from either positional or keyword arguments."""
    if pos_kwargs is None:
        return kw_kwargs
    # Positional mapping wins, but must be dict-like and exclusive.
    if not is_dict_like(pos_kwargs):
        raise ValueError('the first argument to .{} must be a '
                         'dictionary'.format(func_name))
    if kw_kwargs:
        raise ValueError('cannot specify both keyword and positional arguments to '
                         '.{}'.format(func_name))
    return pos_kwargs
|
jokey2k/sentry | src/sentry/features/__init__.py | Python | bsd-3-clause | 510 | 0 | from __futu | re__ import absolute_import
|
from .base import * # NOQA
from .handler import * # NOQA
from .manager import * # NOQA
# Global feature-flag registry. FeatureManager and OrganizationFeature
# come in via the wildcard imports above.
default_manager = FeatureManager()
default_manager.add('auth:register')
default_manager.add('social-auth:register')
default_manager.add('organizations:create')
# Organization-scoped features carry an explicit feature class.
default_manager.add('organizations:sso', OrganizationFeature)
default_manager.add('teams:create', OrganizationFeature)
# expose public api
add = default_manager.add
get = default_manager.get
has = default_manager.has
armadill-odyssey/aima-python | submissions/Dickenson/myCSPs.py | Python | mit | 1,998 | 0.026527 | import csp
rgb = ['R', 'G', 'B']
# NOTE(review): the rgb domain values below are discarded later -- `domains`
# is rebuilt with a 7-colour palette; only the region names (the keys)
# survive via `vars = domains.keys()`.
domains = {
    'Aosta Valley': rgb,
    'Piedmont': rgb,
    'Liguria': rgb,
    'Lombardy': rgb,
    'Trentino': rgb,
    'South Tyrol': rgb,
    'Veneto': rgb,
    'Friuli-Venezia Giulia': rgb,
    'Emilia-Romagna': rgb,
    'Tuscany': rgb,
    'Umbria': rgb,
    'Marche': rgb,
    'Lazio': rgb,
    'Abruzzo': rgb,
    'Molise': rgb,
    'Campania': rgb,
    'Apulia': rgb,
    'Basilicata': rgb,
    'Calabria': rgb,
}
neighbors = {
'Aosta Valley': ['Piedmont'],
'Piedmont': ['Liguria',' | Lombardy','Emilia-Romagna'],
'Liguria': ['Piedmont','Emilia-Romagna','Tuscany'],
'Lombardy': ['Piedmont','Emilia-Romagna','Veneto','Trentino','South Tyrol'],
'Trentino': ['South Tyrol','Veneto','Lombardy'],
'South Tyrol': ['Lombardy','Trentino','Veneto'],
| 'Veneto': ['Friuli-Venezia Giulia','Trentino','South Tyrol','Lombardy','Emilia-Romagna'],
'Friuli-Venezia Giulia': ['Veneto'],
'Emilia-Romagna': ['Veneto','Lombardy','Tuscany','Liguria','Marche','Piedmont'],
'Tuscany': ['Liguria','Emilia-Romagna','Marche','Umbria','Lazio'],
'Umbria': ['Tuscany','Lazio','Marche'],
'Marche': ['Emilia-Romagna','Tuscany','Umbria','Lazio','Abruzzo'],
'Lazio': ['Tuscany','Umbria','Abruzzo','Molise','Campania'],
'Abruzzo': ['Marche','Lazio','Molise'],
'Molise': ['Abruzzo','Lazio','Campania','Apulia'],
'Campania': ['Lazio','Molise','Apulia','Basilicata'],
'Apulia': ['Molise','Campania','Basilicata'],
'Basilicata': ['Apulia','Campania','Calabria'],
'Calabria': ['Basilicata'],
}
# NOTE(review): `vars` shadows the builtin; kept for compatibility with
# code importing it from this module.
vars = domains.keys()
# Rebuild every region's domain with a 7-colour palette, replacing the
# rgb lists assigned above.
domains = {}
for v in vars:
    domains[v] = ['R', 'G', 'B', 'P', 'O', 'T', 'M']
def constraints(A, a, B, b):
    """Binary map-colouring constraint: two distinct regions may not
    share a colour; a region is always consistent with itself."""
    if A == B:  # e.g. NSW == NSW
        return True
    # e.g. WA = G and SA = G is forbidden
    return a != b
# Assemble the CSP instance; the commented-out entry below shows where a
# variable-ordering heuristic (e.g. csp.mrv) could be plugged in.
myItalymap = csp.CSP(vars, domains, neighbors, constraints)
myCSPs = [
    {'csp': myItalymap,
     # 'select_unassigned_variable':csp.mrv,
     }
]
mmktomato/zenmai-bts | web/models/state.py | Python | mit | 690 | 0.001449 | """State c | lass definition."""
from . import get_db
db = get_db()
class State(db.Model):
    """State class.

    Extends Model of 'Flask-SQLAlchemy'.
    """
    # Surrogate primary key.
    id = db.Column(db.Integer, primary_key=True)
    # Human-readable state name.
    name = db.Column(db.String(32), nullable=False)
    # Sort/lookup value; unique per state.
    value = db.Column(db.Integer, unique=True, nullable=False)

    def __init__(self, name, value):
        """Creates a instance of this class."""
        self.name = name
        self.value = value

    def __repr__(self):
        # Bug fix: the method was named `__repr` (never invoked by repr())
        # and used the format '{}([])', which contains one placeholder but
        # was given two arguments, so `value` was silently dropped.
        return '{}({})'.format(self.name, self.value)

    @classmethod
    def all(cls):
        """Returns all states. Ordered by 'value'."""
        return cls.query.order_by(cls.value.asc()).all()
|
gaocegege/treadmill | treadmill/cli/admin/checkout/ldap.py | Python | apache-2.0 | 551 | 0 | """Checkout LDAP infrastructure."""
import click
from treadmill import cli
from tr | eadmill import context
from treadmill.checkout import ldap as ldap_test
def init():
    """Top level command handler.

    Builds and returns the click command for the 'ldap' checkout.
    """
    @click.command('ldap')
    @click.option('--ldap-list', required=True, envvar='TREADMILL_LDAP_LIST',
                  type=cli.LIST)
    def check_ldap(ldap_list):
        """Checkout LDAP infra."""
        ldap_suffix = context.GLOBAL.ldap.ldap_suffix
        # Defer the actual test: the checkout runner invokes the returned
        # callable later.
        return lambda: ldap_test.test(ldap_list, ldap_suffix)
    return check_ldap
|
kidaa/entropy | lib/tests/core.py | Python | gpl-2.0 | 1,396 | 0.004298 | # -*- coding: utf-8 -*-
import sys
sys.path.insert(0, 'client')
sys.path.insert(0, '../../client')
sys.path.insert(0, '.')
sys.path.insert(0, '../')
import unittest
from entropy.core import EntropyPluginStore, Singleton
from entropy.core.settings.base import SystemSettings
import tests._misc as _misc
class CoreTest(unittest.TestCase):
    """Unit tests for entropy.core primitives."""

    def test_plugin_store(self):
        # add/remove/drop round-trips on EntropyPluginStore.
        store = EntropyPluginStore()
        plug_object = object()
        plug_id = "plug"
        store.add_plugin(plug_id, plug_object)
        self.assertEqual(store.get_plugins(), {plug_id: plug_object})
        store.remove_plugin(plug_id)
        self.assertEqual(store.get_plugins(), {})
        store.add_plugin(plug_id, plug_object)
        self.assertEqual(store.get_plugins(), {plug_id: plug_object})
        store.drop_plugins()
        self.assertEqual(store.get_plugins(), {})
    def test_plugin_updatable_config_files(self):
        # SystemSettings reports its updatable config files as a set.
        sys_set = SystemSettings()
        files = sys_set.get_updatable_configuration_files(None)
        self.assertTrue(isinstance(files, set))
        self.assertTrue(files) # not empty
    def test_core_singleton(self):
        # Two instantiations of a Singleton subclass share one object.
        class myself(Singleton):
            def init_singleton(self):
                pass
        obj = myself()
        obj2 = myself()
        self.assertTrue(obj is obj2)
if __name__ == '__main__':
    unittest.main()
    # NOTE(review): unittest.main() normally calls sys.exit(), so this
    # line is unreachable in practice.
    raise SystemExit(0)
|
NicovincX2/Python-3.5 | Physique/Mouvement/Dynamique/Systèmes oscillants/Pendule/animate_a_pendulum_pygame.py | Python | gpl-3.0 | 2,683 | 0.001118 | # -*- coding: utf-8 -*-
import os
import pygame
import sys
from pygame.locals import *
from math import sin, cos, radians
pygame.init()
WINDOWSIZE = 250  # square window edge, pixels
TIMETICK = 100    # milliseconds between physics ticks
BOBSIZE = 15      # bob radius, pixels
window = pygame.display.set_mode((WINDOWSIZE, WINDOWSIZE))
pygame.display.set_caption("Pendulum")
screen = pygame.display.get_surface()
screen.fill((255, 255, 255))
PIVOT = (WINDOWSIZE / 2, WINDOWSIZE / 10)  # anchor point of the pendulum
SWINGLENGTH = PIVOT[1] * 4                 # rod length, pixels
class BobMass(pygame.sprite.Sprite):
    """Pendulum bob: integrates the angle and draws rod + bob."""

    def __init__(self):
        pygame.sprite.Sprite.__init__(self)
        # theta: angle in degrees; dtheta: angular velocity per tick.
        self.theta = 45
        self.dtheta = 0
        # NOTE(review): this initial placement uses cos for x / sin for y,
        # while recomputeAngle uses sin for x / cos for y -- the first
        # frame positions the bob differently from every later frame;
        # looks like a latent inconsistency -- confirm.
        self.rect = pygame.Rect(PIVOT[0] - SWINGLENGTH * cos(radians(self.theta)),
                                PIVOT[1] + SWINGLENGTH *
                                sin(radians(self.theta)),
                                1, 1)
        self.draw()
    def recomputeAngle(self):
        # Fixed-step integration of theta'' = -scaling * sin(theta),
        # iterating midpoint estimates before taking the full step.
        scaling = 3000.0 / (SWINGLENGTH**2)
        firstDDtheta = -sin(radians(self.theta)) * scaling
        midDtheta = self.dtheta + firstDDtheta
        midtheta = self.theta + (self.dtheta + midDtheta) / 2.0
        midDDtheta = -sin(radians(midtheta)) * scaling
        midDtheta = self.dtheta + (firstDDtheta + midDDtheta) / 2
        midtheta = self.theta + (self.dtheta + midDtheta) / 2
        midDDtheta = -sin(radians(midtheta)) * scaling
        lastDtheta = midDtheta + midDDtheta
        lasttheta = midtheta + (midDtheta + lastDtheta) / 2.0
        lastDDtheta = -sin(radians(lasttheta)) * scaling
        lastDtheta = midDtheta + (midDDtheta + lastDDtheta) / 2.0
        lasttheta = midtheta + (midDtheta + lastDtheta) / 2.0
        self.dtheta = lastDtheta
        self.theta = lasttheta
        # Reposition the 1x1 rect at the new bob centre.
        self.rect = pygame.Rect(PIVOT[0] -
                                SWINGLENGTH * sin(radians(self.theta)),
                                PIVOT[1] +
                                SWINGLENGTH * cos(radians(self.theta)), 1, 1)
    def draw(self):
        # Pivot dot, bob circle, rod line, and the horizontal pivot axis.
        pygame.draw.circle(screen, (0, 0, 0), PIVOT, 5, 0)
        pygame.draw.circle(screen, (0, 0, 0), self.rect.center, BOBSIZE, 0)
        pygame.draw.aaline(screen, (0, 0, 0), PIVOT, self.rect.center)
        pygame.draw.line(screen, (0, 0, 0),
                         (0, PIVOT[1]), (WINDOWSIZE, PIVOT[1]))
    def update(self):
        # Advance one physics tick, then redraw onto a cleared screen.
        self.recomputeAngle()
        screen.fill((255, 255, 255))
        self.draw()
bob = BobMass()
# Custom timer event fired every TIMETICK ms to drive the physics.
TICK = USEREVENT + 2
pygame.time.set_timer(TICK, TIMETICK)
def input(events):
    # NOTE(review): this function shadows the builtin input().
    for event in events:
        if event.type == QUIT:
            sys.exit(0)
        elif event.type == TICK:
            bob.update()
while True:
    input(pygame.event.get())
    pygame.display.flip()
# NOTE(review): unreachable -- the loop above never exits normally.
os.system("pause")
|
yfpeng/pengyifan-leetcode | src/main/python/pyleetcode/fizz_buzz.py | Python | bsd-3-clause | 999 | 0.003003 | """
Write a program that outputs the string representation of | numbers from 1 to n.
But for multiples of three it should output "Fizz" instead of the number and for the multiples of five output "Buzz".
For numbers which are multiples of both three and five output "FizzBuzz".
"""
def fizz_buzz(n):
    """Return the FizzBuzz sequence for 1..n.

    :type n: int
    :rtype: List[str]
    """
    words = []
    for value in range(1, n + 1):
        word = ''
        if value % 3 == 0:
            word += 'Fizz'
        if value % 5 == 0:
            word += 'Buzz'
        # Multiples of 15 naturally combine to 'FizzBuzz'; everything
        # else falls back to the number itself.
        words.append(word or str(value))
    return words
def test_fizz_buzz():
    """Check fizz_buzz(15) against the full expected sequence."""
    expected = [
        "1",
        "2",
        "Fizz",
        "4",
        "Buzz",
        "Fizz",
        "7",
        "8",
        "Fizz",
        "Buzz",
        "11",
        "Fizz",
        "13",
        "14",
        "FizzBuzz"
    ]
    assert fizz_buzz(15) == expected
if __name__ == '__main__':
    test_fizz_buzz()
ninchat/offhand | python/offhand/__init__.py | Python | bsd-2-clause | 1,817 | 0.00055 | __all__ = [
"CorruptedMessage",
"Stats",
"UnexpectedCommand",
"UnexpectedEOF",
"UnknownCommand",
"log",
]
import logging
import struct
log = logging.getLogger("offhand")
class UnexpectedEOF(Exception):
    """The peer closed the connection unexpectedly."""
    def __init__(self):
        Exception.__init__(self, "Connection closed unexpectedly")
class UnknownCommand(Exception):
    """An unrecognized command was received."""
    def __init__(self, command):
        Exception.__init__(self, "Unknown command: %r" % command)
class UnexpectedCommand(Exception):
    """A command arrived that was not expected at this point."""
    def __init__(self, command):
        Exception.__init__(self, "Unexpected command: %r" % command)
class CorruptedMessage(Exception):
    """A length-prefixed message failed to parse."""
    def __init__(self):
        Exception.__init__(self, "Corrupted message")
class Stats(object):
    """Fixed set of integer counters for connection/messaging statistics."""

    __slots__ = [
        "connecting",
        "connected",
        "idle",
        "busy",
        "total_engaged",
        "total_canceled",
        "total_rolledback",
        "total_timeouts",
        "total_disconnects",
        "total_errors",
    ]

    def __init__(self, copy=None):
        """Zero all counters, or clone them from *copy* when given."""
        for key in self.__slots__:
            setattr(self, key, getattr(copy, key) if copy else 0)

    def __nonzero__(self):
        # Truthy when any counter is non-zero (Python 2 truth hook).
        return any(getattr(self, key) for key in self.__slots__)

    # Bug fix: Python 3 uses __bool__, not __nonzero__; without this alias
    # every Stats instance was unconditionally truthy under Python 3.
    __bool__ = __nonzero__

    def __str__(self):
        # "key=value" pairs in slot declaration order.
        return " ".join("%s=%s" % (key, getattr(self, key)) for key in self.__slots__)
def parse_message(data):
    """Split a frame of <uint32-le length><payload> parts into a list.

    Raises CorruptedMessage when a length header is truncated or a
    payload is shorter than its declared size.
    """
    parts = []
    pos = 0
    total = len(data)
    while pos < total:
        if total - pos < 4:
            raise CorruptedMessage()
        part_size, = struct.unpack_from("<I", data, pos)
        pos += 4
        if total - pos < part_size:
            raise CorruptedMessage()
        parts.append(data[pos: pos + part_size])
        pos += part_size
    return parts
|
shrimpboyho/herblore | pyglet-1.1.4/examples/programming_guide/events.py | Python | gpl-2.0 | 2,441 | 0.002868 | #!/usr/bin/env python
# ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
'''
| '''
__docformat__ = 'restructuredtext'
__version__ = '$Id: events.py 1966 2008-03-23 07:01:26Z Alex.Holkner $'
# NOTE: this example is Python 2 code (print statements).
import pyglet
from pyglet.window import key
from pyglet.window import mouse
window = pyglet.window.Window()
# Handlers registered via the @window.event decorator are dispatched by
# pyglet's event loop.
@window.event
def on_key_press(symbol, modifiers):
    if symbol == key.A:
        print 'The "A" key was pressed.'
    elif symbol == key.LEFT:
        print 'The left arrow key was pressed.'
    elif symbol == key.ENTER:
        print 'The enter key was pressed.'
@window.event
def on_mouse_press(x, y, button, modifiers):
    if button == mouse.LEFT:
        print 'The left mouse button was pressed.'
@window.event
def on_draw():
    # Clear to the window's background colour each frame.
    window.clear()
pyglet.app.run()
|
itsmeolivia/language-learner | translate.py | Python | mit | 241 | 0.029046 | from textblob import TextBlob
def to_english(message, original_language=None):
    """Translate *message* into English.

    :param message: the text to translate.
    :param original_language: optional source-language code; when omitted,
        TextBlob auto-detects the source language.
    :return: the translated TextBlob.
    """
    # Bug fix: the original called TextBlob(text) with an undefined name
    # `text` (the parameter is `message`), raising NameError on every call.
    blob = TextBlob(message)
    if original_language is not None:
        return blob.translate(from_lang=original_language, to="en")
    else:
        return blob.translate(to="en")
quixey/python-aliyun | tests/unit/aliyun/slb/model_test.py | Python | apache-2.0 | 7,883 | 0.000761 | # -*- coding:utf-8 -*-
# Copyright 2014, Quixey Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import aliyun.slb.connection as slb
import unittest
from aliyun.slb.model import (
BackendServer,
BackendServerStatus,
HTTPListener,
LoadBalancer,
LoadBalancerStatus,
Listener,
ListenerStatus,
Region,
TCPListener
)
class SlbRegionTest(unittest.TestCase):
    """Equality and repr behaviour of Region."""

    def testRegionEqual(self):
        self.assertEqual(Region('id1'), Region('id1'))

    def testRegionNotEqual(self):
        self.assertNotEqual(Region('id1'), Region('id2'))

    def testRegionRepr(self):
        self.assertTrue(repr(Region('id')).startswith('<SLBRegion id at'))


class SlbLoadBalancerStatusTest(unittest.TestCase):
    """Equality and repr behaviour of LoadBalancerStatus."""

    def testLoadBalancerStatusEqual(self):
        self.assertEqual(LoadBalancerStatus('id1', 'name1', 'status1'),
                         LoadBalancerStatus('id1', 'name1', 'status1'))

    def testLoadBalancerStatusNotEqual(self):
        self.assertNotEqual(LoadBalancerStatus('id1', 'name1', 'status1'),
                            LoadBalancerStatus('id2', 'name2', 'status2'))

    def testLoadBalancerStatusIDNotEqual(self):
        self.assertNotEqual(LoadBalancerStatus('id1', 'name1', 'status1'),
                            LoadBalancerStatus('id2', 'name1', 'status1'))

    def testLoadBalancerStatusNameNotEqual(self):
        self.assertNotEqual(LoadBalancerStatus('id1', 'name1', 'status1'),
                            LoadBalancerStatus('id1', 'name2', 'status1'))

    def testLoadBalancerStatusStatusNotEqual(self):
        self.assertNotEqual(LoadBalancerStatus('id1', 'name1', 'status1'),
                            LoadBalancerStatus('id1', 'name1', 'status2'))

    def testLoadBalancerStatusRepr(self):
        lb = LoadBalancerStatus('id', 'name', 'status')
        self.assertTrue(
            repr(lb).startswith('<LoadBalancerStatus id is status at'))
class SlbLoadBalancerTest(unittest.TestCase):
    """Constructor validation, equality and repr of LoadBalancer."""

    @staticmethod
    def _make(name='name'):
        # Common fixture: a fully-populated load balancer.
        return LoadBalancer('id', 'region', name, 'status', 'ip', True, [1, 2])

    def testNoLoadBalancerId(self):
        # BackendServers are not validated by the constructor.
        try:
            LoadBalancer(None, 'region', 'name', 'status', 'ip', True,
                         [1, 2], ['bs1', 'bs2'])
            self.fail('Error expected without load balancer id')
        except slb.Error as err:
            self.assertTrue('requires load_balancer_id' in str(err))

    def testLBEqual(self):
        self.assertEqual(self._make(), self._make())

    def testLBNotEqual(self):
        self.assertNotEqual(self._make('name'), self._make('name2'))

    def testRepr(self):
        self.assertTrue(repr(self._make()).startswith('<LoadBalancer id (name) at'))
class BackendServerTest(unittest.TestCase):
    """Equality and repr behaviour of BackendServer."""

    def testEqual(self):
        self.assertEqual(BackendServer('id', 1), BackendServer('id', 1))

    def testNotEqual(self):
        self.assertNotEqual(BackendServer('id', 1), BackendServer('id2', 1))

    def testRepr(self):
        self.assertTrue(
            repr(BackendServer('id', 1)).startswith(u'<BackendServer id'))


class ListenerStatusTest(unittest.TestCase):
    """Equality and repr behaviour of ListenerStatus."""

    def testEqual(self):
        backends = [BackendServer('id1', 1), BackendServer('id2', 1)]
        self.assertEqual(ListenerStatus(1, backends),
                         ListenerStatus(1, backends))

    def testPortNotEqual(self):
        backends = [BackendServer('id1', 1), BackendServer('id2', 1)]
        self.assertNotEqual(ListenerStatus(1, backends),
                            ListenerStatus(2, backends))

    def testBackendsNotEqual(self):
        first = [BackendServer('id1', 1), BackendServer('id2', 1)]
        second = [BackendServer('id3', 1), BackendServer('id4', 1)]
        self.assertNotEqual(ListenerStatus(1, first),
                            ListenerStatus(1, second))

    def testListenerStatusRepr(self):
        self.assertTrue(
            repr(ListenerStatus(1, [])).startswith(u'<ListenerStatus 1 at '))


class TCPListenerTest(unittest.TestCase):
    """Equality and repr behaviour of TCPListener."""

    def testEqual(self):
        self.assertEqual(TCPListener('id', 1, 1), TCPListener('id', 1, 1))

    def testNotEqual(self):
        self.assertNotEqual(TCPListener('id', 1, 1), TCPListener('id', 1, 2))

    def testRepr(self):
        self.assertTrue(
            repr(TCPListener('id', 1, 1)).startswith(u'<TCPListener on 1 for id'))
class HTTPListenerTest(unittest.TestCase):
    """Constructor validation, equality and repr of HTTPListener."""

    def testEqual(self):
        self.assertEqual(HTTPListener('id', 1, 1), HTTPListener('id', 1, 1))

    def testNotEqual(self):
        self.assertNotEqual(HTTPListener('id', 1, 1), HTTPListener('id', 1, 2))

    def testStickyMismatch(self):
        # sticky_session=True requires a sticky_session_type.
        try:
            HTTPListener('id', 1, 1, sticky_session=True)
            self.fail("sticky_session mismatches sticky_session_type.")
        except slb.Error as e:
            self.assertTrue('sticky_session_type must be specified' in str(e))

    def testStickyServerCookie(self):
        # sticky_session_type='server' additionally requires a cookie.
        try:
            HTTPListener('id', 1, 1,
                         sticky_session=True,
                         sticky_session_type='server')
            self.fail(
                'cookie must be specified when using '
                'sticky_session_type="server"')
        except slb.Error as e:
            self.assertTrue(
                'cookie must be specified when using '
                'sticky_session_type' in str(e))

    def testRepr(self):
        self.assertTrue(
            repr(HTTPListener('id', 1, 1)).startswith(u'<HTTPListener on 1 at '))


class BackendServerStatusTest(unittest.TestCase):
    """Equality and repr behaviour of BackendServerStatus."""

    def testEqual(self):
        self.assertEqual(BackendServerStatus('id', 's'),
                         BackendServerStatus('id', 's'))

    def testNotEqual(self):
        self.assertNotEqual(BackendServerStatus('id1', 's'),
                            BackendServerStatus('id2', 's'))

    def testRepr(self):
        self.assertTrue(
            repr(BackendServerStatus('id', 's')).startswith(
                u'<BackendServerStatus id is s at '))
|
EthanBlackburn/sync-engine | tests/general/test_tags_updates.py | Python | agpl-3.0 | 4,145 | 0.000483 | import pytest
import datetime
from inbox.models.account import Account
from inbox.models.thread import Thread
from inbox.models.folder import Folder, FolderItem
from inbox.models.message import Message
from inbox.models.backends.imap import ImapUid
from inbox.mailsync.backends.imap.common import (recompute_thread_labels,
add_any_new_thread_labels,
update_unread_status)
ACCOUNT_ID = 1  # fixture account primary key
THREAD_ID = 1   # fixture thread primary key
def test_recompute_thread_labels(db):
    # This is smoke test that checks that a lone label gets
    # added to a thread's labels.
    thread = db.session.query(Thread).get(THREAD_ID)
    g_labels = thread.messages[-1].imapuids[-1].g_labels
    g_labels.append('Random-label-1')
    recompute_thread_labels(thread, db.session)
    # The expectation below implies labels become lower-cased folder names.
    folders = {folder.name: folder for folder in thread.folders}
    assert 'random-label-1' in folders
def test_recompute_thread_labels_removes_trash(db):
    account = db.session.query(Account).get(ACCOUNT_ID)
    thread = db.session.query(Thread).get(THREAD_ID)
    account.trash_folder = Folder(name='Trash', account_id=account.id)
    db.session.flush()
    # Check that the we remove the trash folder from a thread
    # if the latest message has the inbox flag.
    # To do this, we manufacture this situation.
    g_labels = thread.messages[-1].imapuids[-1].g_labels
    if '\\Inbox' not in g_labels:
        g_labels.append('\\Inbox')
    thread.folders.add(account.trash_folder)
    recompute_thread_labels(thread, db.session)
    assert account.trash_folder not in thread.folders,\
        "should have removed trash folder from thread"
def test_adding_message_to_thread(db):
    """recompute_thread_labels is not invoked when a new message is added
    (only when UID metadata changes, or when a UID is deleted). Test that
    tag changes work when adding messages to a thread."""
    # NOTE: this file is Python 2 (long literals like 22L below).
    account = db.session.query(Account).get(ACCOUNT_ID)
    account.namespace.create_canonical_tags()
    thread = db.session.query(Thread).get(THREAD_ID)
    account.trash_folder = Folder(name='Trash', account=account)
    # NOTE(review): fld_item and this first folder_names are never read;
    # the FolderItem constructor is presumably kept for its side effect
    # of linking thread and folder -- confirm.
    fld_item = FolderItem(thread=thread, folder=account.trash_folder)
    folder_names = [folder.name for folder in thread.folders]
    m = Message(namespace_id=account.namespace.id, subject='test message',
                thread_id=thread.id, received_date=datetime.datetime.now(),
                size=64, sanitized_body="body", snippet="snippet")
    uid = ImapUid(account=account, message=m, g_labels=['\\Inbox', 'test-label'],
                  msg_uid=22L, folder_id=account.inbox_folder.id)
    uid.folder = account.inbox_folder
    uid2 = ImapUid(account=account, message=m, g_labels=['test-2'],
                   msg_uid=24L, folder_id=account.trash_folder.id)
    uid2.folder = account.trash_folder
    thread.messages.append(m)
    add_any_new_thread_labels(thread, uid, db.session)
    add_any_new_thread_labels(thread, uid2, db.session)
    folder_names = [folder.name for folder in thread.folders]
    for folder in folder_names:
        assert folder in ['Inbox', 'Trash', 'test-label', 'test-2', '[Gmail]/All Mail', '[Gmail]/Important'],\
            "all folders should be present"
    # Now, remove the message
    m.imapuids.remove(uid2)
    db.session.delete(uid2)
    db.session.flush()
    recompute_thread_labels(thread, db.session)
    folder_names = [folder.name for folder in thread.folders]
    assert 'test-2' not in folder_names,\
        "test-2 label should have been removed from thread"
def test_update_unread_status(db, thread, message, imapuid):
    """update_unread_status propagates the IMAP seen flag to message/thread."""
    # Unseen UID + read message: the UID flag should win.
    message.is_read = True
    imapuid.is_seen = False
    update_unread_status(imapuid)
    assert message.is_read is False, "message shouldn't be read"
    tag_names = [tag.name for tag in thread.tags]
    assert 'unread' in tag_names, "thread should be unread"
    # Seen UID: message read, 'unread' tag removed.
    imapuid.is_seen = True
    update_unread_status(imapuid)
    assert message.is_read is True, "message should be read"
    tag_names = [tag.name for tag in thread.tags]
    assert 'unread' not in tag_names, "thread should be read"
|
alexfalcucc/anaconda | anaconda_lib/linting/anaconda_pyflakes.py | Python | gpl-3.0 | 5,568 | 0.003592 | # -*- coding: utf8 -*-
# Copyright (C) 2013 - Oscar Campos <oscar.campos@member.fsf.org>
# This program is Free Software see LICENSE file for details
import re
import _ast
from linting import linter
import pyflakes.checker as pyflakes
# Monkey-patch: render pyflakes messages as their formatted text. Note this
# mutates the pyflakes library globally for the whole process.
pyflakes.messages.Message.__str__ = (
    lambda self: self.message % self.message_args
)
class PyFlakesError(pyflakes.messages.Message):
    """Lint error base class

    Carries an explicit severity level, message template and template args
    on top of the pyflakes Message (filename + location).
    """
    def __init__(self, filename, loc, level, message, message_args, **kwargs):
        super(PyFlakesError, self).__init__(filename, loc)
        self.level = level
        self.message = message
        self.message_args = message_args
class PyFlakesLinter(linter.Linter):
"""Linter for PyFlakes Linter
"""
    def lint(self, settings, code, filename):
        """Run the pyflakes code checker with the given options

        Honours 'pyflakes_ignore', 'pyflakes_disabled',
        'pyflakes_explicit_ignore' and 'use_pylint' from *settings*, and
        returns the parsed error list.
        """
        errors = []
        pyflakes_ignore = settings.get('pyflakes_ignore', None)
        pyflakes_disabled = settings.get('pyflakes_disabled', False)
        explicit_ignore = settings.get('pyflakes_explicit_ignore', [])
        # Skip checking entirely when pyflakes is disabled or pylint is
        # the selected linter.
        if not pyflakes_disabled and not settings.get('use_pylint'):
            errors.extend(self.check(code, filename, pyflakes_ignore))
        return self.parse(errors, explicit_ignore)
def check(self, code, filename, ignore=None):
"""Check the code with pyflakes to find errors
"""
class FakeLoc:
lineno = 0
try:
code = code.encode('utf8') + b'\n'
tree = compile(code, filename or '', 'exec', _ast.PyCF_ONLY_AST)
except (SyntaxError, IndentationError):
return self._handle_syntactic_error(code, filename)
except ValueError as error:
return [PyFlakesError(filename, FakeLoc(), error.args[0])]
else:
# the file is syntactically valid, check it now
w = pyflakes.Checker(tree, filename, ignore)
return w.messages
def parse(self, errors, explicit_ignore):
"""Parse the errors returned from the PyFlakes library
"""
error_list = []
if errors is None:
return error_list
errors.sort(key=linter.cmp_to_key(lambda a, b: a.lineno < b.lineno))
for error in errors:
error_level = 'W' if not hasattr(error, 'level') else error.level
message = error.message.capitalize()
error_data = {
'underline_range': False,
'level': error_level,
'lineno': error.lineno,
'message': message,
'raw_error': str(error)
}
if hasattr(error, 'offset'):
error_data['offset'] = error.offset
elif hasattr(error, 'col'):
error_data['offset'] = error.col
if (isinstance(error, (linter.OffsetError))):
error_data['underline_range'] = True
error_list.append(error_data)
elif (isinstance(
error, (
pyflakes.messages.RedefinedWhileUnused,
pyflakes.messages.UndefinedName,
pyflakes.messages.UndefinedExport,
pyflakes.messages.UndefinedLocal,
pyflakes.messages.UnusedVariable)) and
error.__class__.__name__ not in explicit_ignore):
error_data['len'] = len(error.message_args[0])
error_data['regex'] = (
r'((and|or|not|if|elif|while|in)\s+|[+\-*^%%<>=\(\{{])*\s'
'*(?P<underline>[\w\.]*{0}[\w]*)'.format(re.escape(
error.message_args[0]
))
)
error_list.append(error_data)
elif isinstance(error, pyflakes.messages.ImportShadowedByLoopVar):
regex = 'for\s+(?P<underline>[\w]*{0}[\w*])'.format(
| re.escape(error.message_args[0])
)
error_data['regex'] = regex
error_list.append(error_data)
elif (isinstance(
error, (
pyflakes.messages.UnusedImport,
pyflakes.messages.ImportStarUsed)) and
error.__class__.__name__ not in explicit_ignore):
if isinstance(error, pyflakes.messages.Im | portStarUsed):
word = '*'
else:
word = error.message_args[0]
linematch = '(from\s+[\w_\.]+\s+)?import\s+(?P<match>[^#;]+)'
r = '(^|\s+|,\s*|as\s+)(?P<underline>[\w]*{0}[\w]*)'.format(
re.escape(word)
)
error_data['regex'] = r
error_data['linematch'] = linematch
error_list.append(error_data)
elif (isinstance(error, pyflakes.messages.DuplicateArgument) and
error.__class__.__name__ not in explicit_ignore):
regex = 'def [\w_]+\(.*?(?P<underline>[\w]*{0}[\w]*)'.format(
re.escape(error.message_args[0])
)
error_data['regex'] = regex
error_list.append(error_data)
elif isinstance(error, pyflakes.messages.LateFutureImport):
pass
elif isinstance(error, linter.PythonError):
print(error)
else:
print(
'Ooops, we missed an error type for pyflakes', type(error)
)
return error_list
|
gista/django-selectfilter | selectfilter/utils.py | Python | mit | 1,579 | 0.034832 | # -*- coding: utf-8 -*-
# request helpers
def _cleanValue(value):
mapping = {
"True": True,
"False": False,
"None": None,
}
return mapping.get(value, value)
def lookupToString(lookup_dict):
"""
Convert the lookup dict into a string.
e.g.:
{"field1": "a", "field2": "b"} -> "field1=a,field2=b"
"""
return ",".join("%s=%s" % (k, v) for k, v in lookup_dict.items())
def stringToLookup(lookup_string):
"""
Convert the lookup string into a dict.
e.g.:
"field1=a,field2=b" -> {"field1": "a", "field2": "b"}
"""
lookup_dict = {}
for i in lookup_string.split(","):
if i:
key, value = i.split("=")
lookup_dict[str(key)] = _cleanValue(value)
return lookup_dict
# query helpers
def getLookups(lookups):
"""
The given "lookups" can be a callable object or a sequen | ce.
Return it as a list.
"""
return list(lookups() if callable(lookups) else lookups)
def _getManager(model, select_related):
if select_related is not None:
return model.objects.select_related(*select_related.split(","))
return model.objects
def getObjects(model, lookup_dict, select_related):
"""
Return the queryset given the model, the lookup dict and
the select_related part of the queryset (ignored if None).
"""
manager = _getM | anager(model, select_related)
return manager.filter(**lookup_dict)
def getObject(model, lookup_dict, select_related):
"""
Return the model instance given the model, the lookup dict and
the select_related part of the queryset (ignored if None).
"""
manager = _getManager(model, select_related)
return manager.get(**lookup_dict)
|
hfutsuchao/Python2.6 | SEOKeywordsAnalysis/KWComp.py | Python | gpl-2.0 | 3,699 | 0.0092 | #coding:utf-8
import urllib
import sys
file2014 = open('2014Kresult.txt','r').readlines()
file2015 = open('2015Kresult.txt','r').readlines()
result = open('2015result.txt','w')
dic2014 = {}
dic2015 = {}
dic = {}
uvCount = 0
pvCount = 0
ec = 0
c = 0
pv2014 = 0
pv2015 = 0
#2014关键字去重汇总
for line in file2014:
try:
kw, pv = line.split('\t')
if kw in dic2014:
dic2014[kw] = dic2014[kw] + int(pv[:-1])
else:
dic2014[kw] = int(pv[:-1])
pv2014 = pv2014 + int(pv[:-1])
except:
print line
#2015关键字去重汇总
for line in file2015:
try:
kw, pv = line.split('\t')
if kw in dic2015:
| dic2015[kw] = dic2015[kw] + int(pv[:-1])
else:
dic2015[kw] = int(pv[:-1])
pv2015 = pv2015 + int(pv[:-1])
except:
pri | nt line
print pv2014, pv2015
#计算二者之间的交集和差集
dif2015 = list(set(dic2015) - set(dic2014))
dif2014 = list(set(dic2014) - set(dic2015))
same = list(set(dic2014)&set(dic2015))
print '2015 and 2014:', len(dic2015), len(dic2014)
print 'same:',len(same), 'dif2014:', len(dif2014), 'dif2015:',len(dif2015)
citys = open('cdsx/city.txt','r').readlines()
districts = open('cdsx/district.txt','r').readlines()
streets = open('cdsx/street.txt','r').readlines()
xiaoqus = open('cdsx/xiaoqu.txt','r').readlines()
zufangKw = ['租房','出租']
dis = []
city = []
street = []
xiaoqu = []
for line in citys:
dis.append(line[:-1])
for line in districts:
city.append(line[:-1])
for line in streets:
street.append(line[:-1])
for line in xiaoqus:
xiaoqu.append(line[:-1])
'''
def dicGenerator(dim=[]):
dic = {}
dimLength = len(dim)
if dimLength == 0:
return dic
else:
for i in xrange(0,dimLength):
pass
'''
dic = {}
kwType = ['same','dif']
years = [2014,2015]
statType = ['count','sum']
isBrand = ['isBrand', 'notBrand']
for y in years:
dic[y] = {}
for k in kwType:
dic[y][k] = {}
for b in isBrand:
dic[y][k][b] = {}
for t in statType:
dic[y][k][b][t] = {}
count2014 = 0
count2015 = 0
sum2014 = 0
sum2015 = 0
c2014 = 0
c2015 = 0
s2014 = 0
s2015 = 0
#same KW 词类分布情况统计(分别修改city district street)
for kw in same:
'''sum2014 = sum2014 + dic2014[kw]
sum2015 = sum2015 + dic2015[kw]
if kw.find('赶集') != -1:
c2014 = c2014 + 1
s2014 = s2014 + dic2014[kw]
s2015 = s2015 + dic2015[kw]
continue'''
#result.write(kw + '\t' + str(dic2014[kw]) + '\n')
#pass
for k in zufangKw:
for p in city:
if kw.find(p) != -1 and kw.find(k) != -1:
count2014 = count2014 + 1
sum2014 = sum2014 + dic2014[kw]
sum2015 = sum2015 + dic2015[kw]
#print sum2015,'isBrand(count 2014uv 2015uv)',c2014,s2014,s2015
print 'notBrand(count 2014uv 2015uv)',count2014,sum2014,sum2015
sys.exit()
#dif KW 词类分布情况统计
for kw in dif2015:
#if kw.find('赶集') != -1:
result.write(kw + '\t' + str(dic2015[kw]) + '\n')
'''c2015 = c2015 + 1
s2015 = s2015 + dic2015[kw]
'''
'''for k in zufangKw:
for p in street:
if kw.find(p) != -1 and kw.find(k) != -1:
count2015 = count2015 + 1
sum2015 = sum2015 + dic2015[kw]'''
print count2014,count2015
print sum2014,sum2015
#print c2014,s2014,c2015,s2015
'''for kw in dic:
result.write(kw + '\t' + str(dic[kw]) + '\n')''' |
joopert/home-assistant | tests/components/deconz/test_scene.py | Python | apache-2.0 | 2,229 | 0.000897 | """deCONZ scene platform tests."""
from copy import deepcopy
from asynctest import patch
from homeassistant.components import deconz
from homeassistant.setup import async_setup_component
import homeassistant.components.scene as scene
from .test_gateway import ENTRY_CONFIG, DECONZ_WEB_REQUEST, setup_deconz_integration
GROUPS = {
"1": {
"id": "Light group id",
"name": "Light group",
"type": "LightGroup",
"state": {"all_on": False, "any_on": True},
"action": {},
"scenes": [{"id": "1", "name": "Scene"}],
"lights": [],
}
}
async def test_platform_manually_configured(hass):
"""Test that we do not discover anything or try to set up a gateway."""
assert (
await async_setup_component(
hass, scene.DOMAIN, {"scene": {"platform": deconz.DOMAIN}}
)
is True
)
assert deconz.DOMAIN not in hass.data
async def test_no_scenes(hass):
"""Test that scenes can be loaded without scenes being available."""
data = deepcopy(DECONZ_WEB_REQUEST)
gateway = await setup_deconz_integration(
hass, ENTRY_CONFIG, options={}, get_state_response=data
)
| assert len(gateway.deconz_ids) == 0
assert len(hass.states.async_all()) == 0
async def test_scenes(hass):
"""Test that scenes works."""
data = deepcopy(DECONZ_WEB_REQUEST)
data["groups"] = deepcopy(GROUPS)
gateway = await setu | p_deconz_integration(
hass, ENTRY_CONFIG, options={}, get_state_response=data
)
assert "scene.light_group_scene" in gateway.deconz_ids
assert len(hass.states.async_all()) == 1
light_group_scene = hass.states.get("scene.light_group_scene")
assert light_group_scene
group_scene = gateway.api.groups["1"].scenes["1"]
with patch.object(
group_scene, "_async_set_state_callback", return_value=True
) as set_callback:
await hass.services.async_call(
"scene", "turn_on", {"entity_id": "scene.light_group_scene"}, blocking=True
)
await hass.async_block_till_done()
set_callback.assert_called_with("/groups/1/scenes/1/recall", {})
await gateway.async_reset()
assert len(hass.states.async_all()) == 0
|
DONIKAN/django | django/contrib/auth/management/commands/changepassword.py | Python | bsd-3-clause | 2,610 | 0.002682 | from __future__ import unicode_literals
import getpass
from django.contrib.auth import get_user_model
from django.contrib.auth.password_validation import validate_password
from django.core.exceptions import ValidationError
from django.core.management.base import BaseCommand, CommandError
from django.db import DEFAULT_DB_ALIAS
from django.utils.encoding import force_str
class Command(BaseCommand):
help = "Change a user's password for django.contrib.auth."
requires_system_checks = False
def _get_pass(self, prompt="Password: "):
p = getpass.getpass(prompt=force_str(prompt))
if not p:
raise CommandError("aborted")
return p
def add_arguments(self, parser):
parser.add_argument('username', nargs='?',
help='Username to change password for; by default, it\'s the current username.')
parser.add_argument('--database', action='store', dest='database',
default=DEFAULT_DB_ALIAS,
help='Specifies the database to use. Default is "default".')
def handle(self, *args, **options):
if options.get('username'):
username = options['username']
else:
username = getpass.getuser()
UserModel = get_user_model()
try:
u = UserModel._default_manager.using(options.get('database')).get(**{
UserModel.USERNAME_FIELD: username
})
except UserModel.DoesNotExist:
raise CommandError("user '%s' does not exist" % username)
self.stdout.write("Changing password for user '%s'\n" % u)
MAX_TRIES = 3
count = 0
p1, p2 = 1, 2 # To make them initially mismatch.
password_validated = False
while (p1 != p2 or not password_validated) and count < MAX_TRIES:
p1 = self._get_pass()
p2 = self._get_p | ass("Password (again): ")
if p1 != p2:
self.stdout.write("Passwords do not match. Please try again.\n")
co | unt += 1
# Don't validate passwords that don't match.
continue
try:
validate_password(p2, u)
except ValidationError as err:
self.stdout.write(', '.join(err.messages))
count += 1
else:
password_validated = True
if count == MAX_TRIES:
raise CommandError("Aborting password change for user '%s' after %s attempts" % (u, count))
u.set_password(p1)
u.save()
return "Password changed successfully for user '%s'" % u
|
yamstudio/Codeforces | 100/118A - String Task.py | Python | gpl-3.0 | 109 | 0.018349 | word = raw_input().lower()
v = | 'aeiouy' |
n = ''
for c in word:
if not c in v:
n += '.' + c
print n |
jvicu2001/alexis-bot | bot/modules/guildlist.py | Python | mit | 1,174 | 0.002555 | from discord import Embed
from bot import Command, categories
class GuildList(Command):
def __init__(self, bot):
super().__init__(bot)
self.name = 'guildlist'
self.aliases = ['guilds']
self.category = categories.SETTINGS
self.bot_owner_only = True
async def handle(self, cmd):
if not cmd.is_pm:
await cmd.answer('$[guildlist-answer]')
if len(self.bot.guilds) == 0:
await cmd.answer('$[guildlist-none]', to_author=True)
return
a | wait cmd.answer('$[guildlist-msg]', locales={'amount': len(self.bot.guilds)}, to_author=True)
resp_list = ''
for guild in self.bot.guilds:
item = '- {} ({})'.format(guild.name, guild.id)
if len('{}\n{}'.format(resp_list, item)) > 2000:
await cmd | .answer(Embed(description=resp_list), withname=False, to_author=True)
resp_list = ''
else:
resp_list = '{}\n{}'.format(resp_list, item)
# Send remaining list
if resp_list != '':
await cmd.answer(Embed(description=resp_list), withname=False, to_author=True)
|
rafaeltg/pydl | pydl/models/linear/__init__.py | Python | mit | 50 | 0 | from . | lasso import Lasso
from .ridge | import Ridge
|
otadmor/Open-Knesset | simple/management/commands/parse_government_bill_pdf/textutil.py | Python | bsd-3-clause | 2,023 | 0.008403 | from itertools import chain
import re
from hashlib import md5
superscript = u'\u200F'
digits = set([str(i) for i in xrange(10)])
def fix_superscripts(txt):
""" Note: call this before reverse_numbers!!
Here's what happens:
We have the following input to pdftotext
AAAAA XS DBBB
What we get is
AAAAA BBB XS
D
So for instance DBBB would be a year number, and we get the most significant
digit on a separate line. X is the unicode superscript char, and S is the char
being superscripted.
Here we fix this, and return the list of superscripts with their location in the
text.
"""
superscripts = []
for i, l in enumerate(txt):
if superscript in l:
i_ss = l.find(superscript)
ss = l[i_ss+1:i_ss+2] # FIXME - this assumes single digit
# number of superscript should be on next line alone.
D = txt[i+1].strip()
txt[i] = txt[i][:i_ss-1] + D + txt[i][i_ss+2:]
del txt[i+1]
superscripts.append((i, i_ss, ss))
return txt, superscripts
def reverse_number | s(s):
ret = []
start = 0
for match in re.finditer('[0-9\.]+', s):
sl = slice(*match.span())
ret.append(s[start:sl.start])
ret.append(reversed(s[sl]))
start = sl.stop
ret.append(s[start:])
return ''.join(chain(*ret))
def sanitize(lines):
""" remove non text unicode charachters; maybe some of them could be used
to give hints on parsing?. """
return [line.re | place(u'\u202b','').replace(u'\u202c','').replace(u'\x0c','')
for line in lines]
def text_block_iter(lines):
block = []
for line in lines:
if line.strip() == '':
if len(block) > 0:
yield block
block = []
continue
block.append(line)
if len(block) > 0:
yield block
def checksum(lines):
return md5(''.join(lines)).digest()
def asblocks(lines):
return list(text_block_iter(lines))
|
donkawechico/arguman.org | web/profiles/models.py | Python | mit | 5,320 | 0.000564 | # -*- coding: utf-8 -*-
from django.conf import settings
from django.contrib.auth.models import AbstractUser
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.db.models import Count
from django.dispatch import receiver
from django.template.loader import render_to_string
from premises.models import Report, Premise
from premises.signals import (reported_as_fallacy, added_premise_for_premise,
added_premise_for_contention,
supported_a_premise)
from profiles.signals import follow_done
from django.utils.translation import ugettext_lazy as _
from django.core.mail import send_mail
class Profile(AbstractUser):
following = models.ManyToManyField("self", symmetrical=False)
notification_email = models.BooleanField(_('email notification'), default=True)
def serialize(self):
return {'username': self.username,
'email': self.email}
@property
def followers(self):
# todo: find a way to make reverse relationships
# with symmetrical false option
return Profile.objects.filter(following=self)
@property
def supported_premise_count(self):
return self.premise_set.aggregate(Count('supporters'))[
'supporters__count']
@models.permalink
def get_absolute_url(self):
return "auth_profile", [self.username]
NOTIFICATION_ADDED_PREMISE_FOR_CONTENTION = 0
NOTIFICATION_ADDED_PREMISE_FOR_PREMISE = 1
NOTIFICATION_REPORTED_AS_FALLACY = 2
NOTIFICATION_FOLLOWED_A_PROFILE = 3
NOTIFICATION_SUPPORTED_A_PREMISE = 4
NOTIFICATION_TYPES = (
(NOTIFICATION_ADDED_PREMISE_FOR_CONTENTION,
"added-premise-for-contention"),
(NOTIFICATION_ADDED_PREMISE_FOR_PREMISE,
"added-premise-for-premise"),
(NOTIFICATION_REPORTED_AS_FALLACY,
"reported-as-fallacy"),
(NOTIFICATION_FOLLOWED_A_PROFILE,
"followed"),
(NOTIFICATION_SUPPORTED_A_PREMISE,
"supported-a-premise"),
)
class Notification(models.Model):
# sender can be `null` for system notifications
sender = models.ForeignKey(settings.AUTH_USER_MODEL,
null=True, blank=True,
related_name="sent_notifications")
recipient = models.ForeignKey(settings.AUTH_USER_MODEL,
related_name="notifications")
date_created = models.DateTimeField(auto_now_add=True)
notification_type = models.IntegerField(choices=NOTIFICATION_TYPES)
is_read = models.BooleanField(default=False)
target_object_id = models.IntegerField(null=True, blank=True)
class Meta:
ordering = ['is_read', '-date_created']
def get_target_object(self):
model = {
NOTIFICATION_ADDED_PREMISE_FOR_CONTENTION: Premise,
NOTIFICATION_ADDED_PREMISE_FOR_PREMISE: Premise,
NOTIFICATION_REPORTED_AS_FALLACY: Report,
NOTIFICATION_FOLLOWED_A_PROFILE: Profile,
NOTIFICATION_SUPPORTED_A_PREMISE: Premise,
}.get(self.notification_type)
try:
instance = model.objects.get(pk=self.target_object_id)
except ObjectDoesNotExist:
instance = None
return instance
def render(self):
template_name = ("notifications/%s.html" %
self.get_notification_type_display())
return render_to_string(template_name, {
"notification": self,
"target_object": self.get_target_object()
})
@receiver(reported_as_fallacy)
def create_fallacy_notification(sender, report, *args, **kwargs):
Notification.objects.create(
sender=None, # notification should be anonymous
recipient=report.premise.user,
| no | tification_type=NOTIFICATION_REPORTED_AS_FALLACY,
target_object_id=report.id
)
@receiver(added_premise_for_premise)
def create_premise_answer_notification(sender, premise, *args, **kwargs):
if premise.user != premise.parent.user:
Notification.objects.create(
sender=premise.user,
recipient=premise.parent.user,
notification_type=NOTIFICATION_ADDED_PREMISE_FOR_PREMISE,
target_object_id=premise.id
)
@receiver(supported_a_premise)
def create_premise_answer_notification(premise, user, *args, **kwargs):
Notification.objects.create(
sender=user,
recipient=premise.user,
notification_type=NOTIFICATION_SUPPORTED_A_PREMISE,
target_object_id=premise.id
)
@receiver(added_premise_for_contention)
def create_contention_contribution_notification(sender, premise, *args, **kwargs):
if premise.user != premise.argument.user:
Notification.objects.create(
sender=premise.user,
recipient=premise.argument.user,
notification_type=NOTIFICATION_ADDED_PREMISE_FOR_CONTENTION,
target_object_id=premise.id
)
@receiver(follow_done)
def create_following_notification(following, follower, **kwargs):
"""
Sends notification to the followed user from the follower.
"""
Notification.objects.create(
target_object_id=follower.id,
notification_type=NOTIFICATION_FOLLOWED_A_PROFILE,
sender=follower,
recipient_id=following.id
)
|
cc-archive/commoner | src/commoner/profiles/tests/__init__.py | Python | agpl-3.0 | 76 | 0 | from test_delete import *
from test_openid i | mport *
from test_edit import *
| |
jimboca/ISYHelperHABridge | ihab/ISY.py | Python | apache-2.0 | 11,333 | 0.0105 |
try:
# python 2.7
from urllib import quote
from urllib import urlencode
except ImportError:
# python 3.4
from urllib.parse import quote
from urllib.parse import urlencode
import sys
import re
import PyISY
class isy():
def __init__(self,config,logger,status,bridge):
self.config = config
self.logger = logger
self.status = status
self.user = config['isy']['user']
self.password = config['isy']['password']
self.host = config['isy']['host']
self.port = config['isy']['port']
self.bridge = bridge
self.isy_url = "http://%s:%s@%s:%s/rest/nodes" % (self.user,self.password,self.host,self.port)
self.ihb_url = "http://%s:%s" % (config['this_host']['host'],config['this_host']['port'])
if config['isy']['log_enable']:
log = self.logger
else:
log=None
self.status.set("isy: Connecting %s:%s log=%s" % (self.host,self.port,log))
self.isy = PyISY.ISY(self.host, self.port, self.user, self.password, False, "1.1", log=log)
self.status.set("isy: connected: %s" % (str(self.isy.connected)))
self.isy.auto_update = True
self.get_spoken()
def get_spoken(self):
self.status.set("isy: Checking for Spoken objects.")
self.devices = []
for child in self.isy.nodes.allLowerNodes:
#print child
self.logger.debug(child)
if child[0] is 'node' or child[0] is 'group':
#self.logger.info(child)
main = self.isy.nodes[child[2]]
spoken = main.spoken
if spoken is not None:
# TODO: Should this be a comman seperate list of which echo will respond?
# TODO: Or should that be part of notes?
if spoken == '1':
spoken = main.name
self.logger.info("isy: name=%s spoken=%s" % (main.name,str(spoken)))
scene = False
if child[0] is 'node':
# Is it a controller of a scene?
cgroup = main.get_groups(responder=False)
if len(cgroup) > 0:
# TODO: We don't need to do this anymore, since we can put Spoken on Scenes!
scene = self.isy.nodes[cgroup[0]]
self.logger.info("isy: %s is a scene controller of %s='%s'" % (str(cgroup[0]),str(scene),scene.name))
#else:
# TODO: This shoud be all scene responders that are dimmable?
# TODO: Let set_on commands handle these
#if len(main.controllers) > 0:
# scene = self.isy.nodes[main.controllers[0]]
if self.has_device_by_name(spoken) is False:
self.devices.append(isy_node_handler(self,spoken,main,scene))
else:
self.logger.error("isy: Duplicate Ignored: '%s' for main='%s' scene=%s" % (spoken,main,scene))
# Now that we have all devices, delete bridge devices that don't exist anymore
prog = re.compile("isy:.+$")
for bdev in self.bridge.devices:
if self.has_device_by_id(bdev["id"]) is False and "mapId" in bdev and prog.match(bdev["mapId"]):
self.logger.warning("isy: Removing bridge device %s '%s'(%s)",bdev["id"],bdev["name"],bdev["mapId"])
self.bridge.delete(bdev)
def has_device_by_name(self,name):
for dev in self.devices:
if dev.name == name:
return dev
return False
def has_device_by_id(self,id):
for dev in self.devices:
if dev.bid == id:
return dev
return False
def has_device_by_mapid(self,map_id):
for dev in self.devices:
if dev.map_id == map_id:
return dev
return False
def do_cmd(self,id,cmd,val=None):
dev = self.has_device_by_id(id)
if dev is False:
self.logger.error("isy: No device '%s' for command '%s'",str(id),cmd)
return dev
if cmd == "on":
if val is None:
return dev.set_on()
else:
return dev.set_bri(val)
elif cmd == "off":
return dev.set_off()
self.logger.error("isy: Unknown command '%s' for device '%s'",cmd,str(id))
return False
#
# These push device changes to the ha-bridge
#
class isy_node_handler():
def __init__(self, parent, name, main, scene):
self.parent = parent
# F | orce as string | to make habridge happy?
self.name = str(name)
self.main = main
self.scene = scene
self.map_id = str("isy:%s" % (self.main._id))
self.parent.logger.info('isy:node:.__init__: name=%s node=%s scene=%s' % (self.name, self.main, self.scene))
# Subscribe to changes, if main is not a scene.
# This is because PyISY notification of scene on/off doesn't work properly,
# it notifies if anything on the kpl controlling that scene changes?
if type(self.main).__name__ != "Group":
main.status.subscribe('changed', self.get_all_changed)
# ISY URL's
self.isy_url = "%s/%s/cmd" % (self.parent.isy_url,quote(self.main._id))
self.isy_on = "%s/DON" % (self.isy_url)
self.isy_off = "%s/DOF" % (self.isy_url)
self.isy_bri = "%s/DON/{}" % (self.isy_url)
# The URL's that are passed to ha-bridge to control this device.
self.f_on = self.isy_on
self.f_off = self.isy_off
# TODO: If main is a scene, we should not set this since it does nothing?
self.f_bri = self.isy_bri
# Add it to the ha-bridge cause we need it's id for the ihb url's
self.add_or_update()
# IHB URL's
self.ihb_url = "%s/device/%s" % (self.parent.ihb_url,quote(self.bid))
self.ihb_on = "%s/on" % (self.ihb_url)
self.ihb_off = "%s/off" % (self.ihb_url)
self.ihb_bri = "%s/on/{}" % (self.ihb_url)
# TODO: Reset f_* functions if controlling thru ihab
if self.scene is not False or self.parent.config['use_rest'] is not True:
self.f_on = self.ihb_on
self.f_off = self.ihb_off
self.f_bri = self.ihb_bri
self.add_or_update()
# Set my on/off/bri status.
self.get_all()
def add_or_update(self):
self.payload = {
'name' : self.name,
'mapId' : self.map_id,
'deviceType' : 'custom',
'onUrl' : self.f_on,
'dimUrl' : self.f_bri.format('${intensity.byte}'),
'offUrl' : self.f_off,
'httpVerb' : 'GET',
}
(st,id) = self.parent.bridge.add_or_update_device(self.payload)
self.bid = id
def get_all_changed(self,e):
self.parent.logger.info('isy:get_all_changed: %s e=%s' % (self.name, str(e)))
self.get_all()
def get_all(self):
self.parent.logger.info('isy:get_all: %s status=%s' % (self.name, str(self.main.status)))
# Some devices don't have status, like polyglot?
if self.main.status is None:
self.parent.logger.info('isy:get_all: %s No device status available' % (self.name))
self.on = "false"
self.bri = 0
elif self.main.status is True:
self.bri = 0
self.on = "true"
elif self.main.status is False:
self.bri = 0
self.on = "false"
else:
# node.status will be 0-255
self.bri = self.main.status
if int(self.main.status) == 0:
self.on = "false"
else:
self.on = "true"
sel |
saketkc/open-ehr-django | open-ehr-django-src/report_manager/forms.py | Python | lgpl-3.0 | 1,714 | 0.012252 | from django import forms
from open-ehr.report_manager.models import *
class NewReportTypeForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.report_belongs_to_lab = kwargs.pop('report_belongs_to_lab', None)
super(NewReportTypeForm, self).__init__(*args, **kwargs)
def save(self, commit=True):
instance = super(NewReportTypeForm, self).save(commit=False)
if self.report_belongs_to_lab:
instance.report_belongs_to_lab = self.report_belongs_to_lab
return instance.save()
class Meta:
model = ReportTypes
exclude = ('report_belongs_to_lab')
class ReportChoiceForm(forms.Form):
report_name = forms.ModelChoiceField(queryset = ReportTypes.objects.all())
class ReportResultsForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.lab_owner = kwargs.pop('lab_owner',None)
self.report_type = kwargs.pop('report_type',None)
super(ReportResultsForm, self).__init__(*args, **kwargs)
def save(self, commit=True):
instance = super(ReportResultsForm, self).save(commit=False)
if self.lab_owner:
instance.lab_owner = self.lab_owner
if self.report_type:
instance.report_type = self.report_type
instance.save()
class Meta:
model = | ReportResults
exclude =('lab_owner','report_type')
def report_result_form(test_name_fields):
fields = {'user_mobile' : forms.CharField(max_length=11),"user_dob":forms.CharField(),}
for test_id in test_name_fields:
fields[test_name_fields[test_id]] = forms.CharField(max_length=100)
return type('ReportResultsForm', (forms | .BaseForm,), { 'base_fields': fields })
|
plouto5/Python | phone_sms/send_message.py | Python | gpl-3.0 | 255 | 0.031373 | from clockwork import | clockwork
api = clockwork.API('e22468d6b972f1425f77827f6f279d11b4b2c183')
message = clockwork.SMS(from_name = "The Dark Master", to = "6129102910", mes | sage = "I am watching you.....")
response = api.send(message)
print(response)
|
rohitranjan1991/home-assistant | tests/components/mqtt/test_cover.py | Python | mit | 102,124 | 0.000793 | """The tests for the MQTT cover platform."""
from unittest.mock import patch
import pytest
from homeassistant.components import cover
from homeassistant.components.cover import (
ATTR_CURRENT_POSITION,
ATTR_CURRENT_TILT_POSITION,
ATTR_POSITION,
ATTR_TILT_POSITION,
)
from homeassistant.components.mqtt import CONF_STATE_TOPIC
from homeassistant.components.mqtt.cover import (
CONF_GET_POSITION_TEMPLATE,
CONF_GET_POSITION_TOPIC,
CONF_SET_POSITION_TEMPLATE,
CONF_SET_POSITION_TOPIC,
CONF_TILT_COMMAND_TEMPLATE,
CONF_TILT_COMMAND_TOPIC,
CONF_TILT_STATUS_TEMPLATE,
CONF_TILT_STATUS_TOPIC,
MQTT_COVER_ATTRIBUTES_BLOCKED,
MqttCover,
)
from homeassistant.const import (
ATTR_ASSUMED_STATE,
ATTR_ENTITY_ID,
CONF_VALUE_TEMPLATE,
SERVICE_CLOSE_COVER,
SERVICE_CLOSE_COVER_TILT,
SERVICE_OPEN_COVER,
SERVICE_OPEN_COVER_TILT,
SERVICE_SET_COVER_POSITION,
SERVICE_SET_COVER_TILT_POSITION,
SERVICE_STOP_COVER,
SERVICE_TOGGLE,
SERVICE_TOGGLE_COVER_TILT,
STATE_CLOSED,
STATE_CLOSING,
STATE_OPEN,
STATE_OPENING,
STATE_UNKNOWN,
)
from homeassistant.setup import async_setup_component
from .test_common import (
help_test_availability_when_connection_lost,
help_test_availability_without_topic,
help_test_custom_availability_payload,
help_test_default_availability_payload,
help_test_discovery_broken,
help_test_discovery_removal,
help_test_discovery_update,
help_test_discovery_update_attr,
help_test_discovery_update_unchanged,
help_test_encoding_subscribable_topics,
help_test_entity_debug_info_message,
help_test_entity_device_info_remove,
help_test_entity_device_info_update,
help_test_entity_device_info_with_connection,
help_test_entity_device_info_with_identifier,
help_test_entity_id_update_discovery_update,
help_test_entity_id_update_subscriptions,
help_test_publishing_with_custom_encoding,
help_test_reloadable,
help_test_reloadable_late,
help_test_setting_attribute_via_mqtt_json_message,
help_test_setting_attribute_with_template,
help_test_setting_blocked_attribute_via_mqtt_json_message,
help_test_unique_id,
help_test_update_with_json_attrs_bad_JSON,
help_test_update_with_json_attrs_not_dict,
)
from tests.common import async_fire_mqtt_message
DEFAULT_CONFIG = {
cover.DOMAIN: {"platform": "mqtt", "name": "test", "state_topic": "test-topic"}
}
async def test_state_via_state_topic(hass, mqtt_mock):
"""Test the controlling state via topic."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"qos": 0,
"payload_open": "OPEN",
"payload_close": "CLOSE",
"payload_stop": "STOP",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("cover.test")
assert state.state == STATE_UNKNOWN
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "state-topic", STATE_CLOSED)
state = hass.states.get("cover.test")
assert state.state == STATE_CLOSED
async_fire_mqtt_message(hass, "state-topic", STATE_OPEN)
state = hass.states.get("cover.test")
assert state.state == STATE_OPEN
async def test_opening_and_closing_state_via_custom_state_payload(hass, mqtt_mock):
"""Test the controlling opening and closing state via a custom payload."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"qos": 0,
"payload_open": "OPEN",
" | payload_close": "CLOSE",
"payload_stop": "STOP",
"state_opening": "34",
"state_closing": "--43",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("cover.test")
assert state.state == STATE_UNKNOWN
assert not state.attributes.get(ATTR_ASSUMED_STATE)
a | sync_fire_mqtt_message(hass, "state-topic", "34")
state = hass.states.get("cover.test")
assert state.state == STATE_OPENING
async_fire_mqtt_message(hass, "state-topic", "--43")
state = hass.states.get("cover.test")
assert state.state == STATE_CLOSING
async_fire_mqtt_message(hass, "state-topic", STATE_CLOSED)
state = hass.states.get("cover.test")
assert state.state == STATE_CLOSED
async def test_open_closed_state_from_position_optimistic(hass, mqtt_mock):
    """Test the state after setting the position using optimistic mode."""
    assert await async_setup_component(
        hass,
        cover.DOMAIN,
        {
            cover.DOMAIN: {
                "platform": "mqtt",
                "name": "test",
                "position_topic": "position-topic",
                "set_position_topic": "set-position-topic",
                "qos": 0,
                "payload_open": "OPEN",
                "payload_close": "CLOSE",
                "payload_stop": "STOP",
                "optimistic": True,
            }
        },
    )
    await hass.async_block_till_done()

    state = hass.states.get("cover.test")
    assert state.state == STATE_UNKNOWN

    # In optimistic mode the entity assumes the commanded position took
    # effect: position 0 means fully closed.
    await hass.services.async_call(
        cover.DOMAIN,
        SERVICE_SET_COVER_POSITION,
        {ATTR_ENTITY_ID: "cover.test", ATTR_POSITION: 0},
        blocking=True,
    )
    state = hass.states.get("cover.test")
    assert state.state == STATE_CLOSED
    assert state.attributes.get(ATTR_ASSUMED_STATE)

    # Position 100 means fully open; the state stays assumed (optimistic).
    await hass.services.async_call(
        cover.DOMAIN,
        SERVICE_SET_COVER_POSITION,
        {ATTR_ENTITY_ID: "cover.test", ATTR_POSITION: 100},
        blocking=True,
    )
    state = hass.states.get("cover.test")
    assert state.state == STATE_OPEN
    assert state.attributes.get(ATTR_ASSUMED_STATE)
async def test_position_via_position_topic(hass, mqtt_mock):
    """Test the controlling state via topic."""
    assert await async_setup_component(
        hass,
        cover.DOMAIN,
        {
            cover.DOMAIN: {
                "platform": "mqtt",
                "name": "test",
                "position_topic": "get-position-topic",
                # Explicit endpoints of the position range.
                "position_open": 100,
                "position_closed": 0,
                "command_topic": "command-topic",
                "qos": 0,
                "payload_open": "OPEN",
                "payload_close": "CLOSE",
                "payload_stop": "STOP",
            }
        },
    )
    await hass.async_block_till_done()

    state = hass.states.get("cover.test")
    assert state.state == STATE_UNKNOWN
    assert not state.attributes.get(ATTR_ASSUMED_STATE)

    # A reported position equal to position_closed yields "closed".
    async_fire_mqtt_message(hass, "get-position-topic", "0")
    state = hass.states.get("cover.test")
    assert state.state == STATE_CLOSED

    # A reported position equal to position_open yields "open".
    async_fire_mqtt_message(hass, "get-position-topic", "100")
    state = hass.states.get("cover.test")
    assert state.state == STATE_OPEN
async def test_state_via_template(hass, mqtt_mock):
"""Test the controlling state via topic."""
assert await async_setup_component(
hass,
cover.DOMAIN,
{
cover.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "state-topic",
"command_topic": "command-topic",
"qos": 0,
"value_template": "\
{% if (value | multiply(0.01) | int) == 0 %}\
closed\
{% else %}\
open\
{% endif %}",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("cover.test")
assert state.state == STATE_UNKNOWN
async_fire_mqtt_message(hass, "state-topic", "10000")
|
twiest/openshift-tools | ansible/roles/lib_openshift_3.2/library/oc_pvc.py | Python | apache-2.0 | 40,439 | 0.002646 | #!/usr/bin/env python # pylint: disable=too-many-lines
# ___ ___ _ _ ___ ___ _ _____ ___ ___
# / __| __| \| | __| _ \ /_\_ _| __| \
# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
# | |) | (_) | | .` | (_) || | | _|| |) | | | |
# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
'''
OpenShiftCLI class that wraps the oc commands in a subprocess
'''
# pylint: disable=too-many-lines
import atexit
import json
import os
import re
import shutil
import subprocess
import ruamel.yaml as yaml
#import yaml
#
## This is here because of a bug that causes yaml
## to incorrectly handle timezone info on timestamps
#def timestamp_constructor(_, node):
# '''return timestamps as strings'''
# return str(node.value)
#yaml.add_constructor(u'tag:yaml.org,2002:timestamp', timestamp_constructor)
class OpenShiftCLIError(Exception):
    '''Raised for errors encountered while driving the `oc` client.'''
# pylint: disable=too-few-public-methods
class OpenShiftCLI(object):
''' Class to wrap the command line tools '''
def __init__(self,
             namespace,
             kubeconfig='/etc/origin/master/admin.kubeconfig',
             verbose=False,
             all_namespaces=False):
    ''' Constructor for OpenshiftCLI

    namespace: the project most `oc` commands are scoped to
    kubeconfig: path to the kubeconfig passed to `oc` via KUBECONFIG
    verbose: when True, echo each command line before it is executed
    all_namespaces: when True, `_get` queries span every namespace
    '''
    self.namespace = namespace
    self.verbose = verbose
    self.kubeconfig = kubeconfig
    self.all_namespaces = all_namespaces
# Pylint allows only 5 arguments to be passed.
# pylint: disable=too-many-arguments
def _replace_content(self, resource, rname, content, force=False, sep='.'):
    ''' replace the current object with the content

    Fetches the live object, applies each key/value from `content` with
    Yedit (keys may be dotted paths split on `sep`), and only invokes
    `oc replace` when at least one value actually changed.
    '''
    res = self._get(resource, rname)
    if not res['results']:
        # Nothing to replace; surface the (empty) get result unchanged.
        return res

    fname = '/tmp/%s' % rname
    yed = Yedit(fname, res['results'][0], separator=sep)
    changes = []
    for key, value in content.items():
        changes.append(yed.put(key, value))

    # yed.put presumably returns a (changed, ...) tuple -- TODO confirm.
    if any([change[0] for change in changes]):
        # Persist the edited yaml, schedule its cleanup, and replace.
        yed.write()
        atexit.register(Utils.cleanup, [fname])
        return self._replace(fname, force)

    return {'returncode': 0, 'updated': False}
def _replace(self, fname, force=False):
    '''run `oc replace -f <fname>` in the namespace, optionally with --force'''
    cmd = ['-n', self.namespace, 'replace', '-f', fname]
    if force:
        cmd.append('--force')
    return self.openshift_cmd(cmd)
def _create_from_content(self, rname, content):
    '''write `content` to a temporary yaml file and `oc create` it'''
    fname = '/tmp/%s' % rname
    yed = Yedit(fname, content=content)
    yed.write()
    # Remove the temp file when the interpreter exits.
    atexit.register(Utils.cleanup, [fname])
    return self._create(fname)
def _create(self, fname):
    '''run `oc create -f <fname>` in the namespace'''
    return self.openshift_cmd(['create', '-f', fname, '-n', self.namespace])
def _delete(self, resource, rname, selector=None):
    '''delete a named resource, optionally narrowed by a label selector'''
    cmd = ['delete', resource, rname, '-n', self.namespace]
    if selector:
        cmd.append('--selector=%s' % selector)
    return self.openshift_cmd(cmd)
def _process(self, template_name, create=False, params=None, template_data=None):
    '''run `oc process` on a template, optionally creating the result

    template_data, when given, is piped to `oc process -f -` instead of
    referencing the named template; `params` are passed as KEY=VALUE pairs.
    '''
    cmd = ['process', '-n', self.namespace]
    if template_data:
        # Read the template from stdin rather than by name.
        cmd.extend(['-f', '-'])
    else:
        cmd.append(template_name)
    if params:
        param_str = ["%s=%s" % (key, value) for key, value in params.items()]
        cmd.append('-v')
        cmd.extend(param_str)

    results = self.openshift_cmd(cmd, output=True, input_data=template_data)

    if results['returncode'] != 0 or not create:
        # Processing failed, or the caller only wanted the rendered objects.
        return results

    # Persist the processed objects and create them in the namespace.
    fname = '/tmp/%s' % template_name
    yed = Yedit(fname, results['results'])
    yed.write()
    atexit.register(Utils.cleanup, [fname])
    return self.openshift_cmd(['-n', self.namespace, 'create', '-f', fname])
def _get(self, resource, rname=None, selector=None):
    '''return a resource by name

    Runs `oc get <resource> [rname] -o json`, scoped to the configured
    namespace (or --all-namespaces), and normalizes the payload so that
    rval['results'] is always a list.
    '''
    cmd = ['get', resource]
    if selector:
        cmd.append('--selector=%s' % selector)
    if self.all_namespaces:
        cmd.extend(['--all-namespaces'])
    elif self.namespace:
        cmd.extend(['-n', self.namespace])

    cmd.extend(['-o', 'json'])

    if rname:
        cmd.append(rname)

    rval = self.openshift_cmd(cmd, output=True)

    # Ensure results are returned in an array.
    # NOTE: `in` replaces dict.has_key, which was removed in Python 3.
    if 'items' in rval:
        rval['results'] = rval['items']
    elif not isinstance(rval['results'], list):
        rval['results'] = [rval['results']]

    return rval
def _schedulable(self, node=None, selector=None, schedulable=True):
    '''set node schedulability via `oadm manage-node --schedulable`'''
    # Operate either on explicit node names or on a label selector.
    cmd = ['manage-node']
    if node:
        cmd.extend(node)
    else:
        cmd.append('--selector=%s' % selector)
    cmd.append('--schedulable=%s' % schedulable)
    return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
def _list_pods(self, node=None, selector=None, pod_selector=None):
    '''list pods on nodes via `oadm manage-node --list-pods`'''
    # Operate either on explicit node names or on a label selector.
    cmd = ['manage-node']
    if node:
        cmd.extend(node)
    else:
        cmd.append('--selector=%s' % selector)
    if pod_selector:
        cmd.append('--pod-selector=%s' % pod_selector)
    cmd.extend(['--list-pods', '-o', 'json'])
    return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
#pylint: disable=too-many-arguments
def _evacuate(self, node=None, selector=None, pod_selector=None, dry_run=False, grace_period=None, force=False):
    '''evacuate pods off nodes via `oadm manage-node --evacuate`'''
    # Operate either on explicit node names or on a label selector.
    cmd = ['manage-node']
    if node:
        cmd.extend(node)
    else:
        cmd.append('--selector=%s' % selector)
    if dry_run:
        cmd.append('--dry-run')
    if pod_selector:
        cmd.append('--pod-selector=%s' % pod_selector)
    if grace_period:
        cmd.append('--grace-period=%s' % int(grace_period))
    if force:
        cmd.append('--force')
    cmd.append('--evacuate')
    return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
def _import_image(self, url=None, name=None, tag=None):
    '''perform image import

    Builds `oc import-image <name>[:<tag>]`, optionally pointing at an
    external registry via --from, and auto-confirms the import.
    '''
    cmd = ['import-image']

    image = '{0}'.format(name)
    if tag:
        image += ':{0}'.format(tag)

    cmd.append(image)

    if url:
        cmd.append('--from={0}/{1}'.format(url, image))

    cmd.append('-n{0}'.format(self.namespace))

    cmd.append('--confirm')
    return self.openshift_cmd(cmd)
#pylint: disable=too-many-arguments
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
'''Base command for oc '''
cmds = []
if oadm:
cmds = ['/usr/bin/oc', 'adm']
else:
cmds = ['/usr/bin/oc']
cmds.extend(cmd)
rval = {}
results = ''
err = None
if self.verbose:
print ' '.join(cmds)
proc = subprocess.Popen(cmds,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env={'KUBECONFIG': self.kubeconfig})
stdout, stderr = proc.communicate(input_data)
rval = {"returncode": proc.returncode,
"results": results,
"cmd": ' '.join(cmds),
}
if proc.returncode == 0:
if output:
if output_type == 'json':
try:
rval['results'] = json.loads(stdout)
except ValueError as err:
if "No JSON object could be decoded" in err.message:
err = err.message
elif output_type == 'raw':
|
armstrong/armstrong.apps.images | armstrong/apps/images/migrations/0001_initial.py | Python | apache-2.0 | 8,342 | 0.008032 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
    """Create the 'images_image' table: an Image row extends content.Content."""
    # Adding model 'Image'
    db.create_table('images_image', (
        ('content_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['content.Content'], unique=True, primary_key=True)),
        ('image', self.gf('sorl.thumbnail.fields.ImageField')(max_length=100)),
    ))
    db.send_create_signal('images', ['Image'])
def backwards(self, orm):
    """Reverse the migration by dropping the 'images_image' table."""
    # Deleting model 'Image'
    db.delete_table('images_image')
models = {
'arm_access.accessobject': {
'Meta': {'object_name': 'AccessObject'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'arm_sections.section': {
'Meta': {'object_name': 'Section'},
'full_slug': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'to': "orm['arm_sections.Section']", 'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'}),
'summary': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'content.content': {
'Meta': {'object_name': 'Content'},
'access': ('armstrong.core.arm_access.fields.AccessField', [], {}),
'authors': ('armstrong.core.arm_content.fields.authors.AuthorsField', [], {'to': "orm['auth.User']", 'override_field_name': "'authors_override'", 'symmetrical': 'False', 'extra_field_name': "'authors_extra'", 'blank': 'True'}),
'authors_extra': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'authors_override': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'pub_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
| 'pub_status': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'sections': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'content_content_alternates | '", 'null': 'True', 'symmetrical': 'False', 'to': "orm['arm_sections.Section']"}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sites.Site']", 'symmetrical': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'}),
'summary': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'images.image': {
'Meta': {'object_name': 'Image', '_ormbases': ['content.Content']},
'content_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['content.Content']", 'unique': 'True', 'primary_key': 'True'}),
'image': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100'})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'taggit.tag': {
'Meta': {'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'})
},
'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_tagged_items'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db |
abishara/athena_meta | athena/stages/check_reads.py | Python | mit | 5,478 | 0.011318 | import os
import sys
import pysam
import random
from collections import Counter
from .step import StepChunk
from ..mlib import util
from ..mlib.fq_idx import FastqIndex
MIN_SEED_SIZE = 400
MIN_COV = 10.
class CheckReadsStep(StepChunk):
    """Pipeline step that QCs the barcoded input reads and bins seed contigs.

    The step indexes the input fastq, sanity-checks the fraction of
    barcoded reads (in the fastq and, unless cheat seeds are supplied, in
    the reads-to-contigs bam), selects sufficiently long / covered seed
    contigs, and groups them into at most ~4000 bins for downstream
    subassembly.  NOTE: this is Python 2 code (print statements,
    `lambda(c)` tuple parameters, integer division).
    """

    @staticmethod
    def get_steps(options):
        # Yields a single step for the configured input fastq.
        assert os.path.isfile(options.input_fqs), \
            "fastqs {} not found".format(options.input_fqs)
        yield CheckReadsStep(options, options.input_fqs)

    def outpaths(self, final=False):
        # Outputs: a sentinel 'pass' file, the fastq index, and the
        # pickled seed bins.
        paths = {}
        paths['pass.file'] = os.path.join(self.outdir, 'pass')
        paths['index.file'] = FastqIndex.get_index_path(self.nfq_path)
        paths['bins.p'] = self.options.bins_pickle_path
        return paths

    @property
    def outdir(self):
        # Results go under <results_dir>/CheckReadsStep/<step-id>.
        return os.path.join(
            self.options.results_dir,
            self.__class__.__name__,
            str(self),
        )

    def __init__(
        self,
        options,
        fq_path,
    ):
        self.options = options
        self.fq_path = fq_path
        # The fastq is used as-is (the commented line shows an earlier
        # variant that stripped a 3-char extension).
        #self.nfq_path = fq_path[:-3]
        self.nfq_path = fq_path
        util.mkdir_p(self.outdir)

    def __fqid(self):
        # Identifier derived from the grandparent directory name of the
        # fastq path -- assumes a <id>/<subdir>/<file> layout; TODO confirm.
        return os.path.basename(os.path.dirname(os.path.dirname(self.fq_path)))

    def __str__(self):
        return '{}_{}'.format(
            self.__class__.__name__,
            self.__fqid(),
        )

    def run(self):
        self.logger.broadcast('index fastq {}'.format(self.nfq_path))
        with FastqIndex(self.nfq_path, self.logger) as idx:
            fq_num_se_bcoded = idx.num_se_bcoded
            # check for barcodes in fastq
            assert idx.num_bcodes > 0, \
                "no barcodes specified in fastq {}".format(self.fq_path)
            # Warn (or abort) when fewer than 80% of reads carry a barcode.
            if idx.num_se * 0.8 > idx.num_se_bcoded:
                print \
'''lower than expected ({:2.2f}%) of barcoded reads detected in fastq {}
'''.format(100.*idx.num_se_bcoded / idx.num_se, self.fq_path)
                if self.options.force_reads:
                    print \
''' --force_reads specified, proceeding without *fastq and *bam QC passing.
'''
                else:
                    print \
''' specify --force_reads to bypass QC checks. Barcoded subassembly likely to fail.
'''
                    sys.exit(1)

        # use cheat seeds if specified (for debugging)
        if self.options.cheat_seeds:
            self.logger.broadcast('using cheat seeds file: {}'.format(self.options.cheat_seeds))
            seeds = set()
            with open(self.options.cheat_seeds) as fin:
                for line in fin:
                    seed = line.strip()
                    seeds.add(seed)
            self.logger.broadcast(' - loaded {}'.format(len(seeds)))
            seeds = list(seeds)
        # use read mappings from *bam to select seeds without high enough input
        # coverage
        else:
            self.logger.broadcast('get seed contigs from input assembly')
            ctg_covs, bam_num_se_bcoded = self.get_bam_stats()
            # Cross-check: the bam should contain most of the barcoded
            # reads seen in the fastq.
            if bam_num_se_bcoded < 0.8 * fq_num_se_bcoded:
                print \
'''lower than expected amount (~{:2.2f}%) of barcoded reads from fastq {} detected in bam {}
'''.format(
    100.*bam_num_se_bcoded / fq_num_se_bcoded,
    self.fq_path,
    self.options.reads_ctg_bam_path,
)
                if self.options.force_reads:
                    print \
''' --force_reads specified, proceeding without *fastq and *bam QC passing.
'''
                else:
                    print \
''' specify --force_reads to bypass QC checks. Barcoded subassembly likely to fail.
'''
                    sys.exit(1)
            seeds = self.get_seeds(ctg_covs)

        random.shuffle(seeds)
        # strip seed contigs into bins such that no more than 4000 bins
        bins = []
        # Integer division (Python 2) sizes groups so at most ~4000 bins result.
        group_size = max(1, len(seeds) / 4000)
        for i, seed_group in \
            enumerate(util.grouped(seeds, group_size, slop=True)):
            binid = 'bin.{}'.format(i)
            bins.append((binid, seed_group))
        self.logger.broadcast('created {} bins from seeds'.format(len(bins)))
        util.write_pickle(self.options.bins_pickle_path, bins)

        # Touch the sentinel file that marks this step as passed.
        passfile_path = os.path.join(self.outdir, 'pass')
        util.touch(passfile_path)
        self.logger.broadcast('done')

    def get_seeds(self, ctg_covs):
        # Keep contigs that are long enough and covered deeply enough to
        # serve as subassembly seeds.
        ctg_size_map = util.get_fasta_sizes(self.options.ctgfasta_path)
        seeds = ctg_size_map.keys()
        seeds = filter(
            lambda(c): (
                ctg_size_map[c] >= MIN_SEED_SIZE and
                ctg_covs[c] >= MIN_COV
            ),
            seeds,
        )
        self.logger.broadcast(' {} total inputs seeds covering {} bases'.format(
            len(ctg_size_map), sum(ctg_size_map.values())
        ))
        self.logger.broadcast(' {} input seed contigs >= {}bp and >= {}x coverage covering {} bases'.format(
            len(seeds),
            MIN_SEED_SIZE,
            MIN_COV,
            sum(map(lambda(c): ctg_size_map[c], seeds)),
        ))
        return seeds

    def get_bam_stats(self):
        # Results are cached as a pickle so reruns skip the bam pass.
        ctg_counts_path = os.path.join(self.options.working_dir, 'ctg_counts.p')
        if os.path.isfile(ctg_counts_path):
            return util.load_pickle(ctg_counts_path)
        self.logger.broadcast('computing seed coverages (required pass thru *bam)')
        bam_fin = pysam.Samfile(self.options.reads_ctg_bam_path, 'rb')
        ctg_bases = Counter()
        num_se_bcoded = 0
        for i, read in enumerate(bam_fin):
            # Count each barcoded primary alignment once.
            if not read.is_secondary and util.get_barcode(read) != None:
                num_se_bcoded += 1
            if read.is_unmapped:
                continue
            # Accumulate aligned bases per target contig.
            seed_ctg = bam_fin.getrname(read.tid)
            ctg_bases[seed_ctg] += read.query_alignment_length
        # Convert aligned-base totals into per-contig mean coverage.
        ctg_size_map = util.get_fasta_sizes(self.options.ctgfasta_path)
        ctg_covs = Counter(dict(map(
            lambda(c, b) : (c, 1. * b / ctg_size_map[c]),
            ctg_bases.iteritems()
        )))
        util.write_pickle(ctg_counts_path, (ctg_covs, num_se_bcoded))
        return ctg_covs, num_se_bcoded
|
class DateNotFoundError(Exception):
    '''Raised when a requested date is not present in the queried data.

    The offending value is kept on ``self.value`` for callers that
    inspect it.
    '''

    def __init__(self, value):
        # Forward to Exception.__init__ so args, pickling, and generic
        # exception handling behave normally.
        super(DateNotFoundError, self).__init__(value)
        self.value = value

    def __str__(self):
        # Preserve the historical repr()-style message.
        return repr(self.value)
laosiaudi/tensorflow | tensorflow/python/kernel_tests/decode_raw_op_test.py | Python | apache-2.0 | 2,806 | 0.009622 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either expre | ss or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for DecodeRaw op from parsing_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import t | ensorflow as tf
class DecodeRawOpTest(tf.test.TestCase):
  """Unit tests for tf.decode_raw: raw bytes -> fixed-width numeric tensors."""

  def testToUint8(self):
    with self.test_session():
      in_bytes = tf.placeholder(tf.string, shape=[2])
      decode = tf.decode_raw(in_bytes, out_type=tf.uint8)
      # The decoded minor dimension is unknown until runtime.
      self.assertEqual([2, None], decode.get_shape().as_list())

      result = decode.eval(feed_dict={in_bytes: ["A", "a"]})
      self.assertAllEqual([[ord("A")], [ord("a")]], result)

      result = decode.eval(feed_dict={in_bytes: ["wer", "XYZ"]})
      self.assertAllEqual([[ord("w"), ord("e"), ord("r")],
                           [ord("X"), ord("Y"), ord("Z")]], result)

      # All strings in a batch must decode to the same length.
      with self.assertRaisesOpError(
          "DecodeRaw requires input strings to all be the same size, but "
          "element 1 has size 5 != 6"):
        decode.eval(feed_dict={in_bytes: ["short", "longer"]})

  def testToInt16(self):
    with self.test_session():
      in_bytes = tf.placeholder(tf.string, shape=[None])
      decode = tf.decode_raw(in_bytes, out_type=tf.int16)
      self.assertEqual([None, None], decode.get_shape().as_list())

      # Expected values assume little-endian byte order:
      # "Aa" -> ord("A") + ord("a") * 256.
      result = decode.eval(feed_dict={in_bytes: ["AaBC"]})
      self.assertAllEqual([[ord("A") + ord("a") * 256,
                            ord("B") + ord("C") * 256]], result)

      # Input length must be a multiple of the element size.
      with self.assertRaisesOpError(
          "Input to DecodeRaw has length 3 that is not a multiple of 2, the "
          "size of int16"):
        decode.eval(feed_dict={in_bytes: ["123", "456"]})

  def testToFloat16(self):
    with self.test_session():
      in_bytes = tf.placeholder(tf.string, shape=[None])
      decode = tf.decode_raw(in_bytes, out_type=tf.float16)
      self.assertEqual([None, None], decode.get_shape().as_list())

      # Round-trip: serialize a float16 matrix and decode it back.
      expected_result = np.matrix([[1, -2, -3, 4]], dtype=np.float16)
      result = decode.eval(feed_dict={in_bytes: [expected_result.tostring()]})

      self.assertAllEqual(expected_result, result)
if __name__ == "__main__":
tf.test.main()
|
sestrella/ansible | lib/ansible/module_utils/network/nxos/argspec/interfaces/interfaces.py | Python | gpl-3.0 | 2,238 | 0 | #
# -*- coding: utf-8 -*-
# Copyright 2019 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#############################################
# WARNING #
#############################################
#
# This file is auto generated by the resource
# module builder playbook.
#
# Do not edit this file manually.
#
# Changes to this file will be over written
# by the resource module builder.
#
# Changes should be made in the model used to
# generate this file or in the resource module
# builder template.
#
#############################################
"""
The arg spec for the nxos_interfaces module
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
class InterfacesArgs(object):  # pylint: disable=R0903
    """The arg spec for the nxos_interfaces module
    """

    def __init__(self, **kwargs):
        # Nothing to initialize; the class only namespaces `argument_spec`.
        pass

    # Ansible argument specification: a list of per-interface option
    # dicts ('config') plus the desired module 'state'.
    argument_spec = {
        'config': {
            'elements': 'dict',
            'options': {
                'description': {
                    'type': 'str'
                },
                'duplex': {
                    'choices': ['full', 'half', 'auto'],
                    'type': 'str'
                },
                'enabled': {
                    'default': True,
                    'type': 'bool'
                },
                'fabric_forwarding_anycast_gateway': {
                    'type': 'bool'
                },
                'ip_forward': {
                    'type': 'bool'
                },
                'mode': {
                    'choices': ['layer2', 'layer3'],
                    'type': 'str'
                },
                'mtu': {
                    'type': 'str'
                },
                'name': {
                    'required': True,
                    'type': 'str'
                },
                'speed': {
                    'type': 'str'
                }
            },
            'type': 'list'
        },
        'state': {
            'choices': ['merged', 'replaced', 'overridden', 'deleted'],
            'default': 'merged',
            'type': 'str'
        }
    }  # pylint: disable=C0301
|
drepetto/chiplotle | chiplotle/geometry/core/test/test_path_add.py | Python | gpl-3.0 | 1,814 | 0.007166 | from chiplotle.geometry.core.path import Path
from chiplotle.ge | ometry.core.coordinate import Coordinate
from chiplotle.core import errors
from py.test import raises
def test_path_add_01():
    '''A Path and an int cannot be added.'''
    # String-eval form of raises() was removed in pytest 5.0; use the
    # context-manager form instead.
    with raises(TypeError):
        Path([(1, 2), (3, 4)]) + 3


def test_path_add_02():
    '''A Path and a float cannot be added.'''
    with raises(TypeError):
        Path([(1, 2), (3, 4)]) + 3.2


def test_path_radd_02():
    '''A float and a Path cannot be added.'''
    with raises(TypeError):
        3.2 + Path([(1, 2), (3, 4)])
def test_path_add_03():
    '''A Path and a Coordinate can be added.'''
    original = Path([(1, 2), (3, 4)])
    shifted = original + Coordinate(1, 2)
    # The sum is a brand-new Path, translated point-wise.
    assert shifted is not original
    assert isinstance(shifted, Path)
    assert shifted == Path([(2, 4), (4, 6)])
def test_path_radd_03():
    '''A Coordinate and a Path can be added.'''
    original = Path([(1, 2), (3, 4)])
    shifted = Coordinate(1, 2) + original
    # Reflected addition also yields a fresh, translated Path.
    assert shifted is not original
    assert isinstance(shifted, Path)
    assert shifted == Path([(2, 4), (4, 6)])
def test_path_add_04():
    '''A Path and a duple cannot be added.'''
    # String-eval form of raises() was removed in pytest 5.0; use the
    # context-manager form instead.
    a = Path([(1, 2), (3, 4)])
    with raises(TypeError):
        a + (1, 2)


def test_path_radd_04():
    '''A duple and a Path cannot be added.'''
    a = Path([(1, 2), (3, 4)])
    with raises(TypeError):
        (1, 2) + a


def test_path_add_05():
    '''A 2D Path and a triple cannot be added.'''
    a = Path([(1, 2), (3, 4)])
    with raises(TypeError):
        a + (1, 2, 3)


def test_path_add_06():
    '''A Path and a Path cannot be added.'''
    a = Path([(1, 2), (3, 4)])
    b = Path([(2, 3)])
    with raises(TypeError):
        a + b
## in place addition __iadd__ ##

def test_path_iadd_01():
    '''A float and a Path cannot be added.'''
    # String-eval form of raises() was removed in pytest 5.0; use the
    # context-manager form instead.
    t = Path([(1, 2), (3, 4)])
    with raises(TypeError):
        t += 3.2
|
anhstudios/swganh | data/scripts/templates/object/tangible/lair/base/shared_objective_data_terminal.py | Python | mit | 457 | 0.04814 | #### NOTICE: THIS FILE IS AUTOGENER | ATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    """Build the tangible object for the lair objective data terminal."""
    result = Tangible()

    # Client-side template backing this object.
    result.template = "object/tangible/lair/base/shared_objective_data_terminal.iff"
    result.attribute_template_id = -1
    # Localized name lookup: stf file "lair_n", key "data_terminal".
    result.stfName("lair_n","data_terminal")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return result
REGOVAR/Regovar | regovar/core/managers/__init__.py | Python | agpl-3.0 | 954 | 0.001048 | #!env/python3
# coding: utf-8
from core.managers.containers import *
from core.managers.imports import *
from core.managers.analysis_manager import AnalysisManage | r
from core.managers.annotation_manager import AnnotationManager
from core.managers.file_manager import FileManager
from core.managers.filter_manager import FilterEngine
from core.managers.job_manager import JobManager
from core.managers.pipeline_manager import PipelineManager
from core.managers.project_manager import ProjectManager
from core.managers.sample_manager import SampleManager
from core.m | anagers.user_manager import UserManager
from core.managers.search_manager import SearchManager
from core.managers.event_manager import EventManager
from core.managers.subject_manager import SubjectManager
from core.managers.admin_manager import AdminManager
from core.managers.phenotype_manager import PhenotypeManager
from core.managers.panel_manager import PanelManager |
eustislab/horton | horton/cache.py | Python | gpl-3.0 | 14,592 | 0.001234 | # -*- coding: utf-8 -*-
# HORTON: Helpful Open-source Research TOol for N-fermion systems.
# Copyright (C) 2011-2015 T | he HORTON Development Team
#
# This file is part of HORTON.
#
# HORTON is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# HORTON is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
| # GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
#--
'''Avoid recomputation of earlier results and reallocation of existing arrays
In principle, the ``JustOnceClass`` and the ``Cache`` can be used
independently, but in some cases it makes a lot of sense to combine them.
See for example the density partitioning code in ``horton.part``.
'''
import numpy as np, types
from horton.log import log
__all__ = ['JustOnceClass', 'just_once', 'Cache']
class JustOnceClass(object):
    '''Base class for objects whose expensive methods must run only once.

    Subclasses decorate such methods with ``just_once``; the names of the
    methods that already ran are tracked in the ``_done_just_once`` set.
    Calling ``clear`` (or ``__clear__``) forgets that bookkeeping so the
    methods may be executed again, e.g. after the underlying data became
    outdated.
    '''

    def __init__(self):
        # Names of the just_once-decorated methods that have completed.
        self._done_just_once = set()

    def __clear__(self):
        self.clear()

    def clear(self):
        '''Forget which just_once methods were already executed.'''
        self._done_just_once = set()
def just_once(fn):
    '''Decorator that makes a zero-argument method execute at most once.

    After the first successful call the method's name is recorded in the
    instance's ``_done_just_once`` set (managed by ``JustOnceClass``);
    subsequent calls return immediately without re-running the body.

    **Arguments:**

    fn
        The method to wrap. It must take only ``self``.
    '''
    def wrapper(instance):
        if not hasattr(instance, '_done_just_once'):
            raise TypeError('Missing hidden _done_just_once. Forgot to call JustOnceClass.__init__()?')
        # fn.__name__ works on both Python 2 and 3; fn.func_name is Py2-only.
        if fn.__name__ in instance._done_just_once:
            return
        fn(instance)
        instance._done_just_once.add(fn.__name__)
    # Preserve the wrapped function's metadata for introspection.
    wrapper.__name__ = fn.__name__
    wrapper.__doc__ = fn.__doc__
    return wrapper
def _normalize_alloc(alloc):
'''Normalize the alloc argument of the from_alloc and check_alloc methods'''
if not hasattr(alloc, '__len__'):
alloc = (alloc,)
if len(alloc) == 0:
raise TypeError('Alloc can not be an empty list')
return alloc
def _normalize_tags(tags):
'''Normalize the tags argument of the CacheItem constructor'''
if tags is None:
return set([])
else:
return set(tags)
class CacheItem(object):
    '''A container for an object stored in a Cache instance'''

    def __init__(self, value, own=False, tags=None):
        '''
        **Arguments:**

        value
             The object stored in this container

        **Optional arguments:**

        own
             If True, this container will denounce the memory allocated for
             the contained object. This can only be True for a numpy array.
        '''
        self._value = value
        self._valid = True      # flipped to False by clear()
        self._own = own
        self._tags = _normalize_tags(tags)

    @classmethod
    def from_alloc(cls, alloc, tags):
        # Build an item from an allocation spec: an all-int spec is a shape
        # for a new float numpy array; otherwise it is (factory, arg1, ...).
        alloc = _normalize_alloc(alloc)
        if all(isinstance(i, int) for i in alloc):
            # initialize a floating point array
            array = np.zeros(alloc, float)
            log.mem.announce(array.nbytes)
            return cls(array, own=True, tags=tags)
        else:
            # initialize a new object
            return cls(alloc[0](*alloc[1:]), tags=tags)

    def __del__(self):
        # Only owned numpy arrays were announced to the memory logger, so
        # only those get denounced.  The ``log is not None`` guard protects
        # against module teardown order at interpreter shutdown.
        if self._own and log is not None:
            assert isinstance(self._value, np.ndarray)
            log.mem.denounce(self._value.nbytes)

    def check_alloc(self, alloc):
        # Verify that the stored value matches *alloc*; raises TypeError on
        # mismatch.  Mirrors the two spec forms accepted by from_alloc.
        alloc = _normalize_alloc(alloc)
        if all(isinstance(i, int) for i in alloc):
            # check if the array has the correct shape and dtype
            if not (isinstance(self._value, np.ndarray) and
                    self._value.shape == tuple(alloc) and
                    issubclass(self._value.dtype.type, float)):
                raise TypeError('The stored item does not match the given alloc.')
        else:
            # check if the object was initialized with compatible arguments
            try:
                if isinstance(alloc[0], type):
                    # first argument is a class
                    alloc[0].__check_init_args__(self._value, *alloc[1:])
                elif isinstance(alloc[0], types.MethodType):
                    # first argument is something else, assuming a method of a factory class
                    # NOTE(review): relies on module-level ``types``/``np``
                    # imports not visible in this chunk -- confirm.
                    factory = alloc[0].__self__
                    alloc[0].__check_init_args__(factory, self._value, *alloc[1:])
                else:
                    raise NotImplementedError
            except AssertionError:
                raise TypeError('The stored item does not match the given alloc.')

    def check_tags(self, tags):
        # Tags must match exactly (set equality), not merely overlap.
        tags = _normalize_tags(tags)
        if tags != self._tags:
            raise ValueError('Tags do not match.')

    def _get_value(self):
        # Guard against reading an item that clear() has invalidated.
        if not self._valid:
            raise ValueError('This cached item is not valid.')
        return self._value

    # The stored object; raises ValueError once the item has been cleared.
    value = property(_get_value)

    def _get_valid(self):
        return self._valid

    # True until clear() is called.
    valid = property(_get_valid)

    def _get_tags(self):
        return self._tags

    # Tag set, frozen at construction time.
    tags = property(_get_tags)

    def clear(self):
        '''Mark the item as invalid and clear the contents of the object.

        **Returns:** A boolean indicating that clearing was successful
        '''
        self._valid = False
        if isinstance(self._value, np.ndarray):
            # Zero the array in place so reallocation is unnecessary.
            self._value[:] = 0.0
        elif hasattr(self._value, '__clear__') and callable(self._value.__clear__):
            self._value.__clear__()
        else:
            # Value cannot be wiped in place.
            return False
        return True
class NoDefault(object):
    # Sentinel type: lets callers distinguish "no default supplied" from an
    # explicit default value of None.
    pass


# Module-level singleton sentinel instance.
no_default = NoDefault()
def _normalize_key(key):
'''Normalize the key argument(s) of the load and dump methods'''
if hasattr(key, '__len__') and len(key) == 0:
raise TypeError('At least one argument needed to specify a key.')
# upack the key if needed
while len(key) == 1 and isinstance(key, tuple):
key = key[0]
return key
class Cache(object):
    '''Object that stores previously computed results.

    The cache behaves like a dictionary with some extra features that can be
    used to avoid recomputation or reallocation.
    '''

    def __init__(self):
        # Maps normalized key -> CacheItem.
        self._store = {}
    def clear(self, **kwargs):
        '''Clear all items in the cache

        **Optional arguments:**

        dealloc
             When set to True, the items are really removed from memory.

        tags
             Limit the items cleared to those who have at least one tag
             that matches one of the given tags. When this argument is used
             and it contains at least one tag, items with no tags are not
             cleared.
        '''
        # Parse kwargs. This forces the caller to use keywords in order to avoid
        # confusion.
        dealloc = kwargs.pop('dealloc', False)
        tags = kwargs.pop('tags', None)
        if len(kwargs) > 0:
            raise TypeError('Unexpected arguments: %s' % kwargs.keys())
        # actual work
        tags = _normalize_tags(tags)
        # NOTE(review): iterating _store.items() while clear_item may mutate
        # the store is safe on Python 2 (items() returns a copy); on Python 3
        # this would need list(self._store.items()) -- confirm target version.
        for key, item in self._store.items():
            # Empty tag filter clears everything; otherwise require overlap.
            if len(tags) == 0 or len(item.tags & tags) > 0:
                self.clear_item(key, dealloc=dealloc)
def clear_item(self, *key, **kwargs):
'''Clear a selected item from the cache
**Optional argumen |
eSDK/esdk_uc_control_js | open_src/firebreath/fbgen.py | Python | apache-2.0 | 7,056 | 0.009212 | #!/usr/bin/env python
# encoding: utf-8
"""
Utility script to generate/modify Firebreath plug-in projects.
Original Author(s): Ben Loveridge, Richard Bateman
Created: 14 December 2009
License: Dual license model; choose one of two:
New BSD License
http://www.opensource.org/licenses/bsd-license.php
- or -
GNU Lesser General Public License, version 2.1
http://www.gnu.org/licenses/lgpl-2.1.html
Copyright 2009 Packet Pass, Inc. and the Firebreath development team
"""
import os, re, sys, time, uuid
from fbgen.gen_templates import *
from optparse import OptionParser
from ConfigParser import SafeConfigParser
def getTemplateFiles(basePath, origPath=None):
    """
    Recursively collect the template files below *basePath*.

    @param basePath directory from which to start searching for files.
    @param origPath root whose prefix is stripped from returned paths;
           defaults to *basePath* itself on the first call.
    @returns list of strings, each a file path relative to *origPath*.
    """
    if origPath is None:
        origPath = basePath
    prefixLen = len(origPath) + len(os.path.sep)
    found = []
    for entry in os.listdir(basePath):
        fullName = os.path.join(basePath, entry)
        if entry in ('.', '..') or fullName is None:
            continue
        if os.path.isdir(fullName):
            # Descend, keeping origPath fixed so prefixes stay consistent.
            found.extend(getTemplateFiles(fullName, origPath))
        else:
            found.append(fullName[prefixLen:])
    return found
def createDir(dirName):
    """
    Creates a directory, even if it has to create parent directories to do so.

    Recurses toward the filesystem root until an existing parent is found,
    then creates each missing level on the way back down.
    """
    parentDir = os.path.dirname(dirName)
    # Single-argument print() prints identically on Python 2 and 3.
    print("Parent of %s is %s" % (dirName, parentDir))
    if os.path.isdir(parentDir):
        print("Creating dir %s" % dirName)
        os.mkdir(dirName)
    else:
        # Parent missing: create it first, then retry this level.
        createDir(parentDir)
        createDir(dirName)
def Main():
    """
    Parse the commandline and execute the appropriate actions.

    Prompts interactively for any plugin/company values not supplied on the
    command line, generates the GUIDs the templates need, and renders every
    template under fbgen/src into projects/<plugin-ident>/.
    """
    # Define the command-line interface via OptionParser
    usage = "usage: %prog [options]"
    parser = OptionParser(usage)
    parser.add_option("-p", "--plugin-name", dest = "pluginName")
    parser.add_option("-i", "--plugin-identifier", dest = "pluginIdent",
        help = "3 or more alphanumeric characters (underscores allowed after first position)")
    parser.add_option("-c", "--company-name", dest = "companyName")
    parser.add_option("-d", "--company-domain", dest = "companyDomain")
    parser.add_option("-g", "--disable-gui", dest = "disableGUI")
    options, args = parser.parse_args()
    # Only skip the interactive prompts when every value was supplied.
    if options.pluginName and options.pluginIdent and options.companyName and options.companyDomain:
        options.interactive = False
    else:
        options.interactive = True

    # Previously entered answers are cached in .fbgen.cfg next to this script.
    scriptDir = os.path.dirname(os.path.abspath(__file__) )
    cfgFilename = os.path.join(scriptDir, ".fbgen.cfg")
    cfgFile = SafeConfigParser()
    cfgFile.read(cfgFilename)

    # Instantiate the appropriate classes
    plugin = Plugin(name = options.pluginName, ident = options.pluginIdent, disable_gui = options.disableGUI)
    plugin.readCfg(cfgFile)

    company = Company(name = options.companyName)
    company.readCfg(cfgFile)

    if options.interactive:
        try:
            plugin.promptValues()
            company.promptValues()
        except KeyboardInterrupt:
            print "" # get off of the line where the KeyboardInterrupt happened
            sys.exit(0) # terminate gracefully

    # Persist the (possibly updated) answers back into the config object.
    plugin.updateCfg(cfgFile)
    company.updateCfg(cfgFile)
    guid = GUID(ident = plugin.ident, domain = company.domain)

    # Generate the guids needed by the templates
    generatedGuids = AttrDictSimple()
    generatedGuids.GUIDS_TYPELIB = guid.generate("TYPELIB")
    generatedGuids.GUIDS_CONTROLIF = guid.generate("CONTROLIF")
    generatedGuids.GUIDS_CONTROL = guid.generate("CONTROL")
    generatedGuids.GUIDS_JSIF = guid.generate("JSIF")
    generatedGuids.GUIDS_JSOBJ = guid.generate("JSOBJ")
    generatedGuids.GUIDS_EVTSRC = guid.generate("EVTSRC")
    generatedGuids.GUIDS_INSTPROD = guid.generate("INSTPROD")
    generatedGuids.GUIDS_INSTUPGR = guid.generate("INSTUPGR")
    generatedGuids.GUIDS_INSTUPGR64 = guid.generate("INSTUPGR64")
    generatedGuids.GUIDS_companydircomp = guid.generate("companydircomp")
    generatedGuids.GUIDS_installdircomp = guid.generate("installdircomp")

    # Time-related values used in templates
    templateTime = AttrDictSimple(YEAR = time.strftime("%Y"))

    # Save configuration for another go
    cfgFile.write(open(cfgFilename, "wb") )

    # Make sure we can get into the projects directory
    basePath = os.path.join(scriptDir, "projects")
    if not os.path.isdir(basePath):
        try:
            os.mkdir(basePath)
        except:
            print "Unable to create directory", basePath
            sys.exit(1)

    # Try to create a directory for this project
    projPath = os.path.abspath(os.path.join(basePath, "%s" % plugin.ident))
    if os.path.isdir(projPath):
        try:
            overwrite = raw_input("\nDirectory already exists. Continue anyway? [y/N] ")
        except KeyboardInterrupt:
            print "" # get off of the line where the KeyboardInterrupt happened
            sys.exit(0) # terminate gracefully
        if len(overwrite) == 0 or overwrite[0] not in ("Y", "y"):
            print "\nAborting"
            sys.exit(1)
    else:
        try:
            os.mkdir(projPath)
        except:
            print "Failed to create project directory", projPath
            sys.exit(1)

    print "\nProcessing templates"
    srcDir = os.path.join(scriptDir, "fbgen", "src")
    srcDirLen = len(srcDir) + len(os.path.sep)
    templateFiles = getTemplateFiles(srcDir)
    for tpl in templateFiles:
        try:
            tplPath, tplFilename = os.path.split(tpl)
            # Files named Template* are renamed after the plugin identifier.
            if tplFilename.startswith("Template"):
                tplFilename = tplFilename.replace("Template", plugin.ident, 1)
            if tplPath:
                filename = os.path.join(projPath, tplPath, tplFilename)
            else:
                filename = os.path.join(projPath, tplFilename)
            dirname = os.path.dirname(filename)
            if not os.path.isdir(dirname):
                createDir(dirname)
            tplFile = os.path.join("fbgen", "src", tpl)
            print tplFile
            template = Template(tplFile)
            #Special case for binary files
            if(tplFilename == "background.png"):
                # Copy binary assets verbatim rather than templating them.
                input = open(tplFile, "rb")
                output = open(filename, "wb")
                output.write(input.read())
            else:
                f = open(filename, "wb")
                f.write(template.process(plugin, company, guid, generatedGuids, templateTime))
            print " Processed", tpl
        except:
            print " Error processing", tpl
            raise
    print "Done. Files placed in", projPath
if __name__ == "__main__":
    # Run the generator when executed as a script.
    Main()
|
DarioGT/OMS-PluginXML | org.modelsphere.sms/lib/jython-2.2.1/Lib/email/MIMEImage.py | Python | gpl-3.0 | 1,794 | 0.001115 | # Copyright (C) 2001,2002 Python Software Foundation
# Author: barry@zope.com (Barry Warsaw)
"""Class representing image/* type MIME documents.
"""
import imghdr
from email import Errors
from email import Encoders
from email.MIMENonMultipart import MIMENonMultipart
class MIMEImage(MIMENonMultipart):
    """Class for generating image/* type MIME documents."""

    def __init__(self, _imagedata, _subtype=None,
                 _encoder=Encoders.encode_base64, **_params):
        """Create an image/* type MIME document.

        _imagedata is a string containing the raw image data.  If this data
        can be decoded by the standard Python `imghdr' module, then the
        subtype will be automatically included in the Content-Type header.
        Otherwise, you can specify the specific image subtype via the _subtype
        parameter.

        _encoder is a function which will perform the actual encoding for
        transport of the image data.  It takes one argument, which is this
        Image instance.  It should use get_payload() and set_payload() to
        change the payload to the encoded form.  It should also add any
        Content-Transfer-Encoding or other headers to the message as
        necessary.  The default encoding is Base64.

        Any additional keyword arguments are passed to the base class
        constructor, which turns them into parameters on the Content-Type
        header.
        """
        if _subtype is None:
            # Let imghdr sniff the subtype from the raw bytes.
            _subtype = imghdr.what(None, _imagedata)
        if _subtype is None:
            # Python 2 raise syntax -- this module targets Python 2.
            raise TypeError, 'Could not guess image MIME subtype'
        MIMENonMultipart.__init__(self, 'image', _subtype, **_params)
        self.set_payload(_imagedata)
        # Encode the payload in place (base64 by default) and set the
        # corresponding transfer-encoding headers.
        _encoder(self)
|
SalesforceFoundation/CumulusCI | cumulusci/core/template_utils.py | Python | bsd-3-clause | 1,409 | 0 | from functools import lru_cache
from faker import Faker
from jinja2 import Template
class StringGenerator:
    """Adapter that re-invokes a callable each time it is rendered.

    Wrapping a zero-argument callable lets a template reference behave like
    a function call on every interpolation::

        >>> x = StringGenerator(datetime.today().isoformat)
        >>> print(f"{x}")
        2019-09-23T11:49:01.994453

        >>> x = StringGenerator(lambda:str(random.random()))
        >>> print(f"{x}")
        0.795273959965055
        >>> print(f"{x}")
        0.053061903749985206
    """

    def __init__(self, func):
        # The wrapped callable, invoked by both __str__ and __call__.
        self.func = func

    def __call__(self, *args, **kwargs):
        # Forward any arguments straight through to the callable.
        return self.func(*args, **kwargs)

    def __str__(self):
        return self.func()
class FakerTemplateLibrary:
    """A Jinja template library to add the faker.xyz objects to templates"""

    # NOTE(review): this class attribute appears unused -- instances set
    # self.faker in __init__; candidate for removal.
    _faker = None

    def __init__(self, locale=None):
        # Locale string understood by Faker (e.g. "en_US"); None = default.
        self.locale = locale
        self.faker = Faker(self.locale)

    def __getattr__(self, name):
        # Unknown attribute lookups become lazy generators: library.name
        # yields a StringGenerator that calls Faker's formatter on every
        # render, so each interpolation produces fresh fake data.
        return StringGenerator(
            lambda *args, **kwargs: self.faker.format(name, *args, **kwargs)
        )
# Shared default library instance built with the default locale.
faker_template_library = FakerTemplateLibrary()

# Memoize compiled Jinja templates: format_str is called repeatedly with the
# same template strings, so cache up to 512 compilations.
Template = lru_cache(512)(Template)


def format_str(value, variables=None, fake=faker_template_library):
    # Render *value* as a Jinja template when it looks like one (a str
    # containing "{"); any other value passes through unchanged.  ``fake``
    # is exposed to the template for generating fake data.
    variables = variables or {}
    if isinstance(value, str) and "{" in value:
        value = Template(value).render(fake=fake, **variables)
    return value
|
gov-cjwaszczuk/notifications-admin | app/main/views/new_password.py | Python | mit | 2,038 | 0.003925 | from datetime import datetime
import json
from flask import (render_template, url_for, redirect, flash, session, current_app)
from itsdangerous import SignatureExpired
from notifications_utils.url_safe_token import check_token
from app import user_api_client
from app.main import main
from app.main.forms import NewPasswordForm
from app.main.views.two_factor import log_in_user
@main.route('/new-password/<path:token>', methods=['GET', 'POST'])
def new_password(token):
    """Password-reset landing page reached from the emailed link.

    Validates the signed token, shows the new-password form, then hands the
    user over to the second-factor step (or logs them straight in for
    email-auth users).
    """
    # Verify the signed, time-limited token embedded in the reset link.
    try:
        token_data = check_token(token, current_app.config['SECRET_KEY'], current_app.config['DANGEROUS_SALT'],
                                 current_app.config['EMAIL_EXPIRY_SECONDS'])
    except SignatureExpired:
        flash('The link in the email we sent you has expired. Enter your email address to resend.')
        return redirect(url_for('.forgot_password'))

    email_address = json.loads(token_data)['email']
    user = user_api_client.get_user_by_email(email_address)
    # A password change newer than the token means the link was already used.
    if user.password_changed_at and datetime.strptime(user.password_changed_at, '%Y-%m-%d %H:%M:%S.%f') > \
            datetime.strptime(json.loads(token_data)['created_at'], '%Y-%m-%d %H:%M:%S.%f'):
        flash('The link in the email has already been used')
        return redirect(url_for('main.index'))

    form = NewPasswordForm()
    if form.validate_on_submit():
        user_api_client.reset_failed_login_count(user.id)
        # Stash the pending credentials in the session for the 2FA step.
        session['user_details'] = {
            'id': user.id,
            'email': user.email_address,
            'password': form.new_password.data}
        if user.auth_type == 'email_auth':
            # they've just clicked an email link, so have done an email auth journey anyway. Just log them in.
            return log_in_user(user.id)
        else:
            # send user a 2fa sms code
            user_api_client.send_verify_code(user.id, 'sms', user.mobile_number)
            return redirect(url_for('main.two_factor'))
    else:
        return render_template('views/new-password.html', token=token, form=form, user=user)
|
CodieCodemonkey/PyCog | examples/eight_queens.py | Python | lgpl-3.0 | 5,809 | 0.001549 | """Backtracking example: 8-queens problem."""
if __name__ == "__main__":
import sys
import os.path as op
sys.path.append(op.abspath(op.join('..', 'packages')))
from pycog.statemachine import *
from pycog.exceptions import *
from pycog.backtrack import *
from pycog.utility.diagram import diagram
# Strategy:
#
# The states are ordered pairs (row, col) for each of the 64 squares on the
# board. The state has two possible values: False if there is no queen on the
# square, True otherwise. When we enter a state we record the coordinates of
# the associated square as a queen location. When we backtrack we erase that
# record.
#
# We could allow a transition from any square to any other one, but we'll use
# the transitions we assign to each square limit the paths. For any square
# (row, col) we'll only consider transitions into the squares of the next
# column, that are not in the same row. So there are 7 candidate transitions
# from any square, except for the ones in the last column, which all transition
# to state "final".
#
# The transition tests will allow check for queen on the same diagonal or row
# as the candidate square. There is no need to check for a queen in the same
# column because the choice of transitions precludes that possibility.
def transition_test(fsm, cur_square, next_square):
    """
    Transition test

    Return True when a queen on *next_square* would not be attacked by any
    queen already in fsm.queens, looking only at its row and the two
    leftward diagonals.  Columns need no check: the transition strategy
    (see EightQueens) never places two queens in one column.
    """
    row, col = next_square

    # Row conflict: any queen in the same row, in an earlier column?
    if any((row, c) in fsm.queens for c in range(col)):
        return False

    # Upper-left diagonal from (row, col).
    for offset in range(1, min(row, col) + 1):
        if (row - offset, col - offset) in fsm.queens:
            return False

    # Lower-left diagonal from (row, col).
    for offset in range(1, min(7 - row, col) + 1):
        if (row + offset, col - offset) in fsm.queens:
            return False

    return True
class EightQueens(Backtracking, StateMachine):
    """
    Solve the 8-queens problem using backtracking.

    State Strategy:
        The state names are 'init', 'final' and the coordinates (row, col) on
        the chessboard.  There is no state data, the names contain all the
        information we need.

    Transition Strategy:
        'init' transitions to the first 4 rows of column 0.  We don't bother
        transitioning to the last 4 rows because by symmetry if there is a
        solution with a queen in the last 4 squares of the first column, then
        one can flip the board to get a solution in the first 4 rows.

        Each square in any given column transitions to all the squares of the
        next column, except for the squares on the same row or diagonal.

        All squares of the last column transition to 'final'.

    Attributes:
        queens: Set of (row, col) coordinates, each being the position of one
            of the eight queens.
    """
    def __init__(self):
        super().__init__(initial='init')
        self.queens = set()

        # Add states for each square
        for row in range(8):
            for col in range(8):
                self.add_state((row, col), activity=EightQueens.place_queen)

        # Add transitions
        for row in range(8):
            # Transitions from 'init' and to 'final'.
            if row < 4: self.add_transition('init', (row, 0))
            self.add_transition((row, 7), 'final')
            for col in range(7):
                for next_row in range(8):
                    # Short circuit impossible transitions
                    # (same row, or the immediately adjacent diagonal).
                    if next_row == row: continue
                    if next_row == row + 1: continue
                    if next_row == row - 1: continue
                    self.add_transition((row, col), (next_row, col + 1),
                                        transition_test)

    def place_queen(self):
        """In a square state, meaning we place a queen on this square."""
        # current_state is the (row, col) name of the active square state.
        self.queens.add(self.current_state)

    @state("init")
    def init(self):
        """
        Starting state for our search.

        This state is needed so that we have something to backtrack to in order
        to choose a different first column queen.  It isn't absolutely
        necessary since we know there is a solution with a queen in the first
        row of the first column.
        """
        pass

    @state("final", accepting=True)
    def final(self):
        """A solution is found, draw the board."""
        raise Accept()

    def draw(self):
        """Draw the board as text."""
        sys.stdout.write('\n')
        for row in range(8):
            for col in range(8):
                if (row, col) in self.queens:
                    sys.stdout.write(' Q ')
                else:
                    sys.stdout.write(' . ')
            sys.stdout.write('\n')

    def on_backtrack(self, occ):
        """Undo the placing of a queen in response to backtracking."""
        super().on_backtrack(occ)
        # Uncomment these two lines to display the state of the board each time
        # backtracking occurs.
        # print()
        # self.draw()
        if occ.state not in ['init', 'final']:
            # NOTE(review): removes current_state rather than occ.state --
            # presumably they coincide at backtrack time; confirm against
            # pycog's Backtracking semantics.
            self.queens.remove(self.current_state)

    def on_exhausted(self):
        """
        Handle the case where no solution can be found.

        This should never happen.
        """
        raise Exception("No solution found!")
if __name__ == "__main__":
    # Solve, print the resulting board, and dump a GraphViz diagram of the
    # state machine.
    solver = EightQueens()
    solver.run()
    solver.draw()

    with open("eight_queens.gv", "w") as gv_file:
        diagram(solver, gv_file)
gillett-hernandez/project-euler | Python/problem_14.py | Python | mit | 954 | 0.019916 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Author: Gillett Hernandez
# @ | Date: | 2016-07-14 17:06:40
# @Last Modified by: Gillett Hernandez
# @Last Modified time: 2016-09-03 00:19:08
def Collatz(n, seen={}):
    """Return (chain_length, chain) for the Collatz sequence starting at n.

    ``chain`` is the full sequence down to 1 and ``chain_length`` its number
    of terms.  Results are memoized in the *intentionally* shared mutable
    default ``seen`` (start value -> [length, chain]), which persists across
    calls.
    """
    if n in seen:
        # Normalize the cached [length, chain] list to the tuple the
        # fresh-computation path returns.
        return tuple(seen[n])
    steps_list = [n]
    steps = 1
    i = n
    while i > 1:
        steps += 1
        if i % 2 == 0:
            # Floor division keeps i an int (i / 2 yields floats on Python 3).
            i //= 2
        else:
            i = 3 * i + 1
        if i in seen:
            # steps already counts i and seen[i][0] counts i again, so
            # subtract the duplicate (the original code over-counted by 1
            # here).  steps_list does not yet contain i.
            full_steps = steps - 1 + seen[i][0]
            full_list = steps_list + seen[i][1]
            seen[n] = [full_steps, full_list]
            return full_steps, full_list
        steps_list.append(i)
    seen[n] = [steps, steps_list]
    # Every suffix of the chain is itself a complete chain; memoize them all.
    for idx, k in enumerate(steps_list):
        seen[k] = [len(steps_list) - idx, steps_list[idx:]]
    return steps, steps_list
# Search every start value below one million for the longest Collatz chain;
# ``ind`` ends up holding the winning start value (Project Euler problem 14).
maxChain = 0
for x in range(1,1000000):
    chain=Collatz(x)
    if chain[0] > maxChain:
        maxChain=chain[0]
        ind=x  # best start value found so far

if __name__ == '__main__':
    # import project_euler_offline
    print(ind)
    # print("Correct" if project_euler_offline.check(prob_n,res) else "Incorrect")
|
nephantes/dolphin-tools | docs/conf.py | Python | mit | 9,231 | 0.006067 | # -*- coding: utf-8 -*-
#
# dolphintools documentation build configuration file, created by
# sphinx-quickstart on Mon Oct 12 22:39:14 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'dolphintools'
copyright = u'2015, Alper Kucukural'
author = u'Alper Kucukural'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0.0'
# The full version, including alpha/beta/rc tags.
release = '1.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_t | itle.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favico | n of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'dolphintoolsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'dolphintools.tex', u'dolphintools Documentation',
u'Alper Kucukural', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------- |
webmasterraj/FogOrNot | flask/lib/python2.7/site-packages/pandas/core/internals.py | Python | gpl-2.0 | 150,471 | 0.000764 | import copy
import itertools
import re
import operator
from datetime import datetime, timedelta
from collections import defaultdict
import numpy as np
from pandas.core.base import PandasObject
from pandas.core.common import (_possibly_downcast_to_dtype, isnull,
_NS_DTYPE, _TD_DTYPE, ABCSeries, is_list_like,
ABCSparseSeries, _infer_dtype_from_scalar,
is_null_datelike_scalar, _maybe_promote,
is_timedelta64_dtype, is_datetime64_dtype,
array_equivalent, _maybe_convert_string_to_object,
is_categorical)
from pandas.core.index import Index, MultiIndex, _ensure_index
from pandas.core.indexing import maybe_convert_indices, length_of_indexer
from pandas.core.categorical import Categorical, maybe_to_categorical
import pandas.core.common as com
from pandas.sparse.array import _maybe_to_sparse, SparseArray
import pandas.lib as lib
import pandas.tslib as tslib
import pandas.computation.expressions as expressions
from pandas.util.decorators import cache_readonly
from pandas.tslib import Timestamp, Timedelta
from pandas import compat
from pandas.compat import range, map, zip, u
from pandas.tseries.timedeltas import _coerce_scalar_to_timedelta_type
from pandas.lib import BlockPlacement
class Block(PandasObject):
    """
    Canonical n-dimensional unit of homogeneous dtype contained in a pandas
    data structure

    Index-ignorant; let the container take care of that
    """
    __slots__ = ['_mgr_locs', 'values', 'ndim']

    # Dtype-classification flags; typed subclasses flip the relevant ones.
    is_numeric = False
    is_float = False
    is_integer = False
    is_complex = False
    is_datetime = False
    is_timedelta = False
    is_bool = False
    is_object = False
    is_categorical = False
    is_sparse = False
    _can_hold_na = False
    _downcast_dtype = None
    # Whether same-dtype blocks may be merged during consolidation.
    _can_consolidate = True
    _verify_integrity = True
    _validate_ndim = True
    _ftype = 'dense'
    _holder = None

    def __init__(self, values, placement, ndim=None, fastpath=False):
        # Infer dimensionality from the values when not given explicitly.
        if ndim is None:
            ndim = values.ndim
        elif values.ndim != ndim:
            raise ValueError('Wrong number of dimensions')
        self.ndim = ndim
        self.mgr_locs = placement
        self.values = values

        # Each row of values must map to exactly one manager location.
        if len(self.mgr_locs) != len(self.values):
            raise ValueError('Wrong number of items passed %d,'
                             ' placement implies %d' % (
                                 len(self.values), len(self.mgr_locs)))
    @property
    def _consolidate_key(self):
        # Blocks sharing this key are candidates for merging during
        # consolidation.
        return (self._can_consolidate, self.dtype.name)

    @property
    def _is_single_block(self):
        return self.ndim == 1

    @property
    def is_view(self):
        """ return a boolean if I am possibly a view """
        # A non-None base means the ndarray shares memory with another object.
        return self.values.base is not None

    @property
    def is_datelike(self):
        """ return True if I am a datelike (datetime or timedelta) block """
        return self.is_datetime or self.is_timedelta

    def is_categorical_astype(self, dtype):
        """
        validate that we have a astypeable to categorical,
        returns a boolean if we are a categorical
        """
        if com.is_categorical_dtype(dtype):
            if dtype == com.CategoricalDtype():
                return True

            # this is a pd.Categorical, but is not
            # a valid type for astypeing
            raise TypeError("invalid type {0} for astype".format(dtype))
        return False
def to_dense(self):
return self.values.view()
@property
def fill_value(self):
return np.nan
@property
def mgr_locs(self):
return self._mgr_locs
@property
def array_dtype(self):
""" the dtype to return if I want to construct this block as an array """
return self.dtype
def make_block_same_class(self, values, placement, copy=False, fastpath=True,
**kwargs):
"""
Wrap given values in a block of same type as self.
`kwargs` are used in SparseBlock override.
"""
if copy:
values = values.copy()
return make_block(values, placement, klass=self.__class__,
fastpath=fastpath, **kwargs)
@mgr_locs.setter
def mgr_locs(self, new_mgr_locs):
    # normalize raw input (e.g. an array of positions) to a BlockPlacement
    if not isinstance(new_mgr_locs, BlockPlacement):
        new_mgr_locs = BlockPlacement(new_mgr_locs)
    self._mgr_locs = new_mgr_locs
def __unicode__(self):
    """Concise display string: class name, placement/shape and dtype."""
    # don't want to print out all of the items here
    name = com.pprint_thing(self.__class__.__name__)
    if self._is_single_block:
        result = '%s: %s dtype: %s' % (
            name, len(self), self.dtype)
    else:
        shape = ' x '.join([com.pprint_thing(s) for s in self.shape])
        result = '%s: %s, %s, dtype: %s' % (
            name, com.pprint_thing(self.mgr_locs.indexer), shape,
            self.dtype)
    return result
def __len__(self):
    # number of items in this block == length of the values array
    return len(self.values)
def __getstate__(self):
    # pickle support: persist the raw placement indexer and the values
    return self.mgr_locs.indexer, self.values

def __setstate__(self, state):
    # rebuild placement from the pickled indexer; ndim is re-derived
    # from the restored values
    self.mgr_locs = BlockPlacement(state[0])
    self.values = state[1]
    self.ndim = self.values.ndim
def _slice(self, slicer):
    """ return a slice of my values """
    return self.values[slicer]
def reshape_nd(self, labels, shape, ref_items):
    """
    Return a new n-dimensional block built from this 2-d block.

    Parameters
    ----------
    labels : list of new axis labels
    shape : new shape
    ref_items : new ref_items

    return a new block that is transformed to a nd block
    """
    # delegates to the module-level helper; values are transposed so the
    # item axis comes first
    return _block2d_to_blocknd(
        values=self.get_values().T,
        placement=self.mgr_locs,
        shape=shape,
        labels=labels,
        ref_items=ref_items)
def getitem_block(self, slicer, new_mgr_locs=None):
    """
    Perform __getitem__-like, return result as block.

    As of now, only supports slices that preserve dimensionality.
    """
    if new_mgr_locs is None:
        # derive the new placement from the axis-0 component of the slicer
        axis0 = slicer[0] if isinstance(slicer, tuple) else slicer
        new_mgr_locs = self.mgr_locs[axis0]
    sliced = self._slice(slicer)
    if self._validate_ndim and sliced.ndim != self.ndim:
        raise ValueError("Only same dim slicing is allowed")
    return self.make_block_same_class(sliced, new_mgr_locs)
@property
def shape(self):
    # shape of the underlying values array
    return self.values.shape

@property
def itemsize(self):
    # size in bytes of a single element of the underlying array
    return self.values.itemsize

@property
def dtype(self):
    # dtype of the underlying values array
    return self.values.dtype
@property
def ftype(self):
    """Combined "dtype:storage-flavour" tag, e.g. ``float64:dense``."""
    return str(self.dtype) + ":" + str(self._ftype)
def merge(self, other):
    # combine this block with another via the module-level helper
    return _merge_blocks([self, other])
def reindex_axis(self, indexer, method=None, axis=1, fill_value=None,
                 limit=None, mask_info=None):
    """
    Reindex using pre-computed indexer information

    Missing positions are filled with ``fill_value`` (defaults to this
    block's fill_value). ``method`` and ``limit`` are accepted for API
    compatibility but not used here.
    """
    if axis < 1:
        raise AssertionError('axis must be at least 1, got %d' % axis)
    if fill_value is None:
        fill_value = self.fill_value
    new_values = com.take_nd(self.values, indexer, axis,
                             fill_value=fill_value, mask_info=mask_info)
    return make_block(new_values,
                      ndim=self.ndim, fastpath=True,
                      placement=self.mgr_locs)
def get(self, item):
    # label-based access: resolve the label via the items index
    # NOTE(review): self.items is not defined in this chunk -- presumably
    # provided by the owning manager or a subclass; confirm
    loc = self.items.get_loc(item)
    return self.values[loc]

def iget(self, i):
    # position-based access into the values
    return self.values[i]
def set(self, locs, values, check=False):
    """
    Modify Block in-place with new item value.

    ``check`` is accepted for API compatibility and ignored here.

    Returns
    -------
    None
    """
    self.values[locs] = values
def delete(self, loc):
"""
Delete given loc(-s) from block in-place.
"""
self.values = np.delete(self.values, loc, 0)
self.mgr_locs = self.m | gr_ |
greenoaktree/gr-burst | python/qa_synchronizer_v2.py | Python | gpl-3.0 | 1,453 | 0.009635 | # GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest
from gnuradio import blocks
import numpy
from time import sleep
import synchronizer_v2
import pmt
import scipy.io as sio
import os
class qa_synchronizer_v2 (gr_unittest.TestCase):
    """QA tests for the synchronizer_v2 burst synchronizer block.

    Fixes: dataset-delimiter artifacts (" | ") had corrupted the burst
    assignments in setUp; ``tearDown`` used a bare ``None`` expression
    instead of ``pass``; prints parenthesized (valid in both py2 and py3).
    """

    def setUp(self):
        # 2 samples per symbol at a 100 kHz sample rate
        sps = 2
        Fs = 100000.0
        self.syncBlock = synchronizer_v2.synchronizer_v2(sps, Fs)
        print(os.getcwd())
        # reference bursts produced by the matlab implementation
        x = sio.loadmat('../../matlab/gr_impl_test.mat')
        dt = numpy.dtype(numpy.complex64)
        self.burst1 = x['burst1'].transpose().astype(dt)
        self.burst2 = x['burst2'].transpose().astype(dt)
        self.burst3 = x['burst3'].transpose().astype(dt)
        self.burst4 = x['burst4'].transpose().astype(dt)
        self.burst5 = x['burst5'].transpose().astype(dt)

    def tearDown(self):
        pass

    def test_001_t(self):
        print('Running Synchronizer Test 1')
        # hand the first reference burst to the block as a PMT message
        pmtMsg = pmt.cons(pmt.PMT_NIL, pmt.to_pmt(self.burst1))
        self.syncBlock.enableDebugMode('/tmp/gr_test1.mat')
        self.syncBlock.handler(pmtMsg)
if __name__ == '__main__':
    # run the QA suite, writing results to qa_synchronizer_v2.xml
    gr_unittest.run(qa_synchronizer_v2, "qa_synchronizer_v2.xml")
|
Happyholic1203/VNF-Deployment | lib/utils.py | Python | mit | 5,154 | 0.002134 | from solver import *
from ds import *
import random
import time
class TestCase(object):
    """A randomized VNF-placement test case over a tree topology."""

    def __init__(self, name, flowSettings=None,
                 depth=3, fanout=2, minFlowAmount=0, maxFlowAmount=1,
                 minNumFlowsPerHost=2, maxNumFlowsPerHost=5):
        """Build a tree of the given depth/fanout and attach random flows.

        flowSettings, when given, is a list of dicts with keys
        'minFlowAmount', 'maxFlowAmount' and 'ratio' (ratios should sum
        to 1.0 -- see getFlowAmount). When None, a single uniform setting
        is synthesized from min/maxFlowAmount.
        """
        if not self.checkFlowSettings(flowSettings):
            raise Exception('Invalid flow settings: %r' % (flowSettings))
        self.name = name
        self.flowSettings = flowSettings
        self.minNumFlowsPerHost = minNumFlowsPerHost
        self.maxNumFlowsPerHost = maxNumFlowsPerHost
        self.depth = depth
        self.fanout = fanout
        if self.flowSettings is None:
            # default: one bucket covering the whole amount range
            self.flowSettings = [
                {
                    'minFlowAmount': minFlowAmount,
                    'maxFlowAmount': maxFlowAmount,
                    'ratio': 1
                }
            ]
        print '> Building tree... (depth = %d, fanout = %d, maxNumFlowsPerHost = %d)'\
            % (self.depth, self.fanout, self.maxNumFlowsPerHost)
        self.tree = self.buildTree(self.depth, self.fanout)
        print '> Done building tree'
def reset(self):
    # clear per-run state on the tree; returns self for chaining
    self.tree.reset()
    return self
def buildTree(self, depth, fanout):
    """Create a Tree and attach a random number of random flows per host."""
    tree = Tree(depth=depth, fanout=fanout)
    hosts = tree.getHosts()
    for h in hosts:
        for _ in range(self.getNumFlowsPerHost()):
            h.addFlow(self.getFlowAmount())
    return tree
def getNumFlowsPerHost(self):
    # uniformly random flow count within the configured inclusive bounds
    return random.randint(
        self.minNumFlowsPerHost,
        self.maxNumFlowsPerHost)
def getFlowAmount(self):
    """Sample a flow amount from the weighted flowSettings buckets.

    A bucket is selected with probability equal to its 'ratio', then an
    amount is drawn uniformly from that bucket's [min, max] range.
    """
    uniform = random.uniform(0, 1.0)
    accumulatedProb = 0
    for flowSetting in self.flowSettings:
        accumulatedProb += flowSetting['ratio']
        if accumulatedProb >= uniform:
            return random.uniform(flowSetting['minFlowAmount'], flowSetting['maxFlowAmount'])
    raise Exception('The ratio in flowSettings do not add up to 1.0')
def run(self, solvers, drawPng=None, pngOutputDir=None, printVmPlacement=True, printSummary=True):
    """Run every solver against this test case and collect statistics.

    Fixes: dataset-delimiter artifacts (" | ") had corrupted the
    'flowAmount' summary dict; print statements parenthesized so the
    module parses under both Python 2 and Python 3.

    Returns a result dict with topology/flow statistics and one record
    (solver name, VM count, cost) per solver.
    """
    for s in solvers:
        assert isinstance(s, Solver)
    if not pngOutputDir:
        pngOutputDir = '.'
    allFlows = self.tree.getFlows()
    numFlows = len(allFlows)
    numHosts = len(self.tree.getHosts())
    flowAmounts = [f.getCapacity() for f in allFlows]
    flowsPerHostList = [len(flows) for flows in [h.getFlows() for h in self.tree.getHosts()]]
    totalFlowAmount = self.tree.getTotalFlowAmount()
    print('** TestCase: %s' % self.name)
    print('** Number of Hosts: %d' % numHosts)
    print('** Number of Flows per Host: (min, max, avg) = (%d, %d, %.2f)' % (
        min(flowsPerHostList),
        max(flowsPerHostList),
        (float(numFlows) / float(numHosts))))
    print('** Total Number of Flows: %d' % numFlows)
    print('** Flow Amount: (min, max, avg) = (%.2f, %.2f, %.2f)' % (
        min(flowAmounts),
        max(flowAmounts),
        (totalFlowAmount / float(numFlows))))
    print('** Total Flow Amount: %.2f' % totalFlowAmount)
    result = {
        'name': self.name,
        'numHosts': numHosts,
        'flowsPerHost': {
            'min': min(flowsPerHostList),
            'max': max(flowsPerHostList),
            'avg': (float(numFlows) / float(numHosts))
        },
        'totalNumFlows': numFlows,
        'flowAmount': {
            'min': min(flowAmounts),
            'max': max(flowAmounts),
            'avg': (totalFlowAmount / float(numFlows))
        },
        'totalFlowAmount': totalFlowAmount
    }
    records = []
    for solver in solvers:
        startTime = time.time()
        solver.solve(self.tree)
        print('** Runtime [%s]: %.2f seconds' %
              (solver.getName(), time.time() - startTime))
        if drawPng:
            self.tree.draw(
                '%s/%s(%s).png' % (pngOutputDir, self.name, solver.getName()),
                showFlowCapacity=True,
                showFlowVm=True)
        records.append({
            'solver': solver.getName(),
            'numVms': solver.getNumVms(),
            'cost': solver.getTotalCost()
        })
        # print solver.getSolution(
        #     showVmPlacement=printVmPlacement,
        #     showSummary=printSummary)
        # reset both the topology and the solver before the next run
        self.reset()
        solver.reset()
    result['records'] = records
    self.printRecords(records)
    return result
@staticmethod
def printRecords(records):
    """Pretty-print a table of (solver, numVms, cost) records."""
    # right-aligned columns: 20 chars for the name, 10 for each number
    fmt = '{:>20}{:>10}{:>10}'
    print fmt.format('solver', 'numVms', 'cost')
    print '---------------------------------------------'
    for rec in records:
        print fmt.format(
            rec['solver'],
            rec['numVms'],
            '%.2f' % rec['cost'])
    print ''
# TODO
def checkFlowSettings(self, flowSettings):
    # placeholder validation: every value (including None) is currently
    # accepted; real checks are still to be written
    return True
msfrank/Higgins | higgins/server.py | Python | lgpl-2.1 | 8,312 | 0.003128 | # Higgins - A multi-media server
# Copyright (c) 2007-2009 Michael Frank <msfrank@syntaxjockey.com>
#
# This program is free software; for license information see
# the COPYING file.
import sys, pwd, grp, os, signal
from django.core.management import call_command as django_admin_command
from twisted.internet import reactor
from twisted.internet.defer import maybeDeferred
from twisted.python import usage
from twisted.python.logfile import LogFile
from higgins.site_settings import site_settings
from higgins.conf import conf
from higgins.loader import PluginLoader
from higgins import logger, VERSION
class ServerException(Exception):
    """Fatal server error carrying a human-readable reason value."""

    def __init__(self, reason):
        # reason may be any object; it is stringified for display
        self.reason = reason

    def __str__(self):
        return str(self.reason)
class Server(object, logger.Loggable):
    # domain tag attached to log messages emitted by this class
    log_domain = "core"
def __init__(self, env, create=False, debug=False, verbosity=logger.LOG_WARNING):
    """
    Initialize the application.

    Fixes: dataset-delimiter artifacts (" | ") removed; Python-2-only
    ``except X, e`` syntax replaced with ``except X as e`` and octal
    literals written as ``0o755`` (both forms valid from py2.6 onward).

    :param env: path to the server environment directory
    :param create: create the environment directory tree if missing
    :param debug: run in the foreground and log to stdout
    :param verbosity: initial logging verbosity
    :raises ServerException: on missing dependencies, an unusable
        environment directory, or another running instance
    """
    # check runtime dependencies
    try:
        import twisted
        import django
        import xml.etree.ElementTree
        import mutagen
        import setuptools
    except ImportError as e:
        raise ServerException("%s: make sure the corresponding python package is installed and in your PYTHONPATH." % e)
    # create environment directory if necessary
    if create:
        try:
            os.makedirs(env, 0o755)
            os.makedirs(os.path.join(env, 'logs'), 0o755)
            os.makedirs(os.path.join(env, 'plugins'), 0o755)
            os.makedirs(os.path.join(env, 'media'), 0o755)
        except Exception as e:
            raise ServerException("Startup failed: couldn't create directory %s (%s)" % (env, e.strerror))
    # verify that environment directory exists and is sane
    if not os.access(env, os.F_OK):
        raise ServerException("Startup failed: Environment directory %s doesn't exist." % env)
    if not os.access(env, os.R_OK):
        raise ServerException("Startup failed: %s is not readable by Higgins." % env)
    if not os.access(env, os.W_OK):
        raise ServerException("Startup failed: %s is not writable by Higgins." % env)
    if not os.access(env, os.X_OK):
        raise ServerException("Startup failed: %s is not executable by Higgins." % env)
    # set HIGGINS_DIR and DATABASE name
    site_settings['HIGGINS_DIR'] = env
    site_settings['DATABASE_NAME'] = os.path.join(env, "database.dat")
    # if debug flag is specified, then don't request to daemonize and log to stdout
    if debug:
        self.daemonize = False
        self.observer = logger.StdoutObserver(colorized=True, verbosity=verbosity)
    # otherwise daemonize and log to logs/higgins.log
    else:
        self.daemonize = True
        self.observer = logger.LogfileObserver(LogFile('higgins.log', os.path.join(env, 'logs')), verbosity=verbosity)
    self.observer.start()
    self.log_info("Higgins version is %s" % VERSION)
    if create:
        self.log_info("created new environment in " + env)
    # set pid file; refuse to start if another instance already holds it
    self._pidfile = os.path.join(env, "higgins.pid")
    if os.path.exists(self._pidfile):
        try:
            f = open(self._pidfile)
            pid = int(f.read())
            f.close()
        except Exception as e:
            self.log_error("failed to parse PID file '%s': %s" % (self._pidfile, e))
        else:
            self.log_error("Startup failed: another instance is already running with PID %i" % pid)
            raise ServerException("failed to start Higgins")
    # we load conf after parsing options, but before syncing the db tables
    conf.load(os.path.join(env, 'settings.dat'))
    # create and upgrade db tables if necessary
    #django_admin_command('syncdb')
    from higgins.core.upgradedb import check_version
    check_version(site_settings['DATABASE_NAME'])
    # load the list of plugins
    self._plugins = PluginLoader()
def _caughtSignal(self, signum, stack):
    # signal handler installed by run(); shuts the server down cleanly
    self.log_debug("caught signal %i" % signum)
    self.stop()
def run(self):
    """
    Pass control of the application to twisted.

    Writes the PID file, starts the core service and the reactor, and on
    exit flushes configuration and removes the PID file.
    """
    try:
        open(self._pidfile, 'wb').write(str(os.getpid()))
    except Exception, e:
        self.log_error("failed to create PID file '%s': %s" % (self._pidfile, e))
        raise ServerException("failed to create PID file")
    try:
        from higgins.core.service import CoreService
        self._core_service = CoreService()
        # register plugins
        for name,plugin in self._plugins:
            self._core_service.registerPlugin(name, plugin)
        # start the core service
        self._core_service.startService()
        # pass control to reactor; SIGINT triggers a clean shutdown and
        # the previous handler is restored afterwards
        self.log_info("starting twisted reactor")
        self._oldsignal = signal.signal(signal.SIGINT, self._caughtSignal)
        reactor.run()
        signal.signal(signal.SIGINT, self._oldsignal)
        self.log_debug("returned from twisted reactor")
        # save configuration settings
        conf.flush()
    finally:
        try:
            os.unlink(self._pidfile)
        except Exception, e:
            self.log_warning("failed to remove PID file '%s': %s" % (self._pidfile, e))
        self.observer.stop()
def _doStop(self, result):
    # deferred callback from stop(): the core service is down, now halt
    # the reactor itself
    self.log_debug("stopped core service")
    reactor.stop()
    self.log_info("stopped twisted reactor")
def stop(self):
    """
    Stop the twisted reactor (after stopping the core service).

    Raises a plain Exception if neither the reactor nor the core
    service is currently running.
    """
    if not reactor.running:
        raise Exception("Server is not running")
    if not self._core_service.running:
        raise Exception("CoreService is not running")
    # stops everything; stopService may return a Deferred, so chain the
    # reactor shutdown as a callback
    d = maybeDeferred(self._core_service.stopService)
    d.addCallback(self._doStop)
class ServerOptions(usage.Options):
    """Command-line options for the higgins-media-server entry point."""

    # boolean flags: --create/-c and --debug/-d
    optFlags = [
        ["create", "c", None],
        ["debug", "d", None],
    ]

    def __init__(self):
        usage.Options.__init__(self)
        # defaults applied before parsing
        self['create'] = False
        self['verbosity'] = logger.LOG_WARNING
        self['debug'] = False

    def opt_verbose(self):
        # each -v raises verbosity, capped at LOG_DEBUG2
        if self['verbosity'] < logger.LOG_DEBUG2:
            self['verbosity'] = self['verbosity'] + 1
    opt_v = opt_verbose

    def opt_quiet(self):
        # each -q lowers verbosity, floored at LOG_FATAL
        if self['verbosity'] > logger.LOG_FATAL:
            self['verbosity'] = self['verbosity'] - 1
    opt_q = opt_quiet

    def parseArgs(self, env):
        # exactly one positional argument: the environment directory
        self['env'] = env

    def opt_help(self):
        print "Usage: %s [OPTION]... ENV" % os.path.basename(sys.argv[0])
        print ""
        print "  -c,--create      Create the environment if necessary"
        print "  -d,--debug       Run in the foreground, and log to stdout"
        print "  -q               Log errors only (-qq to log nothing)"
        print "  -v               Increase logging (up to -vvv)"
        print "  --help           Display this help"
        print "  --version        Display the version"
        sys.exit(0)

    def opt_version(self):
        print "Higgins version " + VERSION
        sys.exit(0)
def run_application():
"""
This is the entry point for running Higgins from the higgins-media-server script,
which is generated at build time by setuptools.
"""
# parse options
o = ServerOptions()
try:
o.parseOptions(sys.argv[1:])
except usage.UsageError, e:
print "Error parsing options: %s" % e
print ""
print "Try %s --help for usage information." % os.path.basename(sys.argv[0])
sys.exit(1)
except Exception, e:
print "%s, exiting" % e
sys.exit(1)
# initialize the server
server = Server(o['env'], create=o['create'], debug=o['debug'], verbosity=o['verbosity'])
# fork into the background
if server.daemonize:
if os.fork():
os._exit(0)
null = os.open('/dev/null', os.O_RDWR)
|
gale320/newfies-dialer | newfies/apirest/view_contenttype.py | Python | mpl-2.0 | 1,134 | 0.000882 | # -*- coding: utf-8 -*-
#
# Newfies-Dialer License
# http://www.newfies-dialer.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2014 Star2Billing S.L.
#
# The Initial Developer of the Original Code is
# Arezqui Belaid <info@star2billing.com>
#
from django.contrib.contenttypes.models import ContentType
from rest_framework import viewsets
from rest_framework.authentication import BasicAuthentication, SessionAuthentication
from rest_framework.permissions import IsAuthenticated

from apirest.content_type_serializers import ContentTypeSerializer
from permissions import CustomObjectPermissions
class ContentTypeViewSet(viewsets.ReadOnlyModelViewSet):
    """
    API endpoint that allows content_type to be viewed or edited.
    """
    queryset = ContentType.objects.all()
    serializer_class = ContentTypeSerializer
    # DRF reads ``authentication_classes``; the original ``authentication``
    # attribute was silently ignored, leaving the framework defaults active
    authentication_classes = (BasicAuthentication, SessionAuthentication)
    permission_classes = (IsAuthenticated, CustomObjectPermissions)
|
WillisXChen/django-oscar | oscar/lib/python2.7/site-packages/django_extensions/management/commands/graph_models.py | Python | bsd-3-clause | 7,278 | 0.00371 | import six
import sys
from optparse import make_option, NO_DEFAULT
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django_extensions.management.modelviz import generate_dot
try:
import pygraphviz
HAS_PYGRAPHVIZ = True
except ImportError:
HAS_PYGRAPHVIZ = False
try:
import pydot
HAS_PYDOT = True
except ImportError:
HAS_PYDOT = False
class Command(BaseCommand):
    """Management command: render installed models as a GraphViz graph."""

    # optparse options specific to this command; merged into option_list
    # below and also consulted by options_from_settings()
    graph_models_options = (
        make_option('--pygraphviz', action='store_true', dest='pygraphviz',
                    help='Use PyGraphViz to generate the image.'),
        make_option('--pydot', action='store_true', dest='pydot',
                    help='Use PyDot to generate the image.'),
        make_option('--disable-fields', '-d', action='store_true', dest='disable_fields',
                    help='Do not show the class member fields'),
        make_option('--group-models', '-g', action='store_true', dest='group_models',
                    help='Group models together respective to their application'),
        make_option('--all-applications', '-a', action='store_true', dest='all_applications',
                    help='Automatically include all applications from INSTALLED_APPS'),
        make_option('--output', '-o', action='store', dest='outputfile',
                    help='Render output file. Type of output dependend on file extensions. Use png or jpg to render graph to image.'),
        make_option('--layout', '-l', action='store', dest='layout', default='dot',
                    help='Layout to be used by GraphViz for visualization. Layouts: circo dot fdp neato nop nop1 nop2 twopi'),
        make_option('--verbose-names', '-n', action='store_true', dest='verbose_names',
                    help='Use verbose_name of models and fields'),
        make_option('--language', '-L', action='store', dest='language',
                    help='Specify language used for verbose_name localization'),
        make_option('--exclude-columns', '-x', action='store', dest='exclude_columns',
                    help='Exclude specific column(s) from the graph. Can also load exclude list from file.'),
        make_option('--exclude-models', '-X', action='store', dest='exclude_models',
                    help='Exclude specific model(s) from the graph. Can also load exclude list from file.'),
        make_option('--include-models', '-I', action='store', dest='include_models',
                    help='Restrict the graph to specified models.'),
        make_option('--inheritance', '-e', action='store_true', dest='inheritance', default=True,
                    help='Include inheritance arrows (default)'),
        make_option('--no-inheritance', '-E', action='store_false', dest='inheritance',
                    help='Do not include inheritance arrows'),
        make_option('--hide-relations-from-fields', '-R', action='store_false', dest="relations_as_fields",
                    default=True, help="Do not show relations as fields in the graph."),
        make_option('--disable-sort-fields', '-S', action="store_false", dest="sort_fields",
                    default=True, help="Do not sort fields"),
    )
    option_list = BaseCommand.option_list + graph_models_options

    help = "Creates a GraphViz dot file for the specified app names.  You can pass multiple app names and they will all be combined into a single model.  Output is usually directed to a dot file."
    args = "[appname]"
    label = 'application name'

    requires_model_validation = True
    can_import_settings = True
def handle(self, *args, **options):
    """Generate dot data for the given apps and print or render it."""
    # let settings.GRAPH_MODELS override unset command-line options
    self.options_from_settings(options)
    if len(args) < 1 and not options['all_applications']:
        raise CommandError("need one or more arguments for appname")
    use_pygraphviz = options.get('pygraphviz', False)
    use_pydot = options.get('pydot', False)
    cli_options = ' '.join(sys.argv[2:])
    dotdata = generate_dot(args, cli_options=cli_options, **options)
    dotdata = dotdata.encode('utf-8')
    if options['outputfile']:
        # pick a renderer: explicit flag wins, otherwise whichever
        # backend is importable (pygraphviz preferred)
        if not use_pygraphviz and not use_pydot:
            if HAS_PYGRAPHVIZ:
                use_pygraphviz = True
            elif HAS_PYDOT:
                use_pydot = True
        if use_pygraphviz:
            self.render_output_pygraphviz(dotdata, **options)
        elif use_pydot:
            self.render_output_pydot(dotdata, **options)
        else:
            raise CommandError("Neither pygraphviz nor pydot could be found to generate the image")
    else:
        # no output file requested: dump the dot source to stdout
        self.print_output(dotdata)
def options_from_settings(self, options):
    """Fill in options from settings.GRAPH_MODELS when left at defaults.

    Only options the user did not set on the command line (i.e. whose
    value still equals the declared default) are overridden.
    """
    defaults = getattr(settings, 'GRAPH_MODELS', None)
    if defaults:
        for option in self.graph_models_options:
            # optparse internal: the first long flag, e.g. "--group-models"
            long_opt = option._long_opts[0]
            if long_opt:
                # "--group-models" -> "group_models", the settings key
                long_opt = long_opt.lstrip("-").replace("-", "_")
                if long_opt in defaults:
                    default_value = None
                    if not option.default == NO_DEFAULT:
                        default_value = option.default
                    if options[option.dest] == default_value:
                        options[option.dest] = defaults[long_opt]
def print_output(self, dotdata):
    # handle() encodes dotdata to bytes; on Python 3 decode it back
    # before printing so the dot source is readable
    if six.PY3 and isinstance(dotdata, six.binary_type):
        dotdata = dotdata.decode()
    print(dotdata)
def render_output_pygraphviz(self, dotdata, **kwargs):
    """Renders the image using pygraphviz.

    Fixes: dataset-delimiter artifacts (" | ") had corrupted the
    HAS_PYGRAPHVIZ check and the temporary-file workaround.
    """
    if not HAS_PYGRAPHVIZ:
        raise CommandError("You need to install pygraphviz python module")

    version = pygraphviz.__version__.rstrip("-svn")
    try:
        if tuple(int(v) for v in version.split('.')) < (0, 36):
            # HACK around old/broken AGraph before version 0.36 (ubuntu ships with this old version)
            import tempfile
            tmpfile = tempfile.NamedTemporaryFile()
            tmpfile.write(dotdata)
            tmpfile.seek(0)
            dotdata = tmpfile.name
    except ValueError:
        # non-numeric version component: assume a modern pygraphviz
        pass

    graph = pygraphviz.AGraph(dotdata)
    graph.layout(prog=kwargs['layout'])
    graph.draw(kwargs['outputfile'])
def render_output_pydot(self, dotdata, **kwargs):
    """Renders the image using pydot"""
    if not HAS_PYDOT:
        raise CommandError("You need to install pydot python module")
    graph = pydot.graph_from_dot_data(dotdata)
    if not graph:
        raise CommandError("pydot returned an error")
    output_file = kwargs['outputfile']
    # output formats pydot/graphviz understand; anything else is written raw
    formats = ['bmp', 'canon', 'cmap', 'cmapx', 'cmapx_np', 'dot', 'dia', 'emf',
               'em', 'fplus', 'eps', 'fig', 'gd', 'gd2', 'gif', 'gv', 'imap',
               'imap_np', 'ismap', 'jpe', 'jpeg', 'jpg', 'metafile', 'pdf',
               'pic', 'plain', 'plain-ext', 'png', 'pov', 'ps', 'ps2', 'svg',
               'svgz', 'tif', 'tiff', 'tk', 'vml', 'vmlz', 'vrml', 'wbmp', 'xdot']
    # choose the format from the file extension, defaulting to 'raw'
    ext = output_file[output_file.rfind('.') + 1:]
    format = ext if ext in formats else 'raw'
    graph.write(output_file, format=format)
|
cherry-wb/SideTools | examples/graphicsview/collidingmice/collidingmice.py | Python | apache-2.0 | 7,219 | 0.004156 | #!/usr/bin/env python
############################################################################
##
## Copyright (C) 2006-2006 Trolltech ASA. All rights reserved.
##
## This file is part of the example classes of the Qt Toolkit.
##
## Licensees holding a valid Qt License Agreement may use this file in
## accordance with the rights, responsibilities and obligations
## contained therein. Please consult your licensing agreement or
## contact sales@trolltech.com if any conditions of this licensing
## agreement are not clear to you.
##
## Further information about Qt licensing is available at:
## http://www.trolltech.com/products/qt/licensing.html or by
## contacting info@trolltech.com.
##
## This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
## WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
##
############################################################################
import math
from PySide import QtCore, QtGui
import mice_rc
class Mouse(QtGui.QGraphicsItem):
    """A self-steering mouse item for the colliding-mice demo scene."""

    Pi = math.pi
    TwoPi = 2.0 * Pi

    # Create the bounding rectangle once.
    adjust = 0.5
    BoundingRect = QtCore.QRectF(-20 - adjust, -22 - adjust, 40 + adjust,
                                 83 + adjust)

    def __init__(self):
        super(Mouse, self).__init__()

        # steering state updated each timer tick
        self.angle = 0.0
        self.speed = 0.0
        self.mouseEyeDirection = 0.0
        # random body colour and initial heading (Qt angles are 1/16 deg)
        self.color = QtGui.QColor(QtCore.qrand() % 256, QtCore.qrand() % 256,
                                  QtCore.qrand() % 256)
        self.rotate(QtCore.qrand() % (360 * 16))

        # In the C++ version of this example, this class is also derived from
        # QObject in order to receive timer events.  PyQt does not support
        # deriving from more than one wrapped class so we just create an
        # explicit timer instead.
        self.timer = QtCore.QTimer()
        self.timer.timeout.connect(self.timerEvent)
        self.timer.start(1000 / 33)
@staticmethod
def normalizeAngle(angle):
    # wrap an angle in radians into the range [0, 2*pi]
    while angle < 0:
        angle += Mouse.TwoPi
    while angle > Mouse.TwoPi:
        angle -= Mouse.TwoPi
    return angle
def boundingRect(self):
    # QGraphicsItem override: precomputed class-level rectangle
    return Mouse.BoundingRect

def shape(self):
    # QGraphicsItem override: collision shape is the body rectangle only
    path = QtGui.QPainterPath()
    path.addRect(-10, -20, 20, 40)
    return path;
def paint(self, painter, option, widget):
    """QGraphicsItem override: draw the mouse body, face, ears and tail.

    Fixes: dataset-delimiter artifacts (" | ") had corrupted the eye and
    pupil drawEllipse calls.
    """
    # Body.
    painter.setBrush(self.color)
    painter.drawEllipse(-10, -20, 20, 40)

    # Eyes.
    painter.setBrush(QtCore.Qt.white)
    painter.drawEllipse(-10, -17, 8, 8)
    painter.drawEllipse(2, -17, 8, 8)

    # Nose.
    painter.setBrush(QtCore.Qt.black)
    painter.drawEllipse(QtCore.QRectF(-2, -22, 4, 4))

    # Pupils, offset sideways by the current eye direction.
    painter.drawEllipse(QtCore.QRectF(-8.0 + self.mouseEyeDirection, -17, 4, 4))
    painter.drawEllipse(QtCore.QRectF(4.0 + self.mouseEyeDirection, -17, 4, 4))

    # Ears turn red while colliding with another item.
    if self.scene().collidingItems(self):
        painter.setBrush(QtCore.Qt.red)
    else:
        painter.setBrush(QtCore.Qt.darkYellow)
    painter.drawEllipse(-17, -12, 16, 16)
    painter.drawEllipse(1, -12, 16, 16)

    # Tail.
    path = QtGui.QPainterPath(QtCore.QPointF(0, 20))
    path.cubicTo(-5, 22, -5, 22, 0, 25)
    path.cubicTo(5, 27, 5, 32, 0, 30)
    path.cubicTo(-5, 32, -5, 42, 0, 35)
    painter.setBrush(QtCore.Qt.NoBrush)
    painter.drawPath(path)
def timerEvent(self):
    """Per-tick steering: stay near the centre, avoid mice ahead, jitter."""
    # Don't move too far away.
    lineToCenter = QtCore.QLineF(QtCore.QPointF(0, 0), self.mapFromScene(0, 0))
    if lineToCenter.length() > 150:
        angleToCenter = math.acos(lineToCenter.dx() / lineToCenter.length())
        if lineToCenter.dy() < 0:
            angleToCenter = Mouse.TwoPi - angleToCenter;
        angleToCenter = Mouse.normalizeAngle((Mouse.Pi - angleToCenter) + Mouse.Pi / 2)
        if angleToCenter < Mouse.Pi and angleToCenter > Mouse.Pi / 4:
            # Rotate left.
            self.angle += [-0.25, 0.25][self.angle < -Mouse.Pi / 2]
        elif angleToCenter >= Mouse.Pi and angleToCenter < (Mouse.Pi + Mouse.Pi / 2 + Mouse.Pi / 4):
            # Rotate right.
            self.angle += [-0.25, 0.25][self.angle < Mouse.Pi / 2]
        elif math.sin(self.angle) < 0:
            self.angle += 0.25
        elif math.sin(self.angle) > 0:
            self.angle -= 0.25
    # Try not to crash with any other mice: look at items inside a
    # forward-facing triangle.
    dangerMice = self.scene().items(QtGui.QPolygonF([self.mapToScene(0, 0),
                                                     self.mapToScene(-30, -50),
                                                     self.mapToScene(30, -50)]))
    for item in dangerMice:
        if item is self:
            continue
        lineToMouse = QtCore.QLineF(QtCore.QPointF(0, 0), self.mapFromItem(item, 0, 0))
        angleToMouse = math.acos(lineToMouse.dx() / lineToMouse.length())
        if lineToMouse.dy() < 0:
            angleToMouse = Mouse.TwoPi - angleToMouse
        angleToMouse = Mouse.normalizeAngle((Mouse.Pi - angleToMouse) + Mouse.Pi / 2)
        if angleToMouse >= 0 and angleToMouse < Mouse.Pi / 2:
            # Rotate right.
            self.angle += 0.5
        elif angleToMouse <= Mouse.TwoPi and angleToMouse > (Mouse.TwoPi - Mouse.Pi / 2):
            # Rotate left.
            self.angle -= 0.5
    # Add some random movement.
    # NOTE(review): `qrand() % 1` is always 0, so the else branch always
    # runs; this quirk is inherited from the original Qt example.
    if len(dangerMice) > 1 and (QtCore.qrand() % 10) == 0:
        if QtCore.qrand() % 1:
            self.angle += (QtCore.qrand() % 100) / 500.0
        else:
            self.angle -= (QtCore.qrand() % 100) / 500.0
    self.speed += (-50 + QtCore.qrand() % 100) / 100.0
    dx = math.sin(self.angle) * 10
    self.mouseEyeDirection = [dx / 5, 0.0][QtCore.qAbs(dx / 5) < 1]
    # apply the computed rotation, then step forward in local coordinates
    self.rotate(dx)
    self.setPos(self.mapToParent(0, -(3 + math.sin(self.speed) * 3)))
if __name__ == '__main__':
    import sys

    # number of mice placed on the scene
    MouseCount = 7

    app = QtGui.QApplication(sys.argv)
    # seed Qt's RNG so colours/headings vary between runs
    QtCore.qsrand(QtCore.QTime(0,0,0).secsTo(QtCore.QTime.currentTime()))

    scene = QtGui.QGraphicsScene()
    scene.setSceneRect(-300, -300, 600, 600)
    # items move constantly, so spatial indexing would only add overhead
    scene.setItemIndexMethod(QtGui.QGraphicsScene.NoIndex)

    # place the mice evenly on a circle of radius 200
    for i in range(MouseCount):
        mouse = Mouse()
        mouse.setPos(math.sin((i * 6.28) / MouseCount) * 200,
                     math.cos((i * 6.28) / MouseCount) * 200)
        scene.addItem(mouse)

    view = QtGui.QGraphicsView(scene)
    view.setRenderHint(QtGui.QPainter.Antialiasing)
    view.setBackgroundBrush(QtGui.QBrush(QtGui.QPixmap(':/images/cheese.jpg')))
    view.setCacheMode(QtGui.QGraphicsView.CacheBackground)
    view.setViewportUpdateMode(QtGui.QGraphicsView.BoundingRectViewportUpdate)
    view.setDragMode(QtGui.QGraphicsView.ScrollHandDrag)
    view.setWindowTitle("Colliding Mice")
    view.resize(400, 300)
    view.show()

    sys.exit(app.exec_())
|
sdickreuter/python-andor | test.py | Python | gpl-3.0 | 660 | 0.009091 |
import numpy as np
import time
import matplotlib.pyplot as plt

from AndorSpectrometer import Spectrometer

# Smoke-test script for the Andor spectrometer wrapper.
# Fixes: dataset-delimiter artifacts (" | ") had corrupted the numpy
# import line and the first SetExposureTime call.

# connect to the spectrometer; cooler stays off for this quick test
spec = Spectrometer(start_cooler=False, init_shutter=True)
#time.sleep(30)

spec.SetCentreWavelength(650)
spec.SetSlitWidth(100)

# spec.SetImageofSlit()
# slit = spec.TakeImageofSlit()
#
#

# take two single-track spectra at different exposure times
spec.SetSingleTrack()
spec.SetExposureTime(5.0)
d = spec.TakeSingleTrack()
spec.SetExposureTime(1)
d2 = spec.TakeSingleTrack()

#
# spec.SetFullImage()
# img = spec.TakeFullImage()
#
#
# print(d.shape)
plt.plot(spec.GetWavelength(), d)
plt.show()

plt.plot(spec.GetWavelength(), d2)
plt.show()

# plt.imshow(img)
# plt.show()
#
# plt.imshow(slit)
# plt.show()
|
red-hood/calendarserver | contrib/performance/loadtest/trafficlogger.py | Python | apache-2.0 | 3,436 | 0.001746 | ##
# Copyright (c) 2011-2015 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##
"""
This module implements a reactor wrapper which will cause all traffic on
connections set up using that reactor to be logged.
"""
__all__ = ['loggedReactor']
from weakref import ref
from StringIO import StringIO
from collections import namedtuple
from zope.interface import providedBy
from twisted.python.components import proxyForInterface
from twisted.internet.interfaces import IReactorTCP
from twisted.protocols.policies import WrappingFactory, TrafficLoggingProtocol
# (active, finished) pair of log-file lists returned by getLogFiles()
logstate = namedtuple('logstate', 'active finished')


def loggedReactor(reactor):
    """
    Construct and return a wrapper around the given C{reactor} which provides
    all of the same interfaces, but which will log all traffic over outgoing
    TCP connections it establishes.
    """
    bases = []
    # proxy every interface the reactor provides; IReactorTCP gets the
    # traffic-logging mixin instead of a plain proxy
    for iface in providedBy(reactor):
        if iface is IReactorTCP:
            bases.append(_TCPTrafficLoggingReactor)
        else:
            bases.append(proxyForInterface(iface, '_reactor'))
    if bases:
        return type('(Logged Reactor)', tuple(bases), {})(reactor)
    # reactor advertises no interfaces: nothing to wrap
    return reactor
class _TCPTrafficLoggingReactor(proxyForInterface(IReactorTCP, '_reactor')):
    """
    A mixin for a reactor wrapper which defines C{connectTCP} so as to cause
    traffic to be logged.
    """
    _factories = None

    @property
    def factories(self):
        # lazily-created list of weakrefs to the wrapping factories
        if self._factories is None:
            self._factories = []
        return self._factories

    def getLogFiles(self):
        """Return a logstate of (active, finished) log file objects."""
        active = []
        finished = []
        for factoryref in self.factories:
            factory = factoryref()
            active.extend(factory.logs)
            finished.extend(factory.finishedLogs)
        return logstate(active, finished)

    def connectTCP(self, host, port, factory, *args, **kwargs):
        # wrap the caller's factory so every connection it builds logs its
        # traffic; hold only a weakref so dead factories drop out of the list
        wrapper = _TrafficLoggingFactory(factory)
        self.factories.append(ref(wrapper, self.factories.remove))
        return self._reactor.connectTCP(
            host, port, wrapper, *args, **kwargs)
class _TrafficLoggingFactory(WrappingFactory):
    """
    A wrapping factory which applies L{TrafficLoggingProtocolWrapper}.
    """
    # cap on retained finished logs, so memory use stays bounded
    LOGFILE_LIMIT = 20
    protocol = TrafficLoggingProtocol
    noisy = False

    def __init__(self, wrappedFactory):
        WrappingFactory.__init__(self, wrappedFactory)
        # logs: StringIO objects for live connections
        self.logs = []
        # finishedLogs: logs of closed connections (most recent LOGFILE_LIMIT)
        self.finishedLogs = []

    def unregisterProtocol(self, protocol):
        # connection closed: move its log from active to finished and trim
        WrappingFactory.unregisterProtocol(self, protocol)
        self.logs.remove(protocol.logfile)
        self.finishedLogs.append(protocol.logfile)
        del self.finishedLogs[:-self.LOGFILE_LIMIT]

    def buildProtocol(self, addr):
        # give each new connection its own in-memory logfile
        logfile = StringIO()
        self.logs.append(logfile)
        return self.protocol(
            self, self.wrappedFactory.buildProtocol(addr), logfile, None, 0)
|
timesqueezer/portfolio | run.py | Python | mit | 417 | 0.002398 | #!env/bin/python
import sys
from portfolio import create_app
class ProductionConfig(object):
DEBUG = False
TESTING = False
app = cre | ate_app(config=ProductionConfig)
if __name__ == '__main__':
if len(sys.argv) > 1 and sys.argv[1] == '-p':
print(' * Ru | nning in production mode')
app = create_app(config=ProductionConfig)
else:
app = create_app()
app.run(host='0.0.0.0')
|
CloudBoltSoftware/cloudbolt-forge | blueprints/citrix/citrix_itm_predictive_dns_record/management/edit_record.py | Python | apache-2.0 | 4,201 | 0.001904 | from common.methods import set_progress
from utilities.models import ConnectionInfo
from servicecatalog.models import ServiceBlueprint
from infrastructure.models import CustomField
import json
from ast import literal_eval
import requests
API_CLIENT_CI = "Citrix API"
def create_custom_fields_as_needed():
    """Ensure the CloudBolt custom fields used by this plugin exist.

    Each field is created only if missing (get_or_create), so repeated
    runs are idempotent.
    """
    field_specs = [
        ('record_id', 'Citrix DNS Record ID', 'STR'),
        ('record_value', 'Citrix DNS Record Value', 'STR'),
        ('ttl', 'Citrix DNS Record TTL', 'INT'),
        ('recordType', 'Citrix DNS Record Type', 'STR'),
    ]
    for field_name, field_label, field_type in field_specs:
        CustomField.objects.get_or_create(
            name=field_name,
            defaults={
                "label": field_label,
                "type": field_type,
            }
        )
def get_citrix_url():
    """Return the base URL of the Citrix API from the stored connection info."""
    conn = ConnectionInfo.objects.get(name=API_CLIENT_CI)
    return f"{conn.protocol}://{conn.ip}"
def get_citrix_api_token():
# Citrix api uses tokens to authorise requests. The | tokens expires after a short while and has to be regenerated.
ci = ConnectionInfo.objects.get(name=API_CLIENT_CI)
url = get_citrix_url()
response = requests.get(
"{url}/api/oauth/token?client_id={client_id}&client_secret={client_secret}&grant_type=client_credentials".format(
url=url, client_id=ci.username, client_secret=ci.password))
t | oken = response.json().get('access_token')
return token
def generate_options_for_recordType(**kwargs):
    """Return the DNS record types offered in the record-type dropdown."""
    supported_types = ["A", "AAAA", "MX"]
    return supported_types
def generate_options_for_editRecordType(**kwargs):
    """Yes/No choices for whether the record type should be edited."""
    yes_no = [(True, "Yes"), (False, "No")]
    return yes_no
def generate_options_for_editRecordValue(**kwargs):
    """Yes/No choices for whether the record value should be edited."""
    yes_no = [(True, "Yes"), (False, "No")]
    return yes_no
def generate_options_for_editTTL(**kwargs):
    """Yes/No choices for whether the TTL should be edited."""
    yes_no = [(True, "Yes"), (False, "No")]
    return yes_no
# Need a way to return a string instead of a list.
# def generate_options_for_value(**kwargs):
# resource = kwargs.get('resource')
# return literal_eval(resource.record_value).get('addresses')
def run(resource, *args, **kwargs):
    """Edit an existing Citrix ITM predictive DNS record via the Cedexis API.

    Reads the current record state from *resource*, overrides any fields the
    user supplied through the action inputs, PUTs the result to the Cedexis
    record endpoint, and on success updates the CloudBolt resource to match.
    Returns the CloudBolt plugin 3-tuple (status, output, error).
    """
    create_custom_fields_as_needed()
    # record_value is stored as the repr of a dict like {"addresses": [...]};
    # literal_eval turns it back into a dict.
    addresses = literal_eval(resource.record_value).get('addresses')
    set_progress(f"Addresses {addresses}")
    _value = "".join(addresses)
    url = f"https://portal.cedexis.com:443/api/v2/config/authdns.json/record/{resource.record_id}"
    token = get_citrix_api_token()
    # NOTE(review): the "{{ ... }}" strings below look like CloudBolt action-input
    # template tags that are substituted before execution — confirm. If left
    # unsubstituted, the truthy literal would always win the `or` fallbacks.
    editRecordValue = "{{ editRecordValue }}"  # assumes substitution; currently unused
    val = "{{ value }}"
    value = val or _value
    editTTL = "{{ editTTL }}"  # currently unused
    ttl = "{{ ttl }}" or resource.ttl
    editRecordType = "{{ editRecordType }}"  # currently unused
    recordType = "{{ recordType }}" or resource.recordType
    dnsZone = resource.citrix_zone_id
    if not token:
        return "FAILURE", "", "No token Authorization Token. Ensure you have set up your credentials on the " \
                              "connection info page "
    head = {'Authorization': 'Bearer ' + token, 'Content-Type': 'application/json'}
    data = json.dumps({
        "recordType": recordType,
        "quickEdit": True,
        "response": value,
        "ttl": ttl,
        "dnsZoneId": dnsZone,
        "id": resource.record_id
    }
    )
    response = requests.put(url=url, data=data, headers=head)
    # Find the parent zone resource so the edited record can be re-parented.
    bp = ServiceBlueprint.objects.get(name='Citrix ITM Zone')
    zone = [res for res in bp.resource_set.all() if res.citrix_zone_id == dnsZone]
    if response.ok:
        if val:
            # Re-wrap a user-supplied value in the stored dict shape.
            value = {"addresses": [val]}
        resource.name = '{}- {}'.format(dnsZone, recordType)
        resource.parent_resource = zone[0]  # assumes a matching zone exists — IndexError otherwise
        resource.record_id = response.json().get('id')
        resource.record_value = value
        resource.citrix_zone_id = response.json().get('dnsZoneId')
        resource.recordType = recordType
        resource.ttl = int(ttl)
        resource.save()
        return "SUCCESS", "Sample output message", ""
    else:
        return "FAILURE", "", "{}".format(response.json().get('errorDetails')[0].get('developerMessage'))
|
Widukind/dlstats | dlstats/tests/fetchers/test_bea.py | Python | agpl-3.0 | 6,159 | 0.00552 | # -*- coding: utf-8 -*-
import io
import os
from dlstats.fetchers.bea import BEA as Fetcher
import httpretty
from dlstats.tests.base import RESOURCES_DIR as BASE_RESOURCES_DIR
from dlstats.tests.fetchers.base import BaseFetcherTestCase
import unittest
from unittest import mock
RESOURCES_DIR = os.path.abspath(os.path.join(BASE_RESOURCES_DIR, "bea"))
DATA_BEA_10101_An = {
"filepath": os.path.abspath(os.path.join(RESOURCES_DIR, "nipa-section1.xls.zip")),
"DSD": {
"provider": "BEA",
"filepath": None,
"dataset_code": "nipa-section1-10101-a",
"dsd_id": "nipa-section1-10101-a",
"is_completed": True,
"categories_key": "nipa-section1",
"categories_parents": ["national", "nipa"],
"categories_root": ["national", "nipa", "nipa-fa2004", "nipa-underlying"],
"concept_keys": ['concept', 'frequency'],
"codelist_keys": ['concept', 'frequency'],
"codelist_count": {
"concept": 25,
"frequency": 1
},
"dimension_keys": ['concept', 'frequency'],
"dimension_count": {
"concept": 25,
"frequency": 1
},
"attribute_keys": [],
"attribute_count": None,
},
"series_accept": 25,
"series_reject_frequency": 0,
"series_reject_empty": 0,
"series_all_values": 1175,
"series_key_first": "A191RL1-A",
"series_key_last": "A191RP1-A",
"series_sample": {
'provider_name': 'BEA',
'dataset_code': 'nipa-section1-10101-a',
'key': 'A191RL1-A',
'name': 'Gross domestic product - Annually',
'frequency': 'A',
'last_update': None,
'first_value': {
'value': '3.1',
'period': '1969',
'attributes': None,
},
'last_value': {
'value': '2.4',
'period': '2015',
'attributes': None,
},
'dimensions': {
'concept': 'a191rl1',
"frequency": 'a'
},
'attributes': None,
}
}
def _get_datasets_settings(self):
return {
"nipa-section1-10101-a": {
'dataset_code': 'nipa-section1-10101-a',
'name': 'Table 1.1.1. Percent Change From Preceding Period in Real Gross Domestic Product - Annually',
'last_update': None,
'metadata': {
'filename': 'nipa-section1.xls.zip',
'sheet_name': '10101 Ann',
'url': 'http://www.bea.gov/national/nipaweb/GetCSV.asp?GetWhat=SS_Data/Section1All_xls.zip&Section=2'
},
}
}
class FetcherTestCase(BaseFetcherTestCase):
# nosetests -s -v dlstats.tests.fetchers.test_bea:FetcherTestCase
FETCHER_KLASS = Fetcher
DATASETS = {
'nipa-section1-10101-a': DATA_BEA_10101_An
}
DATASET_FIRST = "nipa-fa2004-section1-101-a"
DATASET_LAST = "nipa-underlying-section9-90500U-a"
DEBUG_MODE = False
def _load_files(self, dataset_code):
url = "http://www.bea.gov/national/nipaweb/GetCSV.asp?GetWhat=SS_Data/Section1All_xls.zip&Section=2"
self.register_url(url,
self.DATASETS[dataset_code]["filepath"])
@httpretty.activate
@unittest.skipUnless('FULL_TEST' in os.environ, "Skip - no full test")
def test_load_datasets_first(self):
dataset_code = "nipa-section1-10101-a"
self._load_files(dataset_code)
self.assertLoadDatasetsFirst([dataset_code])
@httpretty.activate
@unittest.skipUnless('FULL_TEST' in os.environ, "Skip - no full test")
def test_load_datasets_update(self):
dataset_code = "nipa-section1-10101-a"
self._load_files(dataset_code)
self.assertLoadDatasetsUpdate([dataset_code])
#@httpretty.activate
@unittest.skipIf(True, "TODO")
def test_build_data_tree(self):
dataset_code = "nipa-section1-10101-a"
self.assertDataTree(dataset_code)
@httpretty.activate
| @mock.patch("dlstats.fetchers.bea.BEA._get_datasets_settings", _get_datasets_settings)
def test_upsert_dataset_10101(self):
# nosetests -s -v dlstats.tests.fetchers.test_bea:FetcherTestCase.test_upsert_dataset_10101
dataset_code = "nipa-section1-10101-a"
self._load_files(dataset_code)
self.assertProvider()
dat | aset = self.assertDataset(dataset_code)
names = {
'a191rl1': 'Gross domestic product',
'dpcerl1': 'Personal consumption expenditures',
'dgdsrl1': 'Personal consumption expenditures - Goods',
'ddurrl1': 'Personal consumption expenditures - Goods - Durable goods',
'dndgrl1': 'Personal consumption expenditures - Goods - Nondurable goods',
'dserrl1': 'Personal consumption expenditures - Services',
'a006rl1': 'Gross private domestic investment',
'a007rl1': 'Gross private domestic investment - Fixed investment',
'a008rl1': 'Gross private domestic investment - Fixed investment - Nonresidential',
'y033rl1': 'Gross private domestic investment - Fixed investment - Nonresidential - Equipment',
'a011rl1': 'Gross private domestic investment - Fixed investment - Residential',
'a020rl1': 'Net exports of goods and services - Exports',
'a191rp1': 'Addendum: - Gross domestic product, current dollars'
}
for k, v in names.items():
self.assertTrue(k in dataset["codelists"]["concept"])
self.assertEquals(dataset["codelists"]["concept"][k], v)
series_list = self.assertSeries(dataset_code)
series_keys = {s["key"].lower(): s for s in series_list}
for k, v in names.items():
search_k = "%s-a" % k
search_name = "%s - Annually" % v
self.assertTrue(search_k in series_keys, "%s not in series_keys" % search_k)
self.assertEquals(series_keys[search_k]["name"], search_name)
for series in series_list:
self.assertEquals(series["last_update_ds"], dataset["last_update"])
|
catapult-project/catapult | dashboard/dashboard/edit_anomalies_test.py | Python | bsd-3-clause | 6,510 | 0.002304 | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import json
import unittest
import mock
import webapp2
import webtest
from google.appengine.api import users
from dashboard import edit_anomalies
from dashboard.common import testing_common
from dashboard.common import utils
from dashboard.common import xsrf
from dashboard.models import anomaly
class EditAnomaliesTest(testing_common.TestCase):
def setUp(self):
super(EditAnomaliesTest, self).setUp()
app = webapp2.WSGIApplication([('/edit_anomalies',
edit_anomalies.EditAnomaliesHandler)])
self.testapp = webtest.TestApp(app)
testing_common.SetSheriffDomains(['chromium.org'])
def tearDown(self):
super(EditAnomaliesTest, self).tearDown()
self.UnsetCurrentUser()
def _AddAnomaliesToDataStore(self):
anomaly.Anomaly(
start_revision=123456,
end_revision=123459,
median_before_anomaly=5,
median_after_anomaly=10,
bug_id=None,
test=utils.TestKey('a/b/c/d')).put()
anomaly.Anomaly(
start_revision=123460,
end_revision=123464,
median_before_anomaly=5,
median_after_anomaly=10,
bug_id=None,
test=utils.TestKey('a/b/c/d')).put()
anomaly.Anomaly(
start_revision=123465,
end_revision=123468,
median_before_anomaly=5,
median_after_anomaly=10,
bug_id=None,
test=utils.TestKey('a/b/c/d')).put()
return anomaly.Anomaly.query().fetch(keys_only=True)
def testPost_NoXSRFToken_Returns403Error(self):
anomaly_keys = self._AddAnomaliesToDataStore()
self.testapp.post(
'/edit_anomalies', {
'keys': json.dumps([anomaly_keys[0].urlsafe()]),
'bug_id': 31337,
},
status=403)
self.assertIsNone(anomaly_keys[0].get().bug_id)
@mock.patch.object(utils, 'IsGroupMember', mock.MagicMock(return_value=False))
def testPost_LoggedIntoInvalidDomain_DoesNotModifyAnomaly(self):
anomaly_keys = self._AddAnomaliesToDataStore()
self.Se | tCurrentUser('foo@bar.com')
self.testapp.post(
'/edit_anomalies', {
'keys': json.dumps([anomaly_keys[0].urlsafe()]),
'bug_id': 31337,
'xsrf_token': xsrf.GenerateToken(users.get_current_user()),
},
status=403)
self.assertIsNone(anomaly_keys[0].get().bug_id)
| def testPost_LoggedIntoValidSheriffAccount_ChangesBugID(self):
anomaly_keys = self._AddAnomaliesToDataStore()
self.SetCurrentUser('sullivan@chromium.org')
self.testapp.post(
'/edit_anomalies', {
'keys': json.dumps([anomaly_keys[0].urlsafe()]),
'bug_id': 31337,
'xsrf_token': xsrf.GenerateToken(users.get_current_user()),
})
self.assertEqual(31337, anomaly_keys[0].get().bug_id)
def testPost_RemoveBug(self):
anomaly_keys = self._AddAnomaliesToDataStore()
self.SetCurrentUser('sullivan@chromium.org')
a = anomaly_keys[0].get()
a.bug_id = 12345
a.put()
self.testapp.post(
'/edit_anomalies', {
'keys': json.dumps([anomaly_keys[0].urlsafe()]),
'bug_id': 'REMOVE',
'xsrf_token': xsrf.GenerateToken(users.get_current_user()),
})
self.assertIsNone(anomaly_keys[0].get().bug_id)
def testPost_ChangeBugIDToInvalidID_ReturnsError(self):
anomaly_keys = self._AddAnomaliesToDataStore()
self.SetCurrentUser('sullivan@chromium.org')
a = anomaly_keys[0].get()
a.bug_id = 12345
a.put()
response = self.testapp.post(
'/edit_anomalies', {
'keys': json.dumps([anomaly_keys[0].urlsafe()]),
'bug_id': 'a',
'xsrf_token': xsrf.GenerateToken(users.get_current_user()),
})
self.assertEqual({'error': 'Invalid bug ID a'}, json.loads(response.body))
self.assertEqual(12345, anomaly_keys[0].get().bug_id)
def testPost_NoKeysGiven_Error(self):
anomaly_keys = self._AddAnomaliesToDataStore()
self.SetCurrentUser('foo@chromium.org')
response = self.testapp.post(
'/edit_anomalies', {
'bug_id': 31337,
'xsrf_token': xsrf.GenerateToken(users.get_current_user()),
})
self.assertEqual({'error': 'No alerts specified to add bugs to.'},
json.loads(response.body))
self.assertIsNone(anomaly_keys[0].get().bug_id)
def testPost_ChangeRevisions(self):
anomaly_keys = self._AddAnomaliesToDataStore()
self.SetCurrentUser('sullivan@chromium.org')
self.testapp.post(
'/edit_anomalies', {
'keys': json.dumps([anomaly_keys[0].urlsafe()]),
'new_start_revision': '123450',
'new_end_revision': '123455',
'xsrf_token': xsrf.GenerateToken(users.get_current_user()),
})
self.assertEqual(123450, anomaly_keys[0].get().start_revision)
self.assertEqual(123455, anomaly_keys[0].get().end_revision)
def testPost_NudgeWithInvalidRevisions_ReturnsError(self):
anomaly_keys = self._AddAnomaliesToDataStore()
self.SetCurrentUser('sullivan@chromium.org')
start = anomaly_keys[0].get().start_revision
end = anomaly_keys[0].get().end_revision
response = self.testapp.post(
'/edit_anomalies', {
'keys': json.dumps([anomaly_keys[0].urlsafe()]),
'new_start_revision': 'a',
'new_end_revision': 'b',
'xsrf_token': xsrf.GenerateToken(users.get_current_user()),
})
self.assertEqual(start, anomaly_keys[0].get().start_revision)
self.assertEqual(end, anomaly_keys[0].get().end_revision)
self.assertEqual({'error': 'Invalid revisions a, b'},
json.loads(response.body))
def testPost_IncompleteParametersGiven_ReturnsError(self):
anomaly_keys = self._AddAnomaliesToDataStore()
self.SetCurrentUser('sullivan@chromium.org')
response = self.testapp.post(
'/edit_anomalies', {
'keys': json.dumps([anomaly_keys[0].urlsafe()]),
'new_start_revision': '123',
'xsrf_token': xsrf.GenerateToken(users.get_current_user()),
})
self.assertEqual({'error': 'No bug ID or new revision specified.'},
json.loads(response.body))
if __name__ == '__main__':
unittest.main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.