code stringlengths 3 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int64 3 1.05M |
|---|---|---|---|---|---|
#!/usr/bin/env python
# -*- coding: utf8 -*-
# Copyright 2016 Sébastien Maccagnoni
#
# This file is part of AwesomeShop.
#
# AwesomeShop is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# AwesomeShop is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with AwesomeShop. If not, see <http://www.gnu.org/licenses/>.
"""Create an app for WSGI"""
from back import create_app
app = create_app()
| tiramiseb/awesomeshop | wsgi.py | Python | agpl-3.0 | 853 |
from slackclient import SlackClient
from matterhook import Webhook
from discord import (
Client as DiscordClient,
Intents as DiscordIntents,
Embed as DiscordEmbed,
)
from zou.app import config
import asyncio
def send_to_slack(app_token, userid, message):
    """Post *message* (Markdown) as a direct message to Slack user *userid*.

    Uses the legacy Slack Web API ``chat.postMessage`` call with a single
    mrkdwn section block.
    """
    slack = SlackClient(token=app_token)
    payload = [{"type": "section", "text": {"type": "mrkdwn", "text": message}}]
    slack.api_call(
        "chat.postMessage", channel="@%s" % userid, blocks=payload, as_user=True
    )
def send_to_mattermost(webhook, userid, message):
    """Send *message* to Mattermost user *userid* through an incoming webhook.

    *webhook* is a full URL like ``https://host/hooks/<api_key>``; it is split
    into the server base URL and the hook API key.
    """
    parts = webhook.split("/")
    # parts == [scheme+":", "", host, "hooks", api_key]; parts[1] is the empty
    # string between the two slashes, so this rebuilds "<scheme>://<host>".
    server = "%s%s//%s" % (parts[0], parts[1], parts[2])
    api_key = parts[4]
    # mandatory parameters are url and your webhook API key
    hook_client = Webhook(server, api_key)
    hook_client.username = "Kitsu - %s" % (message["project_name"])
    hook_client.icon_url = "%s://%s/img/kitsu.b07d6464.png" % (
        config.DOMAIN_PROTOCOL,
        config.DOMAIN_NAME,
    )
    # send a message to the API_KEY's channel
    hook_client.send(message["message"], channel="@%s" % userid)
def send_to_discord(current_app, token, userid, message):
    """Send *message* as an embed DM to Discord user *userid* ("name#discriminator").

    Spins up a one-shot discord.py client on a fresh asyncio event loop so it
    can be called from synchronous (worker/web) code, then closes everything.
    """
    async def send_to_discord_async(current_app, token, userid, message):
        # The members intent is required for client.get_all_members() below.
        intents = DiscordIntents.default()
        intents.members = True
        client = DiscordClient(intents=intents)
        @client.event
        async def on_ready(
            # Bound as defaults so the closure captures the current values.
            current_app=current_app, userid=userid, message=message
        ):
            user_found = False
            for user in client.get_all_members():
                if (
                    # Match on the legacy "name#discriminator" handle; skip bots.
                    "%s#%s" % (user.name, user.discriminator) == userid
                    and not user.bot
                ):
                    embed = DiscordEmbed()
                    embed.description = message
                    await user.send(embed=embed)
                    user_found = True
                    break
            if not user_found:
                current_app.logger.info(
                    "User %s not found by discord bot" % userid
                )
            # Close the client so the client.start() call below returns.
            await client.close()
        await client.start(token)
    # Install a fresh event loop for this (possibly non-main) thread, run the
    # one-shot client to completion, then dispose of the loop.
    asyncio.set_event_loop(asyncio.new_event_loop())
    loop = asyncio.get_event_loop()
    loop.run_until_complete(
        send_to_discord_async(current_app, token, userid, message)
    )
    loop.close()
| cgwire/zou | zou/app/utils/chats.py | Python | agpl-3.0 | 2,279 |
## parts.py
##
## Objects which represent parts (e.g., activation function, weights, etc.) of a neural network. For
## constructing more complex neural networks
import tensorflow as tf
import numpy as np
# Constants for type of pooling layer to use
MAX_POOL = "MAX"
AVG_POOL = "AVG"
def weight_variable(shape, name=None, trainable=True, is_conv=True):
    """
    Create a weight variable with Xavier initialization.

    Arguments:
        shape     - shape of the variable to create
        name      - name for the tf variable
        trainable - whether the variable is updated during training
        is_conv   - use the conv2d flavour of the Xavier initializer
    Returns the created tf.Variable.
    """
    # Pick the Xavier initializer matching the variable's role.  A leftover
    # line used to unconditionally overwrite this choice with a
    # truncated-normal initializer, which made the is_conv argument dead code;
    # that override has been removed so the documented behavior holds.
    if is_conv:
        initializer = tf.contrib.layers.xavier_initializer_conv2d()
    else:
        initializer = tf.contrib.layers.xavier_initializer()
    weights = tf.get_variable(name, shape=shape, dtype=tf.float32, initializer=initializer, trainable=trainable)
    return weights
def bias_variable(shape, name=None, trainable=True):
    """
    Create a bias variable of the given shape, initialized to zero.
    """
    zero_init = tf.constant_initializer(0.0)
    return tf.get_variable(name, shape=shape, dtype=tf.float32,
                           initializer=zero_init, trainable=trainable)
class PrimaryCaps_EM:
    """
    A Primary Capsule Layer. See jhui.github.io blog for details. Uses EM routing.

    Produces, from an ordinary conv feature map, a pose tensor of shape
    (N, W, H, num_capsules, pose_w, pose_h) and a sigmoid activation tensor.
    """
    def __init__(self, kernel_shape, pose_shape, num_output_capsules, **kwargs):
        """
        Create a holder for the primary capsule layer
        Arguments:
            kernel_shape - (width, height) shape of the kernels
            pose_shape - (width, height) shape of the pose of each capsule
            num_output_capsules - how many capsules to generate
        Optional Arguments:
            name - A name for the layer (used as variable scope)
            stride - Stride of the kernel (default 1)
            padding - "SAME" or "VALID" (default "SAME")
            activation_function - default tf.nn.relu (currently unused in build)
        """
        # Simply hold on to the parameters for now
        self.kernel_shape = kernel_shape
        self.num_capsules = num_output_capsules
        self.pose_shape = pose_shape
        self.name = kwargs.get("name", None)
        self.stride = kwargs.get("stride", 1)
        self.padding = kwargs.get("padding", "SAME")
        self.activation_function = kwargs.get("activation_function", tf.nn.relu)
        # Placeholder for the weights for this layer
        self.pose_weights = None
        self.activation_weights = None
    def build(self, input_layer, trainable=True):
        """
        Construct the layer in tensorflow.

        input_layer: a (N, W, H, C) feature map.
        Returns (poses, activations, pose_weights, activation_weights).
        """
        with tf.variable_scope(self.name):
            # Get the number of input channels
            input_shape = input_layer.get_shape()
            num_input_channels = input_shape[-1].value
            # Create the weights for the pose and activation
            pose_weight_shape = [self.kernel_shape[0], self.kernel_shape[1], num_input_channels, self.pose_shape[0]*self.pose_shape[1]*self.num_capsules]
            activation_weight_shape = [self.kernel_shape[0], self.kernel_shape[1], num_input_channels, self.num_capsules]
            self.pose_weights = weight_variable(pose_weight_shape, 'w_pose_'+self.name, trainable)
            self.activation_weights = weight_variable(activation_weight_shape, 'w_activation_'+self.name, trainable)
            # Calculate the poses and activations - reshape pose to (-1, W, H, POSE_W, POSE_H, NUM_CAPSULES)
            # NOTE(review): the reshape reuses the INPUT spatial dims, which is
            # only valid with padding "SAME" and stride 1 — TODO confirm callers.
            self.poses = tf.nn.conv2d(input_layer, self.pose_weights, strides=[1, self.stride, self.stride, 1], padding=self.padding)
            self.poses = tf.reshape(self.poses, shape=[-1, input_shape[-3], input_shape[-2], self.num_capsules, self.pose_shape[0], self.pose_shape[1]])
            self.activations = tf.nn.conv2d(input_layer, self.activation_weights, strides=[1, self.stride, self.stride, 1], padding=self.padding)
            self.activations = tf.sigmoid(self.activations)
            # Debug output of the resulting static shapes (Python 2 prints).
            print self.poses.get_shape()
            print self.activations.get_shape()
        return self.poses, self.activations, self.pose_weights, self.activation_weights
class ConvCaps_EM:
    """
    A Convolutional Capsule layer, using EM routing.

    Takes a (pose, activation) pair from a previous capsule layer, computes
    votes via learned transformation matrices, and runs EM routing to produce
    the output poses and activations.
    """
    def __init__(self, kernel_shape, num_output_capsules, batch_size, **kwargs):
        """
        Create a holder for the convolutional capsule layer.

        Arguments:
            kernel_shape - (width, height) of the receptive field
            num_output_capsules - how many capsules this layer emits
            batch_size - static batch size (required for explicit reshapes)
        Optional Arguments:
            name - A name for the layer (used as variable scope)
            stride - Stride of the kernel (default 1)
            padding - "SAME" or "VALID" (default "SAME"; note _tile hard-codes VALID)
            num_em_steps - number of EM routing iterations (default 3)
            epsilon - numerical-stability constant (default 1e-8)
        """
        # Simply hold the parameters for now
        self.kernel_shape = kernel_shape
        self.num_capsules = num_output_capsules
        self.batch_size = batch_size
        self.name = kwargs.get("name", None)
        self.stride = kwargs.get("stride", 1)
        self.padding = kwargs.get("padding", "SAME")
        self.num_em_steps = kwargs.get("num_em_steps", 3)
        self.epsilon = kwargs.get("epsilon", 1e-8)
        # Placeholder for the weights for this layer
        self.pose_weights = None
        self.beta_v = None
        self.beta_a = None
        # Placeholder for activations of this layer
        self.votes = None
        self.routing = None
    # BLACK BOX helper functions, modified from jhui.github.io
    def _tile(self, input_layer):
        """
        Perform tiling and convolution to prepare the input pose and activation to the
        correct spatial dimension for voting and EM-routing.
        input_layer: a pose layer with shape (N, W, H, C, POSE_W, POSE_H) or
        an activation layer with shape (N, W, H, C)
        return: a tensor whose dimensions are (N, W, H, K, O)
        K = the flattened kernel shape (kernel_width x kernel_height)
        O = the flattened pose and/or activation (pose_width x pose_height x num_input_capsules)
        """
        # Extract relevent sizes from the input
        input_shape = input_layer.get_shape()
        input_width = input_shape[1].value
        input_height = input_shape[2].value
        num_input_capsules = input_shape[3].value
        kernel_width, kernel_height = self.kernel_shape
        if len(input_shape) > 5: # Is this a pose tensor?
            output_channel_size = num_input_capsules*input_shape[4].value*input_shape[5].value
        else: # An activation tensor
            output_channel_size = num_input_capsules
        # Flatten the input so that it is (?, W, H, OUT_C)
        input_flat = tf.reshape(input_layer, shape=[-1, input_width, input_height, output_channel_size])
        # Create the tile filter operation: a one-hot depthwise kernel that
        # copies each spatial offset of the window into its own output channel.
        tile_filter = np.zeros(shape=[kernel_width, kernel_height, output_channel_size, kernel_width*kernel_height], dtype=np.float32)
        for i in range(kernel_width):
            for j in range(kernel_height):
                tile_filter[i,j,:,i*kernel_height + j] = 1.0
        tile_filter_op = tf.constant(tile_filter, dtype=tf.float32)
        # Perform the tiling.  NOTE(review): padding is hard-coded to VALID
        # here; the self.padding option is not consulted.
        output = tf.nn.depthwise_conv2d(input_flat, tile_filter_op, strides=[1, self.stride, self.stride, 1], padding='VALID')
        # Get the width and height of the output
        output_shape = output.get_shape()
        output_width = output_shape[1].value
        output_height = output_shape[2].value
        # Put the right numbers in the right places
        output = tf.reshape(output, shape=[-1, output_width, output_height, num_input_capsules, kernel_width*kernel_height])
        output = tf.transpose(output, perm=[0,1,2,4,3])
        return output
    def _e_step(self, mean_h, stdev_h, activations, votes):
        """
        Perform an expectation step, i.e., the routing assignment
        mean_h: (N, OW, OH, 1, OC, PW*PH)
        stdev_h: (N, OW, OH, 1, OC, PW*PH)
        activations: (N, OW, OH, 1, OC, 1)
        votes: (N, OW, OH, KW x KH x IC, OC, PW*PH)
        return: routing
        """
        # We are calculating the log probability for P (log of a Gaussian pdf;
        # working in log-space avoids underflow)
        o_p0 = -tf.reduce_sum(tf.square(votes - mean_h) / (2*tf.square(stdev_h)), axis=-1, keep_dims=True)
        o_p1 = -tf.reduce_sum(tf.log(stdev_h + self.epsilon), axis=-1, keep_dims=True)
        # o_p is the probability density of the h-th component of the vote from i to j
        # (N, OW, OH, 1, OC, PWxPH)
        o_p = o_p0 + o_p1
        # The routing is the softmax of the probability distributions,
        # taken over the output-capsule axis (second to last).
        zz = tf.log(activations + self.epsilon) + o_p
        routing_assignments = tf.nn.softmax(zz, dim=len(zz.get_shape().as_list())-2)
        return routing_assignments
    def _m_step(self, routing_assignments, votes, activations, beta_v, beta_a, temperature):
        """
        Perform a maximization step: refit each output capsule's Gaussian.
        routing_assignments: (KW x KH x IC, OC, 1)
        votes: (N, OH, OW, KW x KH x IC, OC, PW x PH)
        activations: (N, OH, OW, KW x KH x IC, 1, 1)
        beta_v: (1, 1, 1, 1, OC, 1)
        beta_a: (1, 1, 1, 1, OC, 1)
        temperature: lambda
        return out_mean, out_stdev, out_activation
        """
        routing_prime = routing_assignments * activations
        # Sum over all input capulse
        routing_prime_sum = tf.reduce_sum(routing_prime, axis=-3, keep_dims=True, name='routing_prime_sum')
        # Calculate mean and std_dev for all h
        mean_h = tf.reduce_sum(routing_prime * votes, axis=-3, keep_dims=True) / routing_prime_sum
        stdev_h = tf.sqrt(tf.reduce_sum(routing_prime * tf.square(votes - mean_h), axis=-3, keep_dims=True) / routing_prime_sum)
        # Calculate cost
        cost_h = (beta_v + tf.log(stdev_h + self.epsilon)) * routing_prime_sum
        # The relative variance between each channel determines which one should activate
        cost_sum = tf.reduce_sum(cost_h, axis=-1, keep_dims=True)
        cost_mean = tf.reduce_mean(cost_sum, axis=-2, keep_dims=True)
        cost_stdev = tf.sqrt(tf.reduce_sum(tf.square(cost_sum - cost_mean), axis=-2, keep_dims=True)/ cost_sum.get_shape().as_list()[-2])
        cost_h = beta_a + (cost_mean - cost_sum) / (cost_stdev + self.epsilon)
        # Activation - sigmoid(lambda * (beta_a - sum(cost)))
        out_activation = tf.sigmoid(temperature * cost_h)
        return mean_h, stdev_h, out_activation
    def _routing(self, votes, activations):
        """
        Run num_em_steps iterations of EM routing over the votes.
        votes: (N, OW, OH, KW x KH x IC, OC, PW x PH)
        activations: (N, OW, OH, KW x KH x IC)
        return: pose, activation
        """
        votes_shape = votes.get_shape().as_list()
        with tf.variable_scope('em_routing'):
            # Create the initial routing assignment as evenly distributed
            routing = tf.constant(1.0 / votes_shape[-2], shape=votes_shape[-3:-1] + [1], dtype=tf.float32)
            # Expand the dimensions of the activations
            activations = activations[..., tf.newaxis, tf.newaxis]
            # Similarly for beta_v and beta_a
            beta_v = self.beta_v[..., tf.newaxis, :, tf.newaxis]
            beta_a = self.beta_a[..., tf.newaxis, :, tf.newaxis]
            # Temperature schedule: lambda anneals linearly from temp_min to temp_max
            temp_min = 1.0
            temp_max = min(self.num_em_steps, 3.0)
            for step in range(self.num_em_steps):
                with tf.variable_scope("iteration_%d" % step):
                    temp = temp_min + (temp_max - temp_min) * step / max(1.0, self.num_em_steps - 1.0)
                    mean_h, stdev_h, out_activations = self._m_step(routing, votes, activations, beta_v, beta_a, temperature=temp)
                    # Skip the E-step on the last iteration; the final M-step output is used
                    if step < self.num_em_steps - 1:
                        routing = self._e_step(mean_h, stdev_h, out_activations, votes)
            # Now that the EM routing is done, calculate the output pose and activations
            out_poses = tf.squeeze(mean_h, axis=-3)
            out_activations = tf.squeeze(out_activations, axis=[-3,-1])
        return out_poses, out_activations, routing
    def _transform(self, _input, output_capsule_size, size, pose_width, pose_height, trainable=True):
        """
        Multiply each input pose by a learned transformation matrix to
        produce one vote per (input capsule, output capsule) pair.
        Also creates self.pose_weights as a side effect.
        Returns votes of shape (size, IC, OC, PW*PH).
        """
        # Debug shape prints (Python 2 statements)
        print _input.get_shape()
        print output_capsule_size
        print size
        print pose_width
        print pose_height
        num_input_capsules = _input.get_shape()[1].value
        output = tf.reshape(_input, shape=[size, num_input_capsules, 1, pose_width, pose_height])
        weight_shape = [1, num_input_capsules, output_capsule_size, pose_width, pose_height]
        self.pose_weights = weight_variable(weight_shape, 'W_'+self.name, trainable=trainable, is_conv=False)
        # Broadcast the (shared) weights over the batch/spatial axis and the
        # input poses over the output-capsule axis before the matmul.
        w = tf.tile(self.pose_weights, [size, 1, 1, 1, 1])
        output = tf.tile(output, [1,1,output_capsule_size,1,1])
        votes = tf.matmul(output, w)
        votes = tf.reshape(votes, [size, num_input_capsules, output_capsule_size, pose_width*pose_height])
        return votes
    def build(self, pose_layer, activation_layer, trainable=True):
        """
        Construct the convolution capsule layer
        pose_layer: a primary or convolution capsule layer with shape (N, W, H, C, POSE_W, POSE_H)
        activation_layer: (N, W, H, C)
        Returns (poses, activations, routing, pose_weights, beta_v, beta_a).
        """
        # Some useful numbers in a more legible format
        pose_shape = pose_layer.get_shape()
        kernel_width, kernel_height = self.kernel_shape
        num_input_capsules = pose_shape[3].value
        pose_width = pose_shape[4].value
        pose_height = pose_shape[5].value
        print pose_layer.get_shape()
        with tf.variable_scope(self.name):
            # Tile the activations and input poses
            # The input capsules' pose matrices are tiled to the spatial dimension of the output, allowing multiplication
            # later with the transformation matricies to generate votes
            # The input capsules' activation matricies are tiled for EM routing
            # Tile the pose matrix so that it can be multiplied with the transformation weights to generate the votes
            input_poses = self._tile(pose_layer)
            # Tile the activations for use with EM routing
            input_activations = self._tile(activation_layer)
            spatial_width = input_activations.get_shape()[1].value
            spatial_height = input_activations.get_shape()[2].value
            # Reshape the tensors for later operations
            input_poses = tf.reshape(input_poses, shape=[-1, kernel_width*kernel_height*num_input_capsules, pose_width*pose_height])
            input_activations = tf.reshape(input_activations, shape=[-1, spatial_width, spatial_height, kernel_width*kernel_height*num_input_capsules])
            # Compute the votes
            with tf.variable_scope('votes'):
                # Create the transformation matrix (weights)
                # weight_shape = [1, num_input_capsules, self.num_capsules, pose_width, pose_height]
                # self.pose_weights = weight_variable(weight_shape, 'W_'+self.name, trainable=trainable, is_conv=False)
                # size of the multiplication
                vote_size = self.batch_size*spatial_width*spatial_height
                # Tile the weight matrix and poses by the batch size
                # w = tf.tile(self.pose_weights, [vote_size, 1,1,1,1])
                # reshaped_poses = tf.reshape(input_poses, shape=[vote_size, kernel_width*kernel_height*num_input_capsules, 1, pose_width, pose_height])
                # tiled_poses = tf.tile(reshaped_poses, [1, 1, self.num_capsules, 1, 1])
                # Calculate the votes
                # votes = tf.matmul(tiled_poses, w)
                # self.votes = tf.reshape(votes, shape=[self.batch_size, spatial_width, spatial_height, num_input_capsules, self.num_capsules, pose_width*pose_height])
                votes = self._transform(input_poses, self.num_capsules, vote_size, pose_width, pose_height)
                vote_shape = votes.get_shape()
                self.votes = tf.reshape(votes, shape=[self.batch_size, spatial_width, spatial_height, vote_shape[-3].value, vote_shape[-2].value, vote_shape[-1].value]) #self.num_capsules, pose_width*pose_height])
                print self.name + " votes shape: " + str(self.votes.get_shape())
            # Compute the routing
            with tf.variable_scope('routing'):
                # Create beta variables for each capsule
                self.beta_v = weight_variable([1,1,1,self.num_capsules], 'beta_v_'+self.name, trainable=trainable, is_conv=False)
                self.beta_a = weight_variable([1,1,1,self.num_capsules], 'beta_a_'+self.name, trainable=trainable, is_conv=False)
                # Use EM routing to compute the pose and activations
                poses, self.activations, routing = self._routing(self.votes, input_activations)
                self.routing = routing
                # Reshape the pose matrix
                pose_shape = poses.get_shape()
                self.poses = tf.reshape(poses, [pose_shape[0], pose_shape[1], pose_shape[2], pose_shape[3], pose_width, pose_height])
                print self.name + " pose shape: " + str(self.poses.get_shape())
                print self.name + " activations shape: " + str(self.activations.get_shape())
                print self.name + " routing shape: " + str(self.routing.get_shape())
        return self.poses, self.activations, self.routing, self.pose_weights, self.beta_v, self.beta_a
class ClassCaps_EM:
    """
    The final class-capsule layer, using EM routing.

    Collapses the spatial grid of input capsules into one capsule per class,
    adding coordinate information to the votes ("coordinate addition") before
    routing.
    """
    def __init__(self, num_classes, batch_size, **kwargs):
        """
        Create a holder for the class capsule layer.
        Arguments:
            num_classes - number of output (class) capsules
            batch_size - static batch size (required for explicit reshapes)
        Optional Arguments:
            name - A name for the layer (used as variable scope)
            num_em_steps - number of EM routing iterations (default 3)
            epsilon - numerical-stability constant (default 1e-8)
        """
        # Simply hold the parameters for now
        self.num_classes = num_classes
        self.batch_size = batch_size
        self.name = kwargs.get("name", None)
        self.num_em_steps = kwargs.get("num_em_steps", 3)
        self.epsilon = kwargs.get("epsilon", 1e-8)
        # Placeholder for the weights for this layer
        self.pose_weights = None
        self.beta_v = None
        self.beta_a = None
        # Placeholder for activations of this layer
        self.votes = None
        self.routing = None
    # BLACK BOX helper functions, modified from jhui.github.io
    def _tile(self, input_layer):
        """
        Perform tiling and convolution to prepare the input pose and activation to the
        correct spatial dimension for voting and EM-routing.

        NOTE(review): this method references self.kernel_shape and self.stride,
        which this class never defines, and it is not called by build() —
        it appears to be copied from ConvCaps_EM and is effectively dead,
        broken code here. Confirm and consider removing.

        input_layer: a pose layer with shape (N, W, H, C, POSE_W, POSE_H) or
        an activation layer with shape (N, W, H, C)
        return: a tensor whose dimensions are (N, W, H, K, O)
        K = the flattened kernel shape (kernel_width x kernel_height)
        O = the flattened pose and/or activation (pose_width x pose_height x num_input_capsules)
        """
        # Extract relevent sizes from the input
        input_shape = input_layer.get_shape()
        input_width = input_shape[1].value
        input_height = input_shape[2].value
        num_input_capsules = input_shape[3].value
        kernel_width, kernel_height = self.kernel_shape
        if len(input_shape) > 5: # Is this a pose tensor?
            output_channel_size = num_input_capsules*input_shape[4].value*input_shape[5].value
        else: # An activation tensor
            output_channel_size = num_input_capsules
        # Flatten the input so that it is (?, W, H, OUT_C)
        input_flat = tf.reshape(input_layer, shape=[-1, input_width, input_height, output_channel_size])
        # Create the tile filter operation
        # NOTE(review): unlike ConvCaps_EM._tile, the channel dim here is
        # num_input_capsules rather than output_channel_size — TODO confirm.
        tile_filter = np.zeros(shape=[kernel_width, kernel_height, num_input_capsules, kernel_width*kernel_height], dtype=np.float32)
        for i in range(kernel_width):
            for j in range(kernel_height):
                tile_filter[i,j,:,i*kernel_height + j] = 1.0
        tile_filter_op = tf.constant(tile_filter, dtype=tf.float32)
        # Perform the tiling
        output = tf.nn.depthwise_conv2d(input_flat, tile_filter_op, strides=[1, self.stride, self.stride, 1], padding='VALID')
        # Get the width and height of the output
        output_shape = output.get_shape()
        output_width = output_shape[1].value
        output_height = output_shape[2].value
        # Put the right numbers in the right places
        output = tf.reshape(output, shape=[-1, output_width, output_height, num_input_capsules, kernel_width*kernel_height])
        output = tf.transpose(output, perm=[0,1,2,4,3])
        return output
    def _e_step(self, mean_h, stdev_h, activations, votes):
        """
        Perform an expectation step, i.e., the routing assignment
        mean_h: (N, OW, OH, 1, OC, PW*PH)
        stdev_h: (N, OW, OH, 1, OC, PW*PH)
        activations: (N, OW, OH, 1, OC, 1)
        votes: (N, OW, OH, KW x KH x IC, OC, PW*PH)
        return: routing
        """
        # We are calculating the log probability for P (log of a Gaussian pdf;
        # working in log-space avoids underflow)
        o_p0 = -tf.reduce_sum(tf.square(votes - mean_h) / (2*tf.square(stdev_h)), axis=-1, keep_dims=True)
        o_p1 = -tf.reduce_sum(tf.log(stdev_h + self.epsilon), axis=-1, keep_dims=True)
        # o_p is the probability density of the h-th component of the vote from i to j
        # (N, OW, OH, 1, OC, PWxPH)
        o_p = o_p0 + o_p1
        # The routing is the softmax of the probability distributions,
        # taken over the output-capsule axis (second to last).
        zz = tf.log(activations + self.epsilon) + o_p
        routing_assignments = tf.nn.softmax(zz, dim=len(zz.get_shape().as_list())-2)
        return routing_assignments
    def _m_step(self, routing_assignments, votes, activations, beta_v, beta_a, temperature):
        """
        Perform a maximization step: refit each output capsule's Gaussian.
        routing_assignments: (KW x KH x IC, OC, 1)
        votes: (N, OH, OW, KW x KH x IC, OC, PW x PH)
        activations: (N, OH, OW, KW x KH x IC, 1, 1)
        beta_v: (1, 1, 1, 1, OC, 1)
        beta_a: (1, 1, 1, 1, OC, 1)
        temperature: lambda
        return out_mean, out_stdev, out_activation
        """
        routing_prime = routing_assignments * activations
        # Sum over all input capulse
        routing_prime_sum = tf.reduce_sum(routing_prime, axis=-3, keep_dims=True, name='routing_prime_sum')
        # Calculate mean and std_dev for all h
        mean_h = tf.reduce_sum(routing_prime * votes, axis=-3, keep_dims=True) / routing_prime_sum
        stdev_h = tf.sqrt(tf.reduce_sum(routing_prime * tf.square(votes - mean_h), axis=-3, keep_dims=True) / routing_prime_sum)
        # Calculate cost
        cost_h = (beta_v + tf.log(stdev_h + self.epsilon)) * routing_prime_sum
        # The relative variance between each channel determines which one should activate
        cost_sum = tf.reduce_sum(cost_h, axis=-1, keep_dims=True)
        cost_mean = tf.reduce_mean(cost_sum, axis=-2, keep_dims=True)
        cost_stdev = tf.sqrt(tf.reduce_sum(tf.square(cost_sum - cost_mean), axis=-2, keep_dims=True)/ cost_sum.get_shape().as_list()[-2])
        cost_h = beta_a + (cost_mean - cost_sum) / (cost_stdev + self.epsilon)
        # Activation - sigmoid(lambda * (beta_a - sum(cost)))
        out_activation = tf.sigmoid(temperature * cost_h)
        return mean_h, stdev_h, out_activation
    def _routing(self, votes, activations):
        """
        Run num_em_steps iterations of EM routing over the votes.
        votes: (N, OW, OH, KW x KH x IC, OC, PW x PH)
        activations: (N, OW, OH, KW x KH x IC)
        return: pose, activation
        """
        votes_shape = votes.get_shape().as_list()
        with tf.variable_scope('em_routing'):
            # Create the initial routing assignment as evenly distributed
            routing = tf.constant(1.0 / votes_shape[-2], shape=votes_shape[-3:-1] + [1], dtype=tf.float32)
            # Expand the dimensions of the activations
            activations = activations[..., tf.newaxis, tf.newaxis]
            # Similarly for beta_v and beta_a
            beta_v = self.beta_v[..., tf.newaxis, :, tf.newaxis]
            beta_a = self.beta_a[..., tf.newaxis, :, tf.newaxis]
            # Temperature schedule: lambda anneals linearly from temp_min to temp_max
            temp_min = 1.0
            temp_max = min(self.num_em_steps, 3.0)
            for step in range(self.num_em_steps):
                with tf.variable_scope("iteration_%d" % step):
                    temp = temp_min + (temp_max - temp_min) * step / max(1.0, self.num_em_steps - 1.0)
                    mean_h, stdev_h, out_activations = self._m_step(routing, votes, activations, beta_v, beta_a, temperature=temp)
                    # Skip the E-step on the last iteration; the final M-step output is used
                    if step < self.num_em_steps - 1:
                        routing = self._e_step(mean_h, stdev_h, out_activations, votes)
            # Now that the EM routing is done, calculate the output pose and activations
            out_poses = tf.squeeze(mean_h, axis=-3)
            out_activations = tf.squeeze(out_activations, axis=[-3,-1])
        return out_poses, out_activations, routing
    def _transform(self, _input, output_capsule_size, size, pose_width, pose_height, trainable=True):
        """
        Multiply each input pose by a learned transformation matrix to
        produce one vote per (input capsule, output capsule) pair.
        Also creates self.pose_weights as a side effect.
        Returns votes of shape (size, IC, OC, PW*PH).
        """
        # Debug shape prints (Python 2 statements)
        print _input.get_shape()
        print output_capsule_size
        print size
        print pose_width
        print pose_height
        num_input_capsules = _input.get_shape()[1].value
        output = tf.reshape(_input, shape=[size, num_input_capsules, 1, pose_width, pose_height])
        weight_shape = [1, num_input_capsules, output_capsule_size, pose_width, pose_height]
        self.pose_weights = weight_variable(weight_shape, 'W_'+self.name, trainable=trainable, is_conv=False)
        # Broadcast the (shared) weights over the batch/spatial axis and the
        # input poses over the output-capsule axis before the matmul.
        w = tf.tile(self.pose_weights, [size, 1, 1, 1, 1])
        output = tf.tile(output, [1,1,output_capsule_size,1,1])
        votes = tf.matmul(output, w)
        votes = tf.reshape(votes, [size, num_input_capsules, output_capsule_size, pose_width*pose_height])
        return votes
    def _coord_addition(self, votes, width, height):
        """
        Coordinate addition: add each capsule's normalized (x, y) grid
        position to the first two components of its vote so the class
        capsules retain positional information.
        """
        pose_size = votes.get_shape()[-1].value
        coordinate_offset_hh = tf.reshape((tf.range(height, dtype=tf.float32) + 0.5) / height, [1, 1, height, 1, 1])
        coordinate_offset_h0 = tf.constant(0.0, shape=[1, 1, height, 1, 1], dtype=tf.float32)
        coordinate_offset_h = tf.stack([coordinate_offset_h0, coordinate_offset_hh] + [coordinate_offset_h0 for _ in range(pose_size-2)], axis=-1)
        coordinate_offset_ww = tf.reshape((tf.range(width, dtype=tf.float32) + 0.5) / width, [1, width, 1, 1, 1])
        coordinate_offset_w0 = tf.constant(0.0, shape=[1,width,1,1,1], dtype=tf.float32)
        coordinate_offset_w = tf.stack([coordinate_offset_ww, coordinate_offset_w0] + [coordinate_offset_w0 for _ in range(pose_size-2)], axis=-1)
        return votes + coordinate_offset_h + coordinate_offset_w
    def build(self, pose_layer, activation_layer, trainable=True):
        """
        Construct the class capsule layer
        pose_layer: a primary or convolution capsule layer with shape (N, W, H, C, POSE_W, POSE_H)
        activation_layer: (N, W, H, C)
        Returns (poses, activations, routing, pose_weights, beta_v, beta_a).
        """
        # Some useful numbers in a more legible format
        pose_shape = pose_layer.get_shape()
        spatial_width = pose_shape[1].value
        spatial_height = pose_shape[2].value
        num_input_capsules = pose_shape[3].value
        pose_width = pose_shape[4].value
        pose_height = pose_shape[5].value
        with tf.variable_scope(self.name):
            # Reshape the tensors for later operations
            input_poses = tf.reshape(pose_layer, shape=[self.batch_size*spatial_width*spatial_height, num_input_capsules, pose_width*pose_height])
            # Compute the votes
            with tf.variable_scope('votes'):
                # Create the transformation matrix (weights)
                # weight_shape = [1, num_input_capsules, self.num_classes, pose_width, pose_height]
                # self.pose_weights = weight_variable(weight_shape, 'W_'+self.name, trainable=trainable, is_conv=False)
                # size of the multiplication
                vote_size = self.batch_size*spatial_width*spatial_height
                # Tile the weight matrix and poses by the batch size
                # w = tf.tile(self.pose_weights, [self.batch_size, 1,1,1,1])
                # reshaped_poses = tf.reshape(input_poses, shape=[vote_size, num_input_capsules, 1, pose_width, pose_height])
                # tiled_poses = tf.tile(reshaped_poses, [1, 1, self.num_classes, 1, 1])
                # Calculate the votes
                # votes = tf.matmul(tiled_poses, w)
                # votes = tf.reshape(votes, shape=[self.batch_size, spatial_width, spatial_height, num_input_capsules, self.num_classes, pose_width*pose_height])
                votes = self._transform(input_poses, self.num_classes, vote_size, pose_width, pose_height)
                self.votes = tf.reshape(votes, shape=[self.batch_size, spatial_width, spatial_height, num_input_capsules, self.num_classes, pose_width*pose_height])
                self.votes = self._coord_addition(self.votes, spatial_width, spatial_height)
                print self.name + " votes shape: " + str(self.votes.get_shape())
            # Compute the routing
            with tf.variable_scope('routing'):
                # Create beta variables for each capsule
                self.beta_v = weight_variable([1,1,1,self.num_classes], 'beta_v_'+self.name, trainable=trainable, is_conv=False)
                self.beta_a = weight_variable([1,1,1,self.num_classes], 'beta_a_'+self.name, trainable=trainable, is_conv=False)
                votes_shape = self.votes.get_shape()
                # Collapse the spatial grid and input capsules into one axis for routing
                votes = tf.reshape(self.votes, shape = [self.batch_size, votes_shape[1]*votes_shape[2]*votes_shape[3], votes_shape[4], votes_shape[5]])
                input_activations = tf.reshape(activation_layer, shape=[self.batch_size, votes_shape[1]*votes_shape[2]*votes_shape[3]])
                # Use EM routing to compute the pose and activations
                poses, self.activations, self.routing = self._routing(votes, input_activations)
                # Reshape the pose matrix
                pose_shape = poses.get_shape()
                self.poses = tf.reshape(poses, [self.batch_size, self.num_classes, pose_width, pose_height])
                self.activations = tf.squeeze(self.activations)
                self.routing = tf.squeeze(self.routing)
                print self.name + " pose shape: " + str(self.poses.get_shape())
                print self.name + " activations shape: " + str(self.activations.get_shape())
                print self.name + " routing shape: " + str(self.routing.get_shape())
        return self.poses, self.activations, self.routing, self.pose_weights, self.beta_v, self.beta_a
class Convolutional:
    """
    A standard 2-D convolutional layer.

    Configuration is captured at construction time; build() materializes
    the weights, bias and conv op in the TensorFlow graph.
    """
    def __init__(self, kernel_shape, num_kernels, **kwargs):
        """
        Store the layer configuration.
        Arguments:
            kernel_shape - (width, height) of each kernel
            num_kernels - number of kernels (feature maps) to produce
        Optional Arguments:
            name - variable-scope name (default None)
            stride - spatial stride of the kernel (default 1)
            padding - "SAME" or "VALID" (default "VALID")
            activation_function - nonlinearity to apply (default tf.nn.relu)
        """
        self.kernel_shape = kernel_shape
        self.num_kernels = num_kernels
        self.name = kwargs.get("name", None)
        self.stride = kwargs.get("stride", 1)
        self.padding = kwargs.get("padding", "VALID")
        self.activation_function = kwargs.get("activation_function", tf.nn.relu)
        # Filled in by build()
        self.weights = None
        self.bias = None
        self.layer = None
    def build(self, input_layer, trainable=True):
        """
        Materialize the layer in the graph.
        Returns (output tensor, weights, bias).
        """
        with tf.variable_scope(self.name):
            # Kernel shape depends on the incoming channel count
            in_channels = input_layer.get_shape()[-1].value
            kernel_w, kernel_h = self.kernel_shape[0], self.kernel_shape[1]
            self.weights = weight_variable([kernel_w, kernel_h, in_channels, self.num_kernels], 'weights', trainable)
            self.bias = bias_variable([self.num_kernels], 'bias', trainable)
            conv = tf.nn.conv2d(input_layer, self.weights,
                                strides=[1, self.stride, self.stride, 1],
                                padding=self.padding)
            self.layer = conv + self.bias
            if self.activation_function:
                self.layer = self.activation_function(self.layer)
        return self.layer, self.weights, self.bias
class FullConnection:
    """
    A dense (fully connected) layer: y = act(x W + b).
    """
    def __init__(self, output_size, **kwargs):
        """
        Store the layer configuration.
        Arguments:
            output_size - number of output units
        Optional Arguments:
            name - variable-scope name (default None)
            activation_function - nonlinearity to apply (default tf.nn.relu)
        """
        self.output_size = output_size
        self.name = kwargs.get("name", None)
        self.activation_function = kwargs.get("activation_function", tf.nn.relu)
        # Filled in by build()
        self.weights = None
        self.bias = None
        self.layer = None
    def build(self, input_layer, trainable=True):
        """
        Materialize the layer in the graph.
        Returns (output tensor, weights, bias).
        """
        with tf.variable_scope(self.name):
            in_size = input_layer.get_shape()[-1].value
            self.weights = weight_variable([in_size, self.output_size], 'weights', trainable, False)
            self.bias = bias_variable([self.output_size], 'bias', trainable)
            pre_activation = tf.matmul(input_layer, self.weights) + self.bias
            self.layer = pre_activation
            if self.activation_function:
                self.layer = self.activation_function(pre_activation)
        return self.layer, self.weights, self.bias
class Flatten:
    """
    Reshapes its input to (batch, features), collapsing all non-batch axes.
    """
    def __init__(self, **kwargs):
        """
        Optional Arguments:
            name - variable-scope name (default None)
        """
        self.name = kwargs.get("name", None)
        # Filled in by build()
        self.layer = None
    def build(self, input_layer, trainable=True):
        """
        Materialize the reshape in the graph.
        Returns (output tensor, None, None) — the layer has no parameters.
        """
        with tf.variable_scope(self.name):
            # Multiply every non-batch dimension together to get the flat size
            flat_dim = tf.Dimension(1)
            for dim in input_layer.get_shape()[1:].dims:
                flat_dim = flat_dim * dim
            self.layer = tf.reshape(input_layer, [-1, flat_dim.value])
        return self.layer, None, None
| danathughes/AtariRL | models/parts.py | Python | mit | 33,275 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def test():
    """Smoke-test helper that announces the purpose of this module."""
    print("Hello,this is list operator class that prepare for snake's body.")
class Node(object):
    """A singly linked-list node holding one snake-body segment's position."""
    def __init__(self,x,y,p=0):
        # Current grid coordinates of this segment.
        self.cur_x = x
        self.cur_y = y
        # self.cur_dir = dir
        # Next node in the chain; the integer 0 is the null/terminator sentinel.
        self.next = p
class LinkList(object):
    """A singly linked list of Node objects backing the snake's body.

    The integer 0 is used throughout as the null sentinel, both for an
    empty list head and for the last node's ``next`` pointer.  All indices
    are 0-based; on invalid input the methods print a message and return
    None instead of raising.
    """
    def __init__(self):
        # 0 means "empty list" (null sentinel).
        self.head = 0
    def __getitem__(self, key):
        """Return the Node at position *key*, or None (with a message) on error."""
        if self.is_empty():
            print('linklist is empty.')
            return
        elif key < 0 or key >= self.getlength():
            # Fixed off-by-one: the last valid index is getlength() - 1.
            print('the given key is error')
            return
        else:
            return self.getitem(key)
    def __setitem__(self, key, item):
        """Replace the node at position *key* with a node built from *item*."""
        if self.is_empty():
            print('linklist is empty.')
            return
        elif key < 0 or key >= self.getlength():
            print('the given key is error')
            return
        else:
            self.delete(key)
            # Fixed: insert() takes (index, item); the item argument was
            # previously dropped, which raised a TypeError at runtime.
            return self.insert(key, item)
    def initlist(self, data):
        """Rebuild the list from a sequence of objects exposing cur_x/cur_y."""
        self.head = Node(data[0].cur_x, data[0].cur_y)
        p = self.head
        for i in data[1:]:
            node = Node(i.cur_x, i.cur_y)
            p.next = node
            p = p.next
    def getlength(self):
        """Return the number of nodes by walking the chain (O(n))."""
        p = self.head
        length = 0
        while p != 0:
            length += 1
            p = p.next
        return length
    def is_empty(self):
        """Return True when the list holds no nodes."""
        return self.getlength() == 0
    def clear(self):
        """Drop every node by resetting the head sentinel."""
        self.head = 0
    def append(self, item):
        """Add a node built from *item* at the tail of the list."""
        q = Node(item.cur_x, item.cur_y)
        if self.head == 0:
            self.head = q
        else:
            p = self.head
            while p.next != 0:
                p = p.next
            p.next = q
    def getitem(self, index):
        """Return the Node at *index*, or None (with a message) if absent."""
        if self.is_empty():
            print('Linklist is empty.')
            return
        j = 0
        p = self.head
        while p.next != 0 and j < index:
            p = p.next
            j += 1
        if j == index:
            return p
        else:
            print('target is not exist!')
    def insert(self, index, item):
        """Insert a node built from *item* before position *index*."""
        if self.is_empty() or index < 0 or index > self.getlength():
            print('Linklist is empty.')
            return
        if index == 0:
            # Fixed: build the node from cur_x/cur_y (Node has no .x/.y/.dir),
            # and return immediately so the head is not inserted twice.
            self.head = Node(item.cur_x, item.cur_y, self.head)
            return
        p = self.head
        post = self.head
        j = 0
        while p.next != 0 and j < index:
            post = p
            p = p.next
            j += 1
        if index == j:
            # Link the new node between post and p; the Node constructor
            # already points the new node at p, so no extra relinking needed.
            post.next = Node(item.cur_x, item.cur_y, p)
        elif index == j + 1 and p.next == 0:
            # Inserting just past the tail degenerates to an append.
            p.next = Node(item.cur_x, item.cur_y)
    def delete(self, index):
        """Remove the node at position *index*."""
        if self.is_empty() or index < 0 or index >= self.getlength():
            # Fixed off-by-one: index == getlength() is out of range.
            print('Linklist is empty.')
            return
        if index == 0:
            # Fixed: deleting the head now unlinks it instead of referencing
            # an undefined "item" variable and inserting a new node.
            self.head = self.head.next
            return
        p = self.head
        post = self.head
        j = 0
        while p.next != 0 and j < index:
            post = p
            p = p.next
            j += 1
        if index == j:
            post.next = p.next
    def index(self, value):
        """Return the position of the first node whose cur_x equals *value*,
        or -1 when no such node exists."""
        if self.is_empty():
            print('Linklist is empty.')
            return
        p = self.head
        i = 0
        # Fixed: nodes store cur_x, not x.
        while p.next != 0 and not p.cur_x == value:
            p = p.next
            i += 1
        if p.cur_x == value:
            return i
        else:
            return -1
| LHMike/RPi-snake | linkList.py | Python | apache-2.0 | 3,413 |
"""
srt.annotation module
Classes to support reading in tab-delimited annotation files,
e.g. biomart, gff, ...
"""
from srt.core import *
import sys, re, warnings
from exceptions import NotImplementedError
from srt.intervals import Interval,Intersector
from srt.useful import smartopen
def loadAnnotationList(filename, columns, converters=None, header=True, separator="\t", **kw):
    """Read a whole annotation file and return its features as a list.

    If no converter is passed, one is built automatically: columns whose
    name contains start/end become ints, and a column named strand is
    converted from 1/-1 to +/-.

    @param filename: File name
    @param columns: Names of columns/fields
    @keyword converters: List of field converting functions in same order as columns (default None)
    @keyword header: Skip first line (default True)
    @keyword separator: Column delimiter (default "\\t")
    """
    reader = AnnotationReader(filename, columns, converters=converters,
                              header=header, separator=separator, **kw)
    return [feature for feature in reader]
def hashListOnAccession(annotation, geneIdAttribName="geneId",
                        startAttribName="start", endAttribName="end"):
    """Group annotations by gene accession; each group is sorted by
    decreasing transcript length (|end - start|).

    @param annotation: Annotation list
    @param geneIdAttribName: Name of geneId field (default "geneId")
    @param startAttribName: Name of start field (default "start")
    @param endAttribName: Name of end field (default "end")
    """
    annotationDict = {}
    for record in annotation:
        annotationDict.setdefault(record[geneIdAttribName], []).append(record)
    # Longest transcript first within each accession.
    for records in annotationDict.values():
        records.sort(key=lambda r: -abs(r[endAttribName] - r[startAttribName]))
    return annotationDict
def loadAnnotationIntersectors(filename, columns, converters=None,
        header=True, separator="\t", referenceColumn="chrom", startColumn="start",
        endColumn="end", strandColumn="strand", pad=0, returnAnnotation=False, **kw):
    """Load annotation into Intersectors, one per reference (chromosome).

    @param filename: File name
    @param columns: Names of columns in annotation file (list)
    @keyword converters: List of field converting functions in same order as columns (default None)
    @keyword header: Skip first line (default True)
    @keyword separator: Column delimiter (default "\\t")
    @keyword referenceColumn: Reference column/attribute name (default "chrom")
    @keyword startColumn: Start column/attribute name (default "start")
    @keyword endColumn: End column/attribute name (default "end")
    @keyword strandColumn: Strand column/attribute name (default "strand")
    @keyword pad: Pad the annotation with this much sequence
    @keyword returnAnnotation: Return annotation; function returns a tuple (default False)
    """
    annotation = loadAnnotationList(filename, columns, converters=converters,
                                    header=header, separator=separator, **kw)
    geneIntersectors = {}
    for gene in annotation:
        chrom = gene[referenceColumn]
        strand = gene[strandColumn]
        start = gene[startColumn]
        end = gene[endColumn]
        # Normalise the interval: start <= end, and never zero-length.
        if start > end:
            start, end = end, start
        elif start == end:
            end += 1
        # NOTE(review): intersectors are keyed on chrom only, while the
        # demo in readGeneAnnotationTest looks up (chrom, strand) keys --
        # confirm which keying is intended.
        key = chrom
        interval = Interval(start - pad, end + pad, value=gene)
        # Bug fix: this used a bare "except:", which silently swallowed
        # *any* error (not just the KeyError for a new chromosome).
        try:
            intersector = geneIntersectors[key]
        except KeyError:
            intersector = geneIntersectors[key] = Intersector()
        intersector.add_interval(interval)
    if returnAnnotation:
        return geneIntersectors, annotation
    else:
        return geneIntersectors
class Feature(object):
    """A feature on a sequence.

    The standard fields are name, chrom, start and end; `fields` lists the
    attribute names in output order, and arbitrary extra attributes may be
    added by callers.
    """

    def __init__(self, columns=None):
        """Constructor.

        @keyword columns: ordered attribute names to report (default
            ["name", "chrom", "start", "end"])
        """
        self.name = ""
        self.chrom = ""
        self.start = -1
        self.end = -1
        self.fields = columns if columns else ["name", "chrom", "start", "end"]

    def __getitem__(self, key):
        # Dictionary-style access to any attribute.
        return self.__dict__[key]

    def __repr__(self):
        # Tab-delimited dump of the declared fields, in order.
        return '\t'.join(str(self.__dict__[field]) for field in self.fields)

    def asInterval(self):
        """Represent the feature as an Interval object."""
        return Interval(self.start, self.end, value=self)
def AnnotationFile(iFileHandle, columns, converters=None, header=True, separator="\t", **kw):
    """Generic entry point for annotation readers (and, eventually, writers).

    Additional keywords are passed through to the reader object.

    @param iFileHandle: File name or object
    @param columns: Names of columns/fields
    @keyword converters: List of field converting functions in same order as columns (default None)
    @keyword header: Skip first line (default True)
    @keyword separator: Column delimiter (default "\\t")
    """
    reader = AnnotationReader(iFileHandle, columns, converters=converters,
                              header=header, separator=separator, **kw)
    return reader
class AnnotationReader(AbstractDataReader):
    """
    Class supporting the reading of annotation files.
    """
    def __init__(self, iFileHandle, columns, converters=None, header=True, separator='\t', **kw):
        """
        Constructor
        @param iFileHandle: File name or object
        @keyword columns: List of column names (default None)
        @keyword converters: List of types or conversion functions (default None)
        @keyword header: Skip header line (default True)
        @keyword separator: Column separator/delimiter (default '\\t')
        """
        self.iFile = smartopen(iFileHandle)
        self.columns = columns
        if converters==None:
            # No converters given: guess from the column names --
            # start/end-like columns become ints, strand columns go through
            # strandConverter, everything else is left as a raw string.
            converters = []
            for name in columns:
                if "start" in name or "end" in name:
                    converters.append(int)
                elif "strand" in name:
                    converters.append(strandConverter)
                else:
                    converters.append(None)
        elif type(converters)==list and type(converters[0])==tuple: # list of tuples
            # (column, converter) pairs: re-order to match `columns`;
            # columns without a converter get None (no conversion).
            converters = dict(converters)
            converters = [converters.get(column) for column in columns]
        self.converters = converters
        self.separator = separator
        if header:
            # Discard the single header line.
            headerline = self.iFile.readline()
        if 'fields' in kw:
            warnings.warn("Deprecated Keyword fields. Use columns" if False else "Deprecated keyword fields. Use columns",
                          category=DeprecationWarning)
            self.columns = kw['fields']
        if 'sep' in kw:
            warnings.warn("Deprecated keyword fields. Use columns",
                          category=DeprecationWarning)
            self.separator = kw['sep']
    def initFromDict(self, d):
        """Set column names and converters using a dictionary"""
        # NOTE(review): relies on keys() and values() iterating in matching
        # order -- true in CPython, but worth confirming for the dict types
        # passed in here.
        self.columns,self.converters = d.keys(),d.values()
    def _generator(self):
        # Yield one Feature per input line; a field whose converter is None
        # or fails keeps the raw string token (see the broad except below).
        data = []
        for line in self.iFile:
            tokens = line.strip().split(self.separator)
            f = Feature(self.columns)
            for field,converter,token in zip(self.columns, self.converters, tokens):
                # print "_generator", field,converter,token
                try:
                    f.__dict__[field] = converter(token)
                except:
                    f.__dict__[field] = token
            yield f
def DelimitedDataFile(iFileHandle, mode="r", **kw):
    """Factory returning a DelimitedDataReader or a DelimitedDataWriter,
    depending on the requested mode.

    @param iFileHandle: file name or object
    @keyword mode: file mode (r,w,a)
    """
    if "r" in mode:
        return DelimitedDataReader(iFileHandle, **kw)
    if "w" in mode or "a" in mode:
        return DelimitedDataWriter(iFileHandle, mode=mode, **kw)
class DelimitedDataReader(object):
    """Iterate over the rows of a delimited text file, yielding one list
    of string tokens per line."""

    def __init__(self, iFileHandle, header=False, separator="\t"):
        """
        Constructor
        @param iFileHandle: File name or object
        @keyword header: Skip header line (default False)
        @keyword separator: Column separator/delimiter (default '\\t')
        """
        self.iFile = smartopen(iFileHandle)
        self.separator = separator
        if header:
            self.iFile.readline()  # discard the header line

    def __iter__(self):
        self._iter = self._generator()
        return self

    def next(self):
        # Python 2 iterator protocol: pull one row, or signal exhaustion.
        for row in self._iter:
            return row
        raise StopIteration

    def _generator(self):
        for line in self.iFile:
            yield line.strip().split(self.separator)
class DelimitedDataWriter(AbstractDataFile):
    """Simple class for writing delimited files"""
    def __init__(self, fileHandle, header=None, separator="\t", mode='w', **kw):
        """
        @param iFileHandle: Output file or name
        @keyword header: Optional header line, written verbatim first
        @keyword separator: Column separator/delimiter (default '\\t')
        @keyword mode: File mode - write(w) or append(a)
        """
        assert mode in ('w', 'a')
        self.iFile = smartopen(fileHandle, mode)
        # Keep the resolved file name for callers (smartopen may have
        # opened a name or wrapped an existing file object).
        self.iFilename = self.iFile.name
        self.separator = separator
        if header:
            self.iFile.write(header + "\n")
    def write(self, row):
        """Write a row
        @param row: list of row entries
        """
        # Stringify every cell, then join with the separator.
        self.iFile.write(self.separator.join([str(_) for _ in row]) + "\n")
    def __call__(self, row):
        """Call interface to write.
        @param row: list of row entries
        """
        self.write(row)
def strandConverter(istrand):
    """Convert strand from '1'/'-1' to '+'/'-'. Also handles integers.

    @param istrand: strand ('1', '+1' or 1 for forward; '-1' or -1 for reverse)
    @return: '+' or '-'
    @raise Exception: if istrand is not a recognised strand value
    """
    if istrand in ['1', '+1', 1]:
        strand = '+'
    elif istrand in ['-1', -1]:
        strand = '-'
    else:
        # Bug fix: the message previously referenced the unbound local
        # `strand`, so bad input raised UnboundLocalError instead of the
        # intended, informative Exception.
        raise Exception('Inappropriate strand value %s' % str(istrand))
    return strand
def readGeneAnnotationTest():
    """Ad-hoc manual test: load an Ensembl gene-location dump (hard-coded
    local path) and exercise the list / hash / intersector loaders."""
    import time
    filename = "/Users/papenfuss/databases/platypus/annotation/ensembl/Release54/mart_gene_location.txt"
    columns = ["geneId", "chrom", "start", "end", "strand"]
    genes = loadAnnotationList(filename, columns)
    print genes[0:10]
    print
    time.sleep(3)
    print hashListOnAccession(genes)
    print
    time.sleep(3)
    geneIntersectors = loadAnnotationIntersectors(filename, columns)
    # NOTE(review): loadAnnotationIntersectors keys on chrom only, but this
    # lookup uses a (chrom, strand) tuple -- confirm which keying is intended.
    print geneIntersectors[("Ultra187", "+")].find(1, 1000000)


if __name__=="__main__":
    readGeneAnnotationTest()
| PapenfussLab/Srtools | srt/annotation.py | Python | artistic-2.0 | 10,820 |
from workflow_diagnostics import get_diagnostics_dict
from workflow_util import upload_to_s3
from sklearn import preprocessing
import cPickle as pickle
import pandas as pd
import os
def run_model(training, testing, features, outcome, clf,
clf_name, normalize=True, verbose=True):
# NOTE: You should set the clf seed ahead of time
if verbose:
print 'Starting training of: {}'.format(clf_name)
print '----------'
print 'Num Features: {}'.format(len(features))
print 'Shape of Training: {}'.format(training.shape)
print 'Shape of Testing: {}'.format(testing.shape)
print 'Outcome: {}'.format(outcome)
X_train, y_train = training[features].values, training[outcome].values
X_test = testing[features].values
if normalize:
X_train = preprocessing.StandardScaler().fit(X_train).transform(X_train)
X_test = preprocessing.StandardScaler().fit(X_test).transform(X_test)
fitted_clf = clf.fit(X_train, y_train)
if verbose:
print 'Finished Training'
print '\n'
print 'Starting Testing:'
print '----------'
predicted_probabilities = fitted_clf.predict_proba(X_test)
if verbose:
print 'Finished Testing...\n'
return fitted_clf, predicted_probabilities
def run_and_output_model_to_s3(training, testing, features, outcome, clf, clf_name, s3_path,
verbose=True, **kwargs):
fitted_clf, predicted_probs = run_model(training, testing, features, outcome, clf,
clf_name, verbose)
#Pickling happens here
os.mkdir('../results/temp/')
filepath = os.path.join('../results/temp', clf_name + '.pkl')
pickle.dump(fitted_clf, open(filepath, 'wb'))
print 'Uploading to S3 at {}'.format(s3_path)
upload_to_s3('../results/temp', clf_name + '.pkl', s3_path = s3_path)
print 'Done uploading {} to s3 \n'.format(filepath)
os.remove(filepath)
os.rmdir('../results/temp/')
# Putting the diagnostics dict into a dataframe and saving to results folder
diagnostics_dict = get_diagnostics_dict(fitted_clf, testing, features, outcome, clf_name, **kwargs)
results_df = pd.read_csv('../results/results.csv')
results_df = results_df.append([diagnostics_dict])
results_df.to_csv(path_or_buf='../results/results.csv', index=False)
return diagnostics_dict
| carlshan/ml_workflow | datascience_tools/modeling/workflow_model_setup.py | Python | mit | 2,346 |
"""
Objects which global optimization solvers.
"""
# pylint: disable=wildcard-import
from .bayesopt import *
from . import bayesopt
from . import functions
__all__ = []
__all__ += bayesopt.__all__
| jhartford/pybo | pybo/__init__.py | Python | bsd-2-clause | 200 |
from sklearn.naive_bayes import GaussianNB
from sklearn.naive_bayes import MultinomialNB
from sklearn.naive_bayes import BernoulliNB
from sklearn.cross_validation import train_test_split
from sklearn.metrics import mean_squared_error
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
import pandas as pd
df = pd.read_csv('dataset/winequality-red.csv', header=0, sep=';')
X = df[list(df.columns)[:-1]]
y = df['quality']
X_train, X_test, y_train, y_test = train_test_split(X, y)
modelg = GaussianNB()
modelg.fit(X_train, y_train)
y_predict = modelg.predict(X_test)
print "GausseanNB Score:" + str(modelg.score(X_test, y_test))
mse = mean_squared_error(y_predict, y_test)
print "RMSE:" + str(mse ** 0.5)
modelm = MultinomialNB()
modelm.fit(X_train, y_train)
y_predict = modelm.predict(X_test)
print "MultinomialNB Score:" + str(modelm.score(X_test, y_test))
mse = mean_squared_error(y_predict, y_test)
print "RMSE:" + str(mse ** 0.5)
modelb = BernoulliNB()
modelb.fit(X_train, y_train)
y_predict = modelb.predict(X_test)
print "BernoulliNB Score: " + str(modelb.score(X_test, y_test))
mse = mean_squared_error(y_predict, y_test)
print "RMSE:" + str(mse ** 0.5) | behrtam/wine-quality-prediction | naive-red.py | Python | mit | 1,207 |
#!/bin/python3
"""Find-Digits.py: determine how many digits evenly divide N"""
__author__ = "Sunil"
__copyright__ = "Copyright 2015, hacker_rank Project"
__license__ = "MIT"
__version__ = "1.0.0"
__email__ = "sunhick@gmail.com"
def count_dividing_digits(number):
    """Return how many decimal digits of `number` evenly divide it.

    Zero digits are skipped (division by zero never counts).  Repeated
    digits are counted once per occurrence, per the problem statement.
    """
    return sum(1 for digit in map(int, str(number))
               if digit != 0 and number % digit == 0)


if __name__ == '__main__':
    # One integer per line: first the number of test cases, then each N.
    testcases = int(input().strip())
    for _ in range(testcases):
        number = int(input().strip())
        print(count_dividing_digits(number))
| Sunhick/hacker_rank | Algorithms/Implementation/Find-Digits.py | Python | mit | 745 |
"""
Camera Motion Compensation
==========================
Generate a motion-stabilized video in which the camera motion is compensated.
Main function: `generate_stabilized_video`
"""
import sys
import os
import shutil
import tempfile
import subprocess
from glob import glob
import numpy as np
import cv2
import hsi
from camocomp.control_points import gen_pairwise_surf_control_points
from camocomp.extract_valid_track import extract_track
def exec_shell(cmd_line, raise_on_err=False):
    """ Execute a shell statement in a subprocess
    Parameters
    ----------
    cmd_line: string,
        the command line to execute (verbatim)
    raise_on_err: boolean, optional, default: False,
        whether to raise ValueError if something was dumped to stderr
    Returns
    -------
    stdout, stderr: strings containing the resulting output of the command
    """
    proc = subprocess.Popen(cmd_line, shell=True,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    if raise_on_err and err != "":
        raise ValueError("Error: cmd={}, stderr={}".format(cmd_line, err))
    return out.strip(), err.strip()
def duplicate_nonzero_img(img_fn):
    """ Make a copy of an image such that it has no 0-valued pixels
    Parameters
    ----------
    img_fn: string,
        path to an image file
    Returns
    -------
    out_img_fn: string,
        path to the output jpg image
    Notes
    -----
    Zero pixels are bumped to 5 so that the thresholding step in
    `extract_track` does not confuse genuine image content with padding.
    """
    pixels = cv2.imread(img_fn)
    pixels[pixels == 0] = 5
    dup_fn = img_fn[:-4] + '_dup.jpg'
    cv2.imwrite(dup_fn, pixels)
    return dup_fn
def pto_gen(img_fns, hfov, out_pto="project.pto"):
    """ Generate a Hugin .pto project file
    Parameters
    ----------
    img_fns: list,
        the (ordered) full paths to the video frames
    hfov: int,
        horizontal field of view in degrees
        (around 50 is ok for most non-fish-eye cameras)
    out_pto: string, optional, default: 'project.pto',
        output path to the generated panotools .pto file
    Notes
    -----
    Suitable as input for further tools such as the cpfind control-point
    generator.
    Inspired from pto_gen
    (http://hugin.sourceforge.net/docs/html/pto__gen_8cpp-source.html)
    but with some hacks to correct the generated m-line in the header.
    Uses the Hugin python scripting interface
    (http://wiki.panotools.org/Hugin_Scripting_Interface).
    """
    # projection type: 0 == rectilinear (2 == equirectangular)
    projection = 0
    assert projection >= 0, "Invalid projection number (%d)" % projection
    assert 1 <= hfov <= 360, "Invalid horizontal field of view (%d)" % hfov
    # hugin Panorama object
    pano = hsi.Panorama()
    # add the images in order, all with the same projection and fov
    for img_fn in img_fns:
        src_img = hsi.SrcPanoImage(img_fn)
        src_img.setProjection(projection)
        src_img.setHFOV(hfov)
        src_img.setExifCropFactor(1.0)
        pano.addImage(src_img)
    # check we added all of them
    n_inserted = pano.getNrOfImages()
    assert n_inserted == len(img_fns), "Didn't insert all images (%d < %d)" % \
        (n_inserted, len(img_fns))
    # output the .pto file (to a temp name; the header is fixed up below)
    pano.writeData(hsi.ofstream(out_pto + '.tmp'))  # same as pano.printPanoramaScript(...)
    # some bug in header: rewrite it manually (TODO through hsi?)
    with open(out_pto + '.tmp', 'r') as tmp_ff:
        with open(out_pto, 'w') as ff:
            # re-write the header: first two lines pass through untouched
            ff.write(tmp_ff.readline())
            ff.write(tmp_ff.readline())
            # force jpeg for the p-line (panorama/output line)
            p_line = tmp_ff.readline().strip().split()
            assert p_line[0] == 'p', "BUG: should be a p-line"
            ff.write(' '.join(p_line[:7]) + ' n"JPEG q100"\n')
            # remove extra 'f' param in the m-line (screws everything up if left here...)
            m_line = tmp_ff.readline().strip().split()
            assert m_line[0] == 'm', "BUG: should be a m-line"
            ff.write(' '.join(m_line[:3]) + ' ' + ' '.join(m_line[4:]) + '\n')
            # write all other lines verbatim
            for l in tmp_ff.readlines():
                ff.write(l)
    os.remove(out_pto + '.tmp')
def optimize_geometry(proj_file, optim_vars, optim_ref_fov=False):
    """ Optimise the geometric parameters
    Parameters
    ----------
    proj_file: string,
        the path to the Hugin project file (.pto)
    optim_vars: string,
        the set of variables to optimize for (separated by '_')
        - v: view point
        - p: pitch
        - y: yaw
        - r: roll
        - Tr{X,Y,Z}: translation
    optim_ref_fov: boolean, optional, default: False,
        whether to optimize the input's reference horizontal field
        of view (risky)
    Returns
    -------
    optim_proj_file: string,
        path of the Hugin project file containing the optimized
        values for the desired variables.
    Notes
    -----
    This is (by far) the most time consuming operation
    (because of hugin's autooptimiser).
    This is also the most likely function where the compensation process tends
    to fail.
    """
    optim_proj_file = proj_file + '.optim.pto'
    # modify the input pto to specify the optimization variables
    pano = hsi.Panorama()
    pano.readData(hsi.ifstream(proj_file))
    n_imgs = pano.getNrOfImages()
    var_tup = tuple(optim_vars.split('_'))
    for v in var_tup:
        assert v in ('v', 'p', 'y', 'r', 'TrX', 'TrY', 'TrZ'), \
            "Unknown var {0} in {1}".format(v, optim_vars)
    # every frame is optimised over the same variable set...
    optim_opts = [var_tup] * n_imgs  # fov, pitch, roll, yaw
    if optim_ref_fov:
        # ...except the 1st (reference) frame: optim only field of view
        # NOTE(review): ('v') is the string 'v', not a 1-tuple -- confirm
        # hsi accepts a bare string here.
        optim_opts[0] = ('v')
        # Note: do not optim. pitch, roll and yaw for this one, weird otherwise
    else:
        # 1st reference frame has the same fov as those of the input images
        optim_opts[0] = ()
    pano.setOptimizeVector(optim_opts)
    # overwrite the input project in place with the optimisation spec
    pano.writeData(hsi.ofstream(proj_file))
    # perform the optimization (TODO through hsi?)
    cmd = "autooptimiser -n -s -o {opto} {pto}"  # leveling can screw things up
    exec_shell(cmd.format(pto=proj_file, opto=optim_proj_file))
    # check for too large output (e.g. too wide panning or traveling)
    pano = hsi.Panorama()
    pano.readData(hsi.ifstream(optim_proj_file))
    opts = pano.getOptions()
    oh = opts.getHeight()
    ow = opts.getWidth()
    # reject panoramas larger than ~1000 x 5000 px: almost certainly a
    # degenerate optimisation result
    if oh * ow > 1e3 * 5e3:
        raise ValueError(
            "Degenerate case: too big output size ({0}, {1})\n".format(ow, oh) + \
            "May be caused by too large panning or translations\n" + \
            "=> Possible fixes: use a different field of view parameter or " + \
            "optimize only for different variables than {0}\n".format(optim_vars))
    return optim_proj_file
# TODO also estimate the camera motion from the bounding boxes
def warp_crop_and_generate_video(out_avi, optim_proj_file, n_imgs,
                                 crop_out=False, out_codec='mjpeg',
                                 tmp_dir='/tmp'):
    """ Generate a stabilized and cropped video from remapped frames
    Parameters
    ----------
    out_avi: string,
        output path to the generated motion-stabilized video
    optim_proj_file: string,
        path of the Hugin project file containing the optimized
        values for the desired variables.
        (obtained from the `optimize_geometry` function)
    n_imgs: int,
        number of images in the video
    crop_out: boolean, optional, default: False,
        automatically crop the output video.
    out_codec: string, optional, default: 'mjpeg',
        video codec to use for the output video
    tmp_dir: string, optional, default: '/tmp'
        temporary directory
    Notes
    -----
    When crop_out is set, a per-frame track of bounding boxes is also
    saved next to the video (same name, .npy extension, ulwh format).
    """
    # remapping to create the distorted frames in the full scene plane
    # (TODO through hsi?)
    rimgt = tmp_dir + '/remapped%04d.tif'
    cmd = "nona -m TIFF_m -o {tmp_dir}/remapped {pto}"
    exec_shell(cmd.format(tmp_dir=tmp_dir, pto=optim_proj_file))
    if crop_out:
        # get the bounding boxes for all frames (tblr == top/bottom/left/right)
        bboxes = extract_track([rimgt % i for i in range(n_imgs)], bb_format='tblr')
        # get the global bounding box covering every frame's content
        gt = np.min(bboxes[:, 0])
        gb = np.max(bboxes[:, 1])
        gl = np.min(bboxes[:, 2])
        gr = np.max(bboxes[:, 3])
        # make it a multiple of 2 (needed for some codecs like h.264)
        bboxes[:, 1] -= (gb - gt) % 2
        bboxes[:, 3] -= (gr - gl) % 2
        gb -= (gb - gt) % 2
        gr -= (gr - gl) % 2
        # crop all tiff files with the same global bbox
        for i in range(n_imgs):
            # load and crop color image
            img = np.ascontiguousarray(cv2.imread(rimgt % i)[gt:gb, gl:gr, :])
            # overwrite previous version with new cropped one
            cv2.imwrite(rimgt % i, img)
        # correct the bounding boxes to be wrt to the globally cropped frames
        bboxes[:, :2] -= gt
        bboxes[:, 2:4] -= gl
        # temporal smoothing
        # should do it in later stages only if necessary
        #if n_imgs > 11:
        #    from scipy.ndimage.filters import convolve1d
        #    win = np.hanning(11)
        #    win /= win.sum()
        #    bboxes = convolve1d(bboxes, win, axis=0)
        # save the bboxes in ulwh format (upper-left corner, width, height)
        track = np.copy(bboxes)
        track[:, [0, 1]] = bboxes[:, [2, 0]]
        track[:, 2] = bboxes[:, 3] - bboxes[:, 2]
        track[:, 3] = bboxes[:, 1] - bboxes[:, 0]
        out_track = out_avi[:-4] + '.npy'
        np.save(out_track, track)
        print "saved {0}".format(out_track)
    # generate the warped and cropped video
    # Note: directly cropping in ffmpeg (-croptop ...) doesn't work
    # TODO try: -vf crop=...?
    cmd = "ffmpeg -y -f image2 -i {rit} -vcodec {codec} -qscale 0 -r 25 -an {avi}"
    exec_shell(cmd.format(rit=rimgt, codec=out_codec, avi=out_avi))
    print "saved {0}".format(out_avi)
def generate_stabilized_video(input_media, optim_vars='v_p_y', hfov=40,
                              out_avi="out.avi", crop_out=False):
    """ Motion stabilize a video using a stitching technique
    Parameters
    ----------
    input_media: string or list,
        a video file name or the (ordered) paths to the video frames
    optim_vars: string, optional, default: 'v_p_y',
        geometric parameters to optimize (separated by '_')
        - v: view point
        - p: pitch
        - y: yaw
        - r: roll
        - Tr{X,Y,Z}: translation
    hfov: int, optional, default: 40,
        horizontal field of view in degrees
        (around 40-60 is ok for most non-fish-eye cameras)
    out_avi: string, optional, default: 'out.avi',
        output path to the generated motion-stabilized video
    crop_out: boolean, optional, default: False,
        automatically crop the output video.
    """
    # create a temporary directory (removed in the finally clause below)
    tmp_pref = 'tmp_mostab_' + out_avi.split('/')[-1] + '_'
    tmp_dir = tempfile.mkdtemp(prefix=tmp_pref, dir='.')
    try:
        # get the frames if necessary
        if isinstance(input_media, str):
            input_media = [input_media]
        if len(input_media) == 1:
            if input_media[0][-4:] in ('.avi', 'mpg', '.mp4'):
                # input arg == a video: dumps its frames
                # NOTE(review): the 'mpg' entry is only 3 chars, so it
                # matches the last 4 chars of '.mpg' paths -- confirm
                # intent for other extensions ending in 'mpg'.
                cmd = 'ffmpeg -i {} -f image2 -qscale 0 {}/origframe-%06d.jpg'.format(input_media[0], tmp_dir)
                exec_shell(cmd)
                img_fns = sorted(glob('{}/origframe-*.jpg'.format(tmp_dir)))
            else:
                # input arg: assume its directory containing jpg's or png's
                img_dir = input_media[0]
                img_fns = sorted(glob('{}/*.jpg'.format(img_dir)))
                if len(img_fns) <= 0:
                    img_fns = sorted(glob('{}/*.png'.format(img_dir)))
        #otherwise a list of images was given
        else:
            img_fns = input_media
        if len(img_fns) <= 0:
            raise ValueError('Could not obtain frames from {}'.format(img_fns))
        # get the absolute paths
        img_fns = map(os.path.abspath, img_fns)
        # duplicate images such that there are no 0-valued pixels
        # (required by the thresholding in extract_track)
        img_fns = map(duplicate_nonzero_img, img_fns)
        # pano tools project file
        proj_file = "%s/project.pto" % tmp_dir
        # initialize the pto project
        pto_gen(img_fns, hfov=hfov, out_pto=proj_file)
        # generate the control points with OpenCV's SURF + RANSAC
        gen_pairwise_surf_control_points(proj_file, img_fns)
        # prune the control points (TODO through hsi?)
        cmd = "cpclean -p -o {pto} {pto}"
        exec_shell(cmd.format(pto=proj_file))
        # add lines (TODO through hsi?)
        cmd = "linefind -o {pto} {pto}"
        exec_shell(cmd.format(pto=proj_file))
        # optimization of the geometry (slow; may raise on degenerate cases)
        optim_proj_file = optimize_geometry(proj_file, optim_vars)
        # warp the frames and make a motion stabilized video
        warp_crop_and_generate_video(
            out_avi, optim_proj_file, len(img_fns), crop_out=crop_out,
            tmp_dir=tmp_dir)
    finally:
        # clean up
        shutil.rmtree(tmp_dir)
        sys.stdout.flush()
| daien/camocomp | camocomp/motion_compensate.py | Python | bsd-3-clause | 13,563 |
# Copyright (c) 2013 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import random
from oslo import messaging
from neutron.common import constants
from neutron.common import rpc as n_rpc
from neutron.common import topics
from neutron.common import utils
from neutron.i18n import _LE
from neutron import manager
from neutron.openstack.common import log as logging
from neutron.plugins.common import constants as service_constants
LOG = logging.getLogger(__name__)
class L3AgentNotifyAPI(object):
    """API for plugin to notify L3 agent."""

    def __init__(self, topic=topics.L3_AGENT):
        # Base RPC version is 1.0; individual calls bump the version when
        # they need newer agent-side methods (see prepare(version=...) below).
        target = messaging.Target(topic=topic, version='1.0')
        self.client = n_rpc.get_client(target)

    def _notification_host(self, context, method, payload, host):
        """Notify the agent that is hosting the router."""
        LOG.debug('Notify agent at %(host)s the message '
                  '%(method)s', {'host': host,
                                 'method': method})
        # cast == fire-and-forget RPC to that specific agent host
        cctxt = self.client.prepare(server=host)
        cctxt.cast(context, method, payload=payload)

    def _agent_notification(self, context, method, router_ids, operation,
                            shuffle_agents):
        """Notify changed routers to hosting l3 agents."""
        # NOTE(review): `operation` is accepted but never used here --
        # confirm whether it is kept only for signature compatibility.
        adminContext = context if context.is_admin else context.elevated()
        plugin = manager.NeutronManager.get_service_plugins().get(
            service_constants.L3_ROUTER_NAT)
        for router_id in router_ids:
            l3_agents = plugin.get_l3_agents_hosting_routers(
                adminContext, [router_id],
                admin_state_up=True,
                active=True)
            # Randomize delivery order to spread load across agents.
            if shuffle_agents:
                random.shuffle(l3_agents)
            for l3_agent in l3_agents:
                LOG.debug('Notify agent at %(topic)s.%(host)s the message '
                          '%(method)s',
                          {'topic': l3_agent.topic,
                           'host': l3_agent.host,
                           'method': method})
                cctxt = self.client.prepare(topic=l3_agent.topic,
                                            server=l3_agent.host,
                                            version='1.1')
                cctxt.cast(context, method, routers=[router_id])

    def _agent_notification_arp(self, context, method, router_id,
                                operation, data):
        """Notify arp details to l3 agents hosting router."""
        if not router_id:
            return
        adminContext = (context.is_admin and
                        context or context.elevated())
        plugin = manager.NeutronManager.get_service_plugins().get(
            service_constants.L3_ROUTER_NAT)
        l3_agents = (plugin.
                     get_l3_agents_hosting_routers(adminContext,
                                                   [router_id],
                                                   admin_state_up=True,
                                                   active=True))
        # TODO(murali): replace cast with fanout to avoid performance
        # issues at greater scale.
        for l3_agent in l3_agents:
            log_topic = '%s.%s' % (l3_agent.topic, l3_agent.host)
            LOG.debug('Casting message %(method)s with topic %(topic)s',
                      {'topic': log_topic, 'method': method})
            dvr_arptable = {'router_id': router_id,
                            'arp_table': data}
            # version 1.2 is required for the add/del_arp_entry methods
            cctxt = self.client.prepare(topic=l3_agent.topic,
                                        server=l3_agent.host,
                                        version='1.2')
            cctxt.cast(context, method, payload=dvr_arptable)

    def _notification(self, context, method, router_ids, operation,
                      shuffle_agents):
        """Notify all the agents that are hosting the routers."""
        plugin = manager.NeutronManager.get_service_plugins().get(
            service_constants.L3_ROUTER_NAT)
        if not plugin:
            LOG.error(_LE('No plugin for L3 routing registered. Cannot notify '
                          'agents with the message %s'), method)
            return
        if utils.is_extension_supported(
                plugin, constants.L3_AGENT_SCHEDULER_EXT_ALIAS):
            # Scheduler extension available: bind routers to agents first,
            # then cast to each hosting agent individually.
            adminContext = (context.is_admin and
                            context or context.elevated())
            plugin.schedule_routers(adminContext, router_ids)
            self._agent_notification(
                context, method, router_ids, operation, shuffle_agents)
        else:
            # No scheduler: broadcast to every L3 agent on the topic.
            cctxt = self.client.prepare(fanout=True)
            cctxt.cast(context, method, routers=router_ids)

    def _notification_fanout(self, context, method, router_id):
        """Fanout the deleted router to all L3 agents."""
        LOG.debug('Fanout notify agent at %(topic)s the message '
                  '%(method)s on router %(router_id)s',
                  {'topic': topics.L3_AGENT,
                   'method': method,
                   'router_id': router_id})
        cctxt = self.client.prepare(fanout=True)
        cctxt.cast(context, method, router_id=router_id)

    def agent_updated(self, context, admin_state_up, host):
        # Tell one agent that its admin state changed.
        self._notification_host(context, 'agent_updated',
                                {'admin_state_up': admin_state_up},
                                host)

    def router_deleted(self, context, router_id):
        # Deletion is broadcast: any agent may have hosted the router.
        self._notification_fanout(context, 'router_deleted', router_id)

    def routers_updated(self, context, router_ids, operation=None, data=None,
                        shuffle_agents=False):
        # No-op for an empty id list.
        if router_ids:
            self._notification(context, 'routers_updated', router_ids,
                               operation, shuffle_agents)

    def add_arp_entry(self, context, router_id, arp_table, operation=None):
        self._agent_notification_arp(context, 'add_arp_entry', router_id,
                                     operation, arp_table)

    def del_arp_entry(self, context, router_id, arp_table, operation=None):
        self._agent_notification_arp(context, 'del_arp_entry', router_id,
                                     operation, arp_table)

    def router_removed_from_agent(self, context, router_id, host):
        self._notification_host(context, 'router_removed_from_agent',
                                {'router_id': router_id}, host)

    def router_added_to_agent(self, context, router_ids, host):
        self._notification_host(context, 'router_added_to_agent',
                                router_ids, host)
| projectcalico/calico-neutron | neutron/api/rpc/agentnotifiers/l3_rpc_agent_api.py | Python | apache-2.0 | 7,120 |
# -*- coding: utf-8 -*-
# Licensed under the MIT license
# http://opensource.org/licenses/mit-license.php
# Copyright 2014, Hartmut Goebel <h.goebel@goebel-consult.de>
"""
Test cases for L{upnp.core.service}
"""
import time
try:
import unittest.mock as mock
except ImportError:
import mock
from twisted.trial import unittest
from twisted.internet.defer import Deferred
from coherence.upnp.core import service, device
class DummyDevice:
    """Minimal stand-in for a upnp.core Device.

    Provides just the attributes and accessors that Service() touches,
    each returning a recognisable fake value.
    """

    client = None
    friendly_name = 'Dummy Device'

    def get_location(self):
        return "DummyDevice's Location"

    def get_urlbase(self):
        return "DummyDevice's URL base"

    def get_id(self):
        return "DummyDevice's ID"

    def make_fullyqualified(self, url):
        """Prefix *url* with a recognisable fake base."""
        return "DummyDevice's FQ-URL/" + url
# :todo: put this into a central module
def raiseError(url):
    """Mock replacement for coherence.upnp.core.utils.getPage.

    Returns a Deferred whose callback chain raises immediately, as if
    the requested document could not be retrieved.
    """
    deferred = Deferred()

    def _fail(*args):
        raise Exception('Meaningless Error')

    deferred.addCallback(_fail)
    return deferred
# :todo: put this into a central module
def fakeGetPage(content):
    """Build a mock for coherence.upnp.core.utils.getPage.

    The returned callable yields a Deferred that has already fired with
    ``(content, {})``, behaving as if the fetched file held *content*.
    """
    def _returnPage(url):
        d = Deferred()
        d.callback((content, {}))
        return d

    return _returnPage
class DescriptionNotFound(unittest.TestCase):
    """Service() behaviour when the SCPD description cannot be fetched.

    ``getPage`` is mocked to fail, so the service keeps its initial,
    empty action/variable tables and detection never completes.
    """

    def setUp(self):
        # Simulate a failing HTTP request for the service description.
        with mock.patch('coherence.upnp.core.utils.getPage', raiseError):
            self.setUp_main()

    def setUp_main(self):
        # Shared by subclasses that patch getPage differently.
        self.device= DummyDevice()
        self.service = service.Service(
            'my-service-type', 'my-service-id',
            'http://localhost:8080/my-location',
            'http://localhost/my-service/control',
            'http://localhost/my-service/subscribe',
            'http://localhost/my-service/view',
            'http://localhost/my-service/scpd',
            self.device)

    def tearDown(self):
        # Cancel the subscription-renewal timer if it was scheduled, so
        # no delayed call is left pending after the test.
        try:
            self.service.renew_subscription_call.cancel()
        except AttributeError:
            pass

    def test_init(self):
        """ Test initialization of Service() instance """
        svc = self.service
        self.assertEqual(svc.service_type, 'my-service-type')
        self.assertEqual(svc.id, 'my-service-id')
        # parameter location goes into url_base
        self.assertEqual(svc.control_url,
                         'http://localhost/my-service/control')
        self.assertEqual(svc.event_sub_url,
                         'http://localhost/my-service/subscribe')
        self.assertEqual(svc.presentation_url,
                         'http://localhost/my-service/view')
        self.assertEqual(svc.scpd_url, 'http://localhost/my-service/scpd')
        self.assertIs(svc.device, self.device)
        # not completed as we have *no* valid description
        self.assertFalse(svc.detection_completed)
        self.assertEqual(svc._actions, {})
        self.assertEqual(svc._variables, {0: {}})
        # :fixme: this attribute is unused, remove it
        self.assertEqual(svc._var_subscribers, {})
        self.assertIs(svc.subscription_id, None)
        self.assertEqual(svc.timeout, 0)
        # :todo: rethink last_time_updated: maybe better to init with 0
        self.assertIs(svc.last_time_updated, None)
        self.assertIs(svc.event_connection, None)
        self.assertIs(svc.client, None)
        self.assertEqual(svc.url_base, 'http://localhost:8080')

    def test_scpdXML(self):
        svc = self.service
        # scpdXML is not set as we have not parsed any description
        self.assertRaises(AttributeError, getattr, svc, 'scpdXML')
        self.assertRaises(AttributeError, svc.get_scpdXML)

    def test_getters(self):
        # All URL getters route through DummyDevice.make_fullyqualified.
        svc = self.service
        self.assertIs(svc.get_device(), self.device)
        self.assertEqual(svc.get_type(), 'my-service-type')
        self.assertEqual(svc.get_id(), 'my-service-id')
        self.assertEqual(svc.get_timeout(), 0)
        self.assertEqual(svc.get_sid(), None)
        self.assertEqual(svc.get_actions(), {})
        self.assertEqual(svc.get_state_variables(0), {})
        self.assertEqual(
            svc.get_control_url(),
            "DummyDevice's FQ-URL/http://localhost/my-service/control")
        self.assertEqual(
            svc.get_event_sub_url(),
            "DummyDevice's FQ-URL/http://localhost/my-service/subscribe")
        self.assertEqual(
            svc.get_presentation_url(),
            "DummyDevice's FQ-URL/http://localhost/my-service/view")
        self.assertEqual(
            svc.get_scpd_url(),
            "DummyDevice's FQ-URL/http://localhost/my-service/scpd")
        self.assertEqual(svc.get_base_url(), "DummyDevice's FQ-URL/.")

    def test_as_dict(self):
        """ Test Service.as_dict() """
        svc = self.service
        self.assertEqual(svc.as_dict(),
                         {'type': 'my-service-type',
                          'actions': [] })

    def test_as_tuple(self):
        """ Test Service.as_tuples() """
        svc = self.service
        self.assertEqual(svc.as_tuples(), [
            ("Location", ("DummyDevice's Location", "DummyDevice's Location")),
            ("URL base", "DummyDevice's URL base"),
            ("UDN", "DummyDevice's ID"),
            ("Type", 'my-service-type'),
            ("ID", 'my-service-id'),
            ("Service Description URL",
             ('http://localhost/my-service/scpd',
              "DummyDevice's FQ-URL/http://localhost/my-service/scpd")),
            ("Control URL",
             ('http://localhost/my-service/control',
              "DummyDevice's FQ-URL/http://localhost/my-service/control",
              False)),
            ("Event Subscription URL",
             ('http://localhost/my-service/subscribe',
              "DummyDevice's FQ-URL/http://localhost/my-service/subscribe",
              False)),
            ])

    def test_set_timeout(self):
        svc = self.service
        self.assertEqual(svc.get_timeout(), 0)
        svc.set_timeout(654)
        self.assertEqual(svc.get_timeout(), 654)

    def test_set_sid(self):
        svc = self.service
        self.assertEqual(svc.get_sid(), None)
        svc.set_sid('my-subscription-id')
        self.assertEqual(svc.get_sid(), 'my-subscription-id')
class EmptyDescription(DescriptionNotFound):
    """
    Same as DescriptionNotFound, except now we pass an empty
    description XML. Results should be the same, except for scpdXML.
    """
    def setUp(self):
        # Inherits all tests; only the mocked getPage result differs.
        with mock.patch('coherence.upnp.core.utils.getPage',
                        fakeGetPage('')):
            self.setUp_main()

    def test_scpdXML(self):
        svc = self.service
        # scpdXML is empty as we have not parsed any description
        self.assertEqual(svc.scpdXML, '')
        self.assertEqual(svc.get_scpdXML(), '')
class InvalidDescriptionXML(DescriptionNotFound):
    """
    Same as DescriptionNotFound, except now we pass a
    description which is invalid XML. Results should be the same,
    except for scpdXML.
    """
    def setUp(self):
        # '<x>' is not well-formed XML, so parsing must fail gracefully.
        with mock.patch('coherence.upnp.core.utils.getPage',
                        fakeGetPage('<x>')):
            self.setUp_main()

    def test_scpdXML(self):
        svc = self.service
        # :fixme: rethink if invalid scpdXML should really be stored
        self.assertEqual(svc.scpdXML, '<x>')
        self.assertEqual(svc.get_scpdXML(), '<x>')
_scpdXMLTemplate = '''\
<?xml version="1.0"?>
<scpd xmlns="urn:schemas-upnp-org:service-1-0">
%s
%s
</scpd>
'''
_scdpActions = '''
<actionList>
<action>
<name>GetCurrentConnectionIDs</name>
<argumentList>
<argument>
<name>ConnectionIDs</name>
<direction>out</direction>
<relatedStateVariable>CurrentConnectionIDs</relatedStateVariable>
</argument>
</argumentList>
</action>
</actionList>
'''
_scdpServiceStates = '''
<serviceStateTable>
<stateVariable sendEvents="yes">
<name>SourceProtocolInfo</name>
<dataType>string</dataType>
</stateVariable>
</serviceStateTable>
'''
class CompleteDescription(unittest.TestCase):
    """Service() behaviour with a complete, valid SCPD description.

    Subclasses override ``_scpdXML`` and the expectation tables to test
    degenerate descriptions (missing actions/variables, bad namespaces).
    """

    _scpdXML = _scpdXMLTemplate % (_scdpActions, _scdpServiceStates)
    # Parsed form expected for _scdpActions (compared via Action.as_dict)
    _expected_actions = [
        {'name': 'GetCurrentConnectionIDs',
         'arguments':
             [{'name': 'ConnectionIDs',
               'direction': 'out',
               'related_state_variable': 'CurrentConnectionIDs'
               }]
         }
        ]
    # Parsed form expected for _scdpServiceStates (via Variable.as_tuples)
    _expected_variables = [
        [('Name', 'SourceProtocolInfo'),
         ('Evented', 'yes'),
         ('Data Type', 'string'),
         ('Default Value', ''),
         ('Current Value', ''),
         ]
        ]

    def setUp(self):
        info = {
            'USN': "RootDevice's USN",
            'SERVER': "RootDevice's Server",
            'ST': "RootDevice's ST",
            'LOCATION': "http://localhost:8080/my-location",
            'MANIFESTATION': "RootDevice's Manifestation",
            'HOST': "RootDevice's Host",
            }
        # Create an empty RootDevice without services, actions, icons,
        # etc. Do not use DummyDevice here as we want to test URLs,
        # too.
        with mock.patch('coherence.upnp.core.utils.getPage', raiseError):
            self.device = device.RootDevice(info)
        # Since the Device has got no description, wenn need to set
        # urlbase manually. It is required to be set.
        # :fixme: this one is used in make_fullyqualified, rethink
        self.device.urlbase = 'http://localhost:8888'
        # Create the service we want to test, using meaningful values
        with mock.patch('coherence.upnp.core.utils.getPage',
                        fakeGetPage(self._scpdXML)):
            self.service = service.Service(
                'urn:schemas-upnp-org:service:RenderingControl:1',
                'urn:upnp-org:serviceId:RenderingControl',
                self.device.get_location(),
                '/my-service/control',
                '/my-service/subscribe',
                '/my-service/view',
                '/my-service/scpd',
                self.device)

    def tearDown(self):
        # Cancel the subscription-renewal timer if it was scheduled.
        try:
            self.service.renew_subscription_call.cancel()
        except AttributeError:
            pass

    def test_init(self):
        """ Test initialization of Service() instance """
        svc = self.service
        self.assertEqual(svc.service_type,
                         'urn:schemas-upnp-org:service:RenderingControl:1')
        self.assertEqual(svc.id, 'urn:upnp-org:serviceId:RenderingControl')
        # parameter location goes into url_base
        self.assertEqual(svc.control_url, '/my-service/control')
        self.assertEqual(svc.event_sub_url, '/my-service/subscribe')
        self.assertEqual(svc.presentation_url, '/my-service/view')
        self.assertEqual(svc.scpd_url, '/my-service/scpd')
        self.assertIs(svc.device, self.device)
        # completed as we have a valid description
        self.assertTrue(svc.detection_completed)
        self.assertIs(svc.subscription_id, None)
        self.assertEqual(svc.timeout, 0)
        # :todo: rethink last_time_updated: maybe better to init with 0
        self.assertIs(svc.last_time_updated, None)
        self.assertIs(svc.event_connection, None)
        self.assertIs(svc.client, None)
        # :fixme: this one is *not* used in make_fullyqualified, rethink
        self.assertEqual(svc.url_base, 'http://localhost:8080')

    def test_scpdXML(self):
        svc = self.service
        self.assertEqual(svc.scpdXML, self._scpdXML)
        self.assertEqual(svc.get_scpdXML(), self._scpdXML)

    def test_actions(self):
        def compare_actions(actions_to_test):
            self.assertEqual(len(actions_to_test), len(self._expected_actions))
            for i, name in enumerate(actions_to_test):
                # Note: This implicitly tests Action.as_dict(), too,
                # but saves a lot of code.
                self.assertEqual(actions_to_test[name].as_dict(),
                                 self._expected_actions[i])
        svc = self.service
        compare_actions(svc._actions)
        compare_actions(svc.get_actions())

    def test_variables(self):
        def compare_variables(variables_to_test):
            self.assertEqual(len(variables_to_test),
                             len(self._expected_variables))
            for i, name in enumerate(variables_to_test):
                # Note: This implicitly tests Variable.as_tuples(),
                # too, but saves a lot of code.
                self.assertEqual(variables_to_test[name].as_tuples(),
                                 self._expected_variables[i])
        svc = self.service
        # there is one instance
        self.assertEqual(len(svc._variables), 1)
        # list() makes the comparison work on Python 3, too, where
        # dict.keys() returns a view that never equals a list.
        self.assertEqual(list(svc._variables.keys()), [0])
        compare_variables(svc._variables[0])
        #compare_variables(svc.get_state_variables(0))

    def test_getters(self):
        svc = self.service
        self.assertIs(svc.get_device(), self.device)
        self.assertEqual(svc.get_type(),
                         'urn:schemas-upnp-org:service:RenderingControl:1')
        self.assertEqual(svc.get_id(),
                         'urn:upnp-org:serviceId:RenderingControl')
        self.assertEqual(svc.get_timeout(), 0)
        self.assertEqual(svc.get_sid(), None)
        self.assertEqual(svc.get_control_url(),
                         'http://localhost:8888/my-service/control')
        self.assertEqual(svc.get_event_sub_url(),
                         'http://localhost:8888/my-service/subscribe')
        self.assertEqual(svc.get_presentation_url(),
                         'http://localhost:8888/my-service/view')
        self.assertEqual(svc.get_scpd_url(),
                         'http://localhost:8888/my-service/scpd')
        self.assertEqual(svc.get_base_url(), "http://localhost:8888/")
class DescriptionWithoutActions(CompleteDescription):
    """Valid SCPD with state variables but no <actionList>."""
    _scpdXML = _scpdXMLTemplate % ('', _scdpServiceStates)
    _expected_actions = {}
class DescriptionWithoutVariables(CompleteDescription):
    """Valid SCPD with actions but no <serviceStateTable>."""
    _scpdXML = _scpdXMLTemplate % (_scdpActions, '')
    _expected_variables = []
class DescriptionWithoutNamespaceDeclaration(CompleteDescription):
    """SCPD whose xmlns attribute is renamed away; nothing is parsed."""
    _scpdXML = _scpdXMLTemplate.replace(' xmlns=', ' dummy=') \
               % ('', _scdpServiceStates)
    _expected_actions = {}
    _expected_variables = []
class DescriptionWithWrongNamespace(CompleteDescription):
    """SCPD declaring an unexpected namespace URI; nothing is parsed."""
    _scpdXML = _scpdXMLTemplate.replace(' xmlns="urn:', ' xmlns="dummy:') \
               % ('', _scdpServiceStates)
    _expected_actions = {}
    _expected_variables = []
# :todo: test-cases for subscribe/unsubscribe, subscribe_for_variable
# :todo: test-cases for process_event()
# :todo: test-cases for ServiceServer
# :todo: test-cases for scpdXML
# :todo: test-cases for ServiceControl
| coherence-project/Coherence | coherence/upnp/core/test/test_service.py | Python | mit | 15,115 |
#pylint: disable=W0102,C0103
import os
import threading
from traceback import print_exc
from BitTornado.Meta.BTTree import BTTree
from BitTornado.Meta.Info import MetaInfo
defaults = [
('announce_list', '',
'a list of announce URLs - explained below'),
('httpseeds', '',
'a list of http seed URLs - explained below'),
('piece_size_pow2', 0,
"which power of 2 to set the piece size to (0 = automatic)"),
('comment', '',
"optional human-readable comment to put in .torrent"),
('filesystem_encoding', '',
"optional specification for filesystem encoding " +
"(set automatically in recent Python versions)"),
('target', '',
"optional target file for the torrent")
]
ignore = ['core', 'CVS']
announcelist_details = \
"""announce_list = optional list of redundant/backup tracker URLs, in the
format:
url[,url...][|url[,url...]...]
where URLs separated by commas are all tried first
before the next group of URLs separated by the pipe is checked.
If none is given, it is assumed you don't want one in the metafile.
If announce_list is given, clients which support it
will ignore the <announce> value.
Examples:
http://tracker1.com|http://tracker2.com|http://tracker3.com
(tries trackers 1-3 in order)
http://tracker1.com,http://tracker2.com,http://tracker3.com
(tries trackers 1-3 in a randomly selected order)
http://tracker1.com|http://backup1.com,http://backup2.com
(tries tracker 1 first, then tries between the 2 backups randomly)
httpseeds = optional list of http-seed URLs, in the format:
url[|url...]"""
def make_meta_file(loc, url, params=None, flag=None,
                   progress=lambda x: None, progress_percent=True):
    """Make a single .torrent file for a given location.

    Arguments:
    loc              -- file or directory the torrent describes
    url              -- tracker announce URL
    params           -- optional dict of metafile parameters (see
                        ``defaults``); 'target' selects the output path
    flag             -- threading.Event polled to abort hashing early
    progress         -- callback reporting hashing progress
    progress_percent -- True to report progress as a fraction,
                        False to report bytes processed
    """
    if params is None:
        params = {}
    if flag is None:
        flag = threading.Event()
    tree = BTTree(loc, [])

    # Default the output path to "<loc>.torrent" beside the source.
    # os.path.split yields (head, tail); tail is empty when loc ends
    # with a path separator.
    if 'target' not in params or params['target'] == '':
        head, tail = os.path.split(loc)
        if tail == '':
            target = head + '.torrent'
        else:
            target = os.path.join(head, tail + '.torrent')
        params['target'] = target

    info = tree.makeInfo(flag=flag, progress=progress,
                         progress_percent=progress_percent, **params)

    # flag can no longer be None here (defaulted above); bail out
    # without writing anything if hashing was aborted.
    if flag.is_set():
        return
    metainfo = MetaInfo(announce=url, info=info, **params)
    metainfo.write(params['target'])
def completedir(directory, url, params=None, flag=None,
                progress=lambda x: None, filestat=lambda x: None):
    """Make a .torrent file for each entry in a directory.

    Entries listed in ``ignore``, hidden entries (leading '.'),
    existing .torrent files, and entries that already have a matching
    .torrent are skipped.  *filestat* is called with each file name
    before it is processed.
    """
    if params is None:
        params = {}
    if flag is None:
        flag = threading.Event()
    files = sorted(os.listdir(directory))
    ext = '.torrent'
    # Skip entries that already have a sibling .torrent and .torrent
    # files themselves.
    togen = [os.path.join(directory, fname) for fname in files
             if (fname + ext) not in files and not fname.endswith(ext)]
    trees = [BTTree(loc, []) for loc in togen]

    def subprog(update, subtotal=[0], total=sum(tree.size for tree in trees),
                progress=progress):
        """Aggregate progress callback

        Uses static subtotal to track across files"""
        # NOTE: the mutable default argument is intentional here — it
        # persists the running byte count across calls.
        subtotal[0] += update
        progress(float(subtotal[0]) / total)

    for fname in togen:
        filestat(fname)
        try:
            base = os.path.basename(fname)
            if base not in ignore and base[0] != '.':
                subparams = params.copy()
                if 'target' in params and params['target'] != '':
                    subparams['target'] = os.path.join(params['target'],
                                                       base + ext)
                make_meta_file(fname, url, subparams, flag,
                               progress=subprog, progress_percent=False)
        except ValueError:
            print_exc()
| jakesyl/BitTornado | BitTornado/Application/makemetafile.py | Python | mit | 4,006 |
# -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests for the API /nodes/ methods.
"""
import datetime
import mock
from oslo.config import cfg
from oslo.utils import timeutils
from six.moves.urllib import parse as urlparse
from testtools.matchers import HasLength
from ironic.common import boot_devices
from ironic.common import exception
from ironic.common import states
from ironic.common import utils
from ironic.conductor import rpcapi
from ironic import objects
from ironic.openstack.common import context
from ironic.tests.api import base
from ironic.tests.api import utils as apiutils
from ironic.tests.db import utils as dbutils
from ironic.tests.objects import utils as obj_utils
# NOTE(lucasagomes): When creating a node via API (POST)
# we have to use chassis_uuid
def post_get_test_node(**kw):
    """Build the body for a node POST request.

    The REST API accepts ``chassis_uuid`` instead of the internal
    ``chassis_id``, so the latter is blanked and the former filled in,
    defaulting to the stock test chassis.
    """
    node = apiutils.node_post_data(**kw)
    default_chassis = dbutils.get_test_chassis()
    node['chassis_id'] = None
    node['chassis_uuid'] = kw.get('chassis_uuid', default_chassis['uuid'])
    return node
class TestListNodes(base.FunctionalTest):
def setUp(self):
super(TestListNodes, self).setUp()
self.chassis = obj_utils.create_test_chassis(self.context)
p = mock.patch.object(rpcapi.ConductorAPI, 'get_topic_for')
self.mock_gtf = p.start()
self.mock_gtf.return_value = 'test-topic'
self.addCleanup(p.stop)
def _create_association_test_nodes(self):
#create some unassociated nodes
unassociated_nodes = []
for id in range(3):
node = obj_utils.create_test_node(self.context,
id=id,
uuid=utils.generate_uuid())
unassociated_nodes.append(node.uuid)
#created some associated nodes
associated_nodes = []
for id in range(3, 7):
node = obj_utils.create_test_node(
self.context, id=id, uuid=utils.generate_uuid(),
instance_uuid=utils.generate_uuid())
associated_nodes.append(node['uuid'])
return {'associated': associated_nodes,
'unassociated': unassociated_nodes}
def test_empty(self):
data = self.get_json('/nodes')
self.assertEqual([], data['nodes'])
def test_one(self):
node = obj_utils.create_test_node(self.context)
data = self.get_json('/nodes')
self.assertIn('instance_uuid', data['nodes'][0])
self.assertIn('maintenance', data['nodes'][0])
self.assertIn('power_state', data['nodes'][0])
self.assertIn('provision_state', data['nodes'][0])
self.assertIn('uuid', data['nodes'][0])
self.assertEqual(node['uuid'], data['nodes'][0]["uuid"])
self.assertNotIn('driver', data['nodes'][0])
self.assertNotIn('driver_info', data['nodes'][0])
self.assertNotIn('extra', data['nodes'][0])
self.assertNotIn('properties', data['nodes'][0])
self.assertNotIn('chassis_uuid', data['nodes'][0])
self.assertNotIn('reservation', data['nodes'][0])
self.assertNotIn('console_enabled', data['nodes'][0])
self.assertNotIn('target_power_state', data['nodes'][0])
self.assertNotIn('target_provision_state', data['nodes'][0])
self.assertNotIn('provision_updated_at', data['nodes'][0])
# never expose the chassis_id
self.assertNotIn('chassis_id', data['nodes'][0])
def test_get_one(self):
node = obj_utils.create_test_node(self.context)
data = self.get_json('/nodes/%s' % node['uuid'])
self.assertEqual(node.uuid, data['uuid'])
self.assertIn('driver', data)
self.assertIn('driver_info', data)
self.assertIn('extra', data)
self.assertIn('properties', data)
self.assertIn('chassis_uuid', data)
self.assertIn('reservation', data)
# never expose the chassis_id
self.assertNotIn('chassis_id', data)
def test_detail(self):
node = obj_utils.create_test_node(self.context)
data = self.get_json('/nodes/detail')
self.assertEqual(node['uuid'], data['nodes'][0]["uuid"])
self.assertIn('driver', data['nodes'][0])
self.assertIn('driver_info', data['nodes'][0])
self.assertIn('extra', data['nodes'][0])
self.assertIn('properties', data['nodes'][0])
self.assertIn('chassis_uuid', data['nodes'][0])
self.assertIn('reservation', data['nodes'][0])
self.assertIn('maintenance', data['nodes'][0])
self.assertIn('console_enabled', data['nodes'][0])
self.assertIn('target_power_state', data['nodes'][0])
self.assertIn('target_provision_state', data['nodes'][0])
self.assertIn('provision_updated_at', data['nodes'][0])
# never expose the chassis_id
self.assertNotIn('chassis_id', data['nodes'][0])
def test_detail_against_single(self):
node = obj_utils.create_test_node(self.context)
response = self.get_json('/nodes/%s/detail' % node['uuid'],
expect_errors=True)
self.assertEqual(404, response.status_int)
def test_many(self):
nodes = []
for id in range(5):
node = obj_utils.create_test_node(self.context, id=id,
uuid=utils.generate_uuid())
nodes.append(node.uuid)
data = self.get_json('/nodes')
self.assertEqual(len(nodes), len(data['nodes']))
uuids = [n['uuid'] for n in data['nodes']]
self.assertEqual(sorted(nodes), sorted(uuids))
def test_links(self):
uuid = utils.generate_uuid()
obj_utils.create_test_node(self.context, id=1, uuid=uuid)
data = self.get_json('/nodes/%s' % uuid)
self.assertIn('links', data.keys())
self.assertEqual(2, len(data['links']))
self.assertIn(uuid, data['links'][0]['href'])
for l in data['links']:
bookmark = l['rel'] == 'bookmark'
self.assertTrue(self.validate_link(l['href'], bookmark=bookmark))
def test_collection_links(self):
nodes = []
for id in range(5):
node = obj_utils.create_test_node(self.context, id=id,
uuid=utils.generate_uuid())
nodes.append(node.uuid)
data = self.get_json('/nodes/?limit=3')
self.assertEqual(3, len(data['nodes']))
next_marker = data['nodes'][-1]['uuid']
self.assertIn(next_marker, data['next'])
def test_collection_links_default_limit(self):
cfg.CONF.set_override('max_limit', 3, 'api')
nodes = []
for id in range(5):
node = obj_utils.create_test_node(self.context, id=id,
uuid=utils.generate_uuid())
nodes.append(node.uuid)
data = self.get_json('/nodes')
self.assertEqual(3, len(data['nodes']))
next_marker = data['nodes'][-1]['uuid']
self.assertIn(next_marker, data['next'])
def test_ports_subresource_link(self):
node = obj_utils.create_test_node(self.context)
data = self.get_json('/nodes/%s' % node.uuid)
self.assertIn('ports', data.keys())
def test_ports_subresource(self):
node = obj_utils.create_test_node(self.context)
for id_ in range(2):
obj_utils.create_test_port(self.context, id=id_, node_id=node.id,
uuid=utils.generate_uuid(),
address='52:54:00:cf:2d:3%s' % id_)
data = self.get_json('/nodes/%s/ports' % node.uuid)
self.assertEqual(2, len(data['ports']))
self.assertNotIn('next', data.keys())
# Test collection pagination
data = self.get_json('/nodes/%s/ports?limit=1' % node.uuid)
self.assertEqual(1, len(data['ports']))
self.assertIn('next', data.keys())
def test_ports_subresource_noid(self):
node = obj_utils.create_test_node(self.context)
obj_utils.create_test_port(self.context, node_id=node.id)
# No node id specified
response = self.get_json('/nodes/ports', expect_errors=True)
self.assertEqual(400, response.status_int)
def test_ports_subresource_node_not_found(self):
non_existent_uuid = 'eeeeeeee-cccc-aaaa-bbbb-cccccccccccc'
response = self.get_json('/nodes/%s/ports' % non_existent_uuid,
expect_errors=True)
self.assertEqual(404, response.status_int)
@mock.patch.object(timeutils, 'utcnow')
def test_node_states(self, mock_utcnow):
fake_state = 'fake-state'
fake_error = 'fake-error'
test_time = datetime.datetime(2000, 1, 1, 0, 0)
mock_utcnow.return_value = test_time
node = obj_utils.create_test_node(self.context,
power_state=fake_state,
target_power_state=fake_state,
provision_state=fake_state,
target_provision_state=fake_state,
provision_updated_at=test_time,
last_error=fake_error)
data = self.get_json('/nodes/%s/states' % node.uuid)
self.assertEqual(fake_state, data['power_state'])
self.assertEqual(fake_state, data['target_power_state'])
self.assertEqual(fake_state, data['provision_state'])
self.assertEqual(fake_state, data['target_provision_state'])
prov_up_at = timeutils.parse_isotime(
data['provision_updated_at']).replace(tzinfo=None)
self.assertEqual(test_time, prov_up_at)
self.assertEqual(fake_error, data['last_error'])
self.assertFalse(data['console_enabled'])
def test_node_by_instance_uuid(self):
node = obj_utils.create_test_node(self.context,
uuid=utils.generate_uuid(),
instance_uuid=utils.generate_uuid())
instance_uuid = node.instance_uuid
data = self.get_json('/nodes?instance_uuid=%s' % instance_uuid)
self.assertThat(data['nodes'], HasLength(1))
self.assertEqual(node['instance_uuid'],
data['nodes'][0]["instance_uuid"])
def test_node_by_instance_uuid_wrong_uuid(self):
obj_utils.create_test_node(self.context, uuid=utils.generate_uuid(),
instance_uuid=utils.generate_uuid())
wrong_uuid = utils.generate_uuid()
data = self.get_json('/nodes?instance_uuid=%s' % wrong_uuid)
self.assertThat(data['nodes'], HasLength(0))
def test_node_by_instance_uuid_invalid_uuid(self):
response = self.get_json('/nodes?instance_uuid=fake',
expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_code)
def test_associated_nodes_insensitive(self):
associated_nodes = self._create_association_test_nodes().\
get('associated')
data = self.get_json('/nodes?associated=true')
data1 = self.get_json('/nodes?associated=True')
uuids = [n['uuid'] for n in data['nodes']]
uuids1 = [n['uuid'] for n in data1['nodes']]
self.assertEqual(sorted(associated_nodes), sorted(uuids1))
self.assertEqual(sorted(associated_nodes), sorted(uuids))
def test_associated_nodes_error(self):
self._create_association_test_nodes()
response = self.get_json('/nodes?associated=blah', expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_code)
self.assertTrue(response.json['error_message'])
def test_unassociated_nodes_insensitive(self):
unassociated_nodes = self._create_association_test_nodes().\
get('unassociated')
data = self.get_json('/nodes?associated=false')
data1 = self.get_json('/nodes?associated=FALSE')
uuids = [n['uuid'] for n in data['nodes']]
uuids1 = [n['uuid'] for n in data1['nodes']]
self.assertEqual(sorted(unassociated_nodes), sorted(uuids1))
self.assertEqual(sorted(unassociated_nodes), sorted(uuids))
def test_unassociated_nodes_with_limit(self):
unassociated_nodes = self._create_association_test_nodes().\
get('unassociated')
data = self.get_json('/nodes?associated=False&limit=2')
self.assertThat(data['nodes'], HasLength(2))
self.assertTrue(data['nodes'][0]['uuid'] in unassociated_nodes)
def test_next_link_with_association(self):
self._create_association_test_nodes()
data = self.get_json('/nodes/?limit=3&associated=True')
self.assertThat(data['nodes'], HasLength(3))
self.assertIn('associated=True', data['next'])
def test_detail_with_association_filter(self):
associated_nodes = self._create_association_test_nodes().\
get('associated')
data = self.get_json('/nodes/detail?associated=true')
self.assertIn('driver', data['nodes'][0])
self.assertEqual(len(associated_nodes), len(data['nodes']))
def test_next_link_with_association_with_detail(self):
self._create_association_test_nodes()
data = self.get_json('/nodes/detail?limit=3&associated=true')
self.assertThat(data['nodes'], HasLength(3))
self.assertIn('driver', data['nodes'][0])
self.assertIn('associated=True', data['next'])
def test_detail_with_instance_uuid(self):
node = obj_utils.create_test_node(self.context,
uuid=utils.generate_uuid(),
instance_uuid=utils.generate_uuid())
instance_uuid = node.instance_uuid
data = self.get_json('/nodes/detail?instance_uuid=%s' % instance_uuid)
self.assertEqual(node['instance_uuid'],
data['nodes'][0]["instance_uuid"])
self.assertIn('driver', data['nodes'][0])
self.assertIn('driver_info', data['nodes'][0])
self.assertIn('extra', data['nodes'][0])
self.assertIn('properties', data['nodes'][0])
self.assertIn('chassis_uuid', data['nodes'][0])
# never expose the chassis_id
self.assertNotIn('chassis_id', data['nodes'][0])
def test_maintenance_nodes(self):
nodes = []
for id in range(5):
node = obj_utils.create_test_node(self.context, id=id,
uuid=utils.generate_uuid(),
maintenance=id % 2)
nodes.append(node)
data = self.get_json('/nodes?maintenance=true')
uuids = [n['uuid'] for n in data['nodes']]
test_uuids_1 = [n.uuid for n in nodes if n.maintenance]
self.assertEqual(sorted(test_uuids_1), sorted(uuids))
data = self.get_json('/nodes?maintenance=false')
uuids = [n['uuid'] for n in data['nodes']]
test_uuids_0 = [n.uuid for n in nodes if not n.maintenance]
self.assertEqual(sorted(test_uuids_0), sorted(uuids))
def test_maintenance_nodes_error(self):
response = self.get_json('/nodes?associated=true&maintenance=blah',
expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_code)
self.assertTrue(response.json['error_message'])
def test_maintenance_nodes_associated(self):
self._create_association_test_nodes()
node = obj_utils.create_test_node(self.context,
instance_uuid=utils.generate_uuid(),
maintenance=True)
data = self.get_json('/nodes?associated=true&maintenance=false')
uuids = [n['uuid'] for n in data['nodes']]
self.assertNotIn(node.uuid, uuids)
data = self.get_json('/nodes?associated=true&maintenance=true')
uuids = [n['uuid'] for n in data['nodes']]
self.assertIn(node.uuid, uuids)
data = self.get_json('/nodes?associated=true&maintenance=TruE')
uuids = [n['uuid'] for n in data['nodes']]
self.assertIn(node.uuid, uuids)
def test_get_console_information(self):
node = obj_utils.create_test_node(self.context)
expected_console_info = {'test': 'test-data'}
expected_data = {'console_enabled': True,
'console_info': expected_console_info}
with mock.patch.object(rpcapi.ConductorAPI,
'get_console_information') as mock_gci:
mock_gci.return_value = expected_console_info
data = self.get_json('/nodes/%s/states/console' % node.uuid)
self.assertEqual(expected_data, data)
mock_gci.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
def test_get_console_information_console_disabled(self):
node = obj_utils.create_test_node(self.context)
expected_data = {'console_enabled': False,
'console_info': None}
with mock.patch.object(rpcapi.ConductorAPI,
'get_console_information') as mock_gci:
mock_gci.side_effect = exception.NodeConsoleNotEnabled(
node=node.uuid)
data = self.get_json('/nodes/%s/states/console' % node.uuid)
self.assertEqual(expected_data, data)
mock_gci.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
def test_get_console_information_not_supported(self):
node = obj_utils.create_test_node(self.context)
with mock.patch.object(rpcapi.ConductorAPI,
'get_console_information') as mock_gci:
mock_gci.side_effect = exception.UnsupportedDriverExtension(
extension='console', driver='test-driver')
ret = self.get_json('/nodes/%s/states/console' % node.uuid,
expect_errors=True)
self.assertEqual(400, ret.status_code)
mock_gci.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
@mock.patch.object(rpcapi.ConductorAPI, 'get_boot_device')
def test_get_boot_device(self, mock_gbd):
node = obj_utils.create_test_node(self.context)
expected_data = {'boot_device': boot_devices.PXE, 'persistent': True}
mock_gbd.return_value = expected_data
data = self.get_json('/nodes/%s/management/boot_device' % node.uuid)
self.assertEqual(expected_data, data)
mock_gbd.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
@mock.patch.object(rpcapi.ConductorAPI, 'get_boot_device')
def test_get_boot_device_iface_not_supported(self, mock_gbd):
node = obj_utils.create_test_node(self.context)
mock_gbd.side_effect = exception.UnsupportedDriverExtension(
extension='management', driver='test-driver')
ret = self.get_json('/nodes/%s/management/boot_device' % node.uuid,
expect_errors=True)
self.assertEqual(400, ret.status_code)
self.assertTrue(ret.json['error_message'])
mock_gbd.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
@mock.patch.object(rpcapi.ConductorAPI, 'get_supported_boot_devices')
def test_get_supported_boot_devices(self, mock_gsbd):
mock_gsbd.return_value = [boot_devices.PXE]
node = obj_utils.create_test_node(self.context)
data = self.get_json('/nodes/%s/management/boot_device/supported'
% node.uuid)
expected_data = {'supported_boot_devices': [boot_devices.PXE]}
self.assertEqual(expected_data, data)
mock_gsbd.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
    @mock.patch.object(rpcapi.ConductorAPI, 'get_supported_boot_devices')
    def test_get_supported_boot_devices_iface_not_supported(self, mock_gsbd):
        """Missing management interface support surfaces as a 400 error."""
        node = obj_utils.create_test_node(self.context)
        mock_gsbd.side_effect = exception.UnsupportedDriverExtension(
            extension='management', driver='test-driver')
        ret = self.get_json('/nodes/%s/management/boot_device/supported' %
                            node.uuid, expect_errors=True)
        self.assertEqual(400, ret.status_code)
        self.assertTrue(ret.json['error_message'])
        mock_gsbd.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
class TestPatch(base.FunctionalTest):
    """Tests for PATCH /nodes/<uuid>: JSON-patch updates of node resources.

    All conductor RPC entry points used by the controller are mocked in
    setUp, so these tests exercise only API-layer validation and mapping
    of RPC exceptions to HTTP status codes.
    """
    def setUp(self):
        super(TestPatch, self).setUp()
        self.chassis = obj_utils.create_test_chassis(self.context)
        self.node = obj_utils.create_test_node(self.context)
        # Stub out topic resolution and the update/power RPCs; no real
        # conductor is involved in any of these tests.
        p = mock.patch.object(rpcapi.ConductorAPI, 'get_topic_for')
        self.mock_gtf = p.start()
        self.mock_gtf.return_value = 'test-topic'
        self.addCleanup(p.stop)
        p = mock.patch.object(rpcapi.ConductorAPI, 'update_node')
        self.mock_update_node = p.start()
        self.addCleanup(p.stop)
        p = mock.patch.object(rpcapi.ConductorAPI, 'change_node_power_state')
        self.mock_cnps = p.start()
        self.addCleanup(p.stop)
    def test_update_ok(self):
        """A valid instance_uuid replace returns 200 and echoes updated_at."""
        self.mock_update_node.return_value = self.node
        self.mock_update_node.return_value.updated_at = \
            "2013-12-03T06:20:41.184720+00:00"
        response = self.patch_json('/nodes/%s' % self.node['uuid'],
                                   [{'path': '/instance_uuid',
                                     'value': 'aaaaaaaa-1111-bbbb-2222-cccccccccccc',
                                     'op': 'replace'}])
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(200, response.status_code)
        self.assertEqual(self.mock_update_node.return_value.updated_at,
                         timeutils.parse_isotime(response.json['updated_at']))
        self.mock_update_node.assert_called_once_with(
            mock.ANY, mock.ANY, 'test-topic')
    def test_update_state(self):
        """A malformed patch document (bare dict, no op/path) is a 400."""
        response = self.patch_json('/nodes/%s' % self.node['uuid'],
                                   [{'power_state': 'new state'}],
                                   expect_errors=True)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(400, response.status_code)
        self.assertTrue(response.json['error_message'])
    def test_update_fails_bad_driver_info(self):
        """InvalidParameterValue raised by the conductor maps to 400."""
        fake_err = 'Fake Error Message'
        self.mock_update_node.side_effect = exception.InvalidParameterValue(
            fake_err)
        response = self.patch_json('/nodes/%s' % self.node['uuid'],
                                   [{'path': '/driver_info/this',
                                     'value': 'foo',
                                     'op': 'add'},
                                    {'path': '/driver_info/that',
                                     'value': 'bar',
                                     'op': 'add'}],
                                   expect_errors=True)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(400, response.status_code)
        self.mock_update_node.assert_called_once_with(
            mock.ANY, mock.ANY, 'test-topic')
    def test_update_fails_bad_driver(self):
        """No conductor can service the new driver -> NoValidHost -> 400."""
        self.mock_gtf.side_effect = exception.NoValidHost('Fake Error')
        response = self.patch_json('/nodes/%s' % self.node['uuid'],
                                   [{'path': '/driver',
                                     'value': 'bad-driver',
                                     'op': 'replace'}],
                                   expect_errors=True)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(400, response.status_code)
    def test_update_fails_bad_state(self):
        """NodeInWrongPowerState from the conductor maps to 409 Conflict."""
        fake_err = 'Fake Power State'
        self.mock_update_node.side_effect = exception.NodeInWrongPowerState(
            node=self.node['uuid'], pstate=fake_err)
        response = self.patch_json('/nodes/%s' % self.node['uuid'],
                                   [{'path': '/instance_uuid',
                                     'value': 'aaaaaaaa-1111-bbbb-2222-cccccccccccc',
                                     'op': 'replace'}],
                                   expect_errors=True)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(409, response.status_code)
        self.mock_update_node.assert_called_once_with(
            mock.ANY, mock.ANY, 'test-topic')
    def test_add_ok(self):
        """Adding a nested extra/* key succeeds."""
        self.mock_update_node.return_value = self.node
        response = self.patch_json('/nodes/%s' % self.node['uuid'],
                                   [{'path': '/extra/foo',
                                     'value': 'bar',
                                     'op': 'add'}])
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(200, response.status_code)
        self.mock_update_node.assert_called_once_with(
            mock.ANY, mock.ANY, 'test-topic')
    def test_add_root(self):
        """'add' on an existing top-level attribute behaves like replace."""
        self.mock_update_node.return_value = self.node
        response = self.patch_json('/nodes/%s' % self.node['uuid'],
                                   [{'path': '/instance_uuid',
                                     'value': 'aaaaaaaa-1111-bbbb-2222-cccccccccccc',
                                     'op': 'add'}])
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(200, response.status_code)
        self.mock_update_node.assert_called_once_with(
            mock.ANY, mock.ANY, 'test-topic')
    def test_add_root_non_existent(self):
        """'add' of an unknown top-level attribute is rejected with 400."""
        response = self.patch_json('/nodes/%s' % self.node['uuid'],
                                   [{'path': '/foo', 'value': 'bar', 'op': 'add'}],
                                   expect_errors=True)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(400, response.status_code)
        self.assertTrue(response.json['error_message'])
    def test_remove_ok(self):
        """Removing the whole extra dict succeeds."""
        self.mock_update_node.return_value = self.node
        response = self.patch_json('/nodes/%s' % self.node['uuid'],
                                   [{'path': '/extra',
                                     'op': 'remove'}])
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(200, response.status_code)
        self.mock_update_node.assert_called_once_with(
            mock.ANY, mock.ANY, 'test-topic')
    def test_remove_non_existent_property_fail(self):
        """Removing a missing extra/* key is a 400."""
        response = self.patch_json('/nodes/%s' % self.node['uuid'],
                                   [{'path': '/extra/non-existent', 'op': 'remove'}],
                                   expect_errors=True)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(400, response.status_code)
        self.assertTrue(response.json['error_message'])
    def test_update_state_in_progress(self):
        """Patching a node with a pending power-state change is a 409."""
        node = obj_utils.create_test_node(self.context, id=99,
                                          uuid=utils.generate_uuid(),
                                          target_power_state=states.POWER_OFF)
        response = self.patch_json('/nodes/%s' % node.uuid,
                                   [{'path': '/extra/foo', 'value': 'bar',
                                     'op': 'add'}], expect_errors=True)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(409, response.status_code)
        self.assertTrue(response.json['error_message'])
    def test_patch_ports_subresource(self):
        """PATCH on the ports sub-resource is forbidden (403)."""
        response = self.patch_json('/nodes/%s/ports' % self.node['uuid'],
                                   [{'path': '/extra/foo', 'value': 'bar',
                                     'op': 'add'}], expect_errors=True)
        self.assertEqual(403, response.status_int)
    def test_remove_uuid(self):
        """The node UUID cannot be removed."""
        response = self.patch_json('/nodes/%s' % self.node.uuid,
                                   [{'path': '/uuid', 'op': 'remove'}],
                                   expect_errors=True)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(400, response.status_code)
        self.assertTrue(response.json['error_message'])
    def test_remove_mandatory_field(self):
        """Mandatory fields such as driver cannot be removed."""
        response = self.patch_json('/nodes/%s' % self.node['uuid'],
                                   [{'path': '/driver', 'op': 'remove'}],
                                   expect_errors=True)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(400, response.status_code)
        self.assertTrue(response.json['error_message'])
    def test_replace_chassis_uuid(self):
        """chassis_uuid may be replaced with an existing chassis' UUID."""
        self.mock_update_node.return_value = self.node
        response = self.patch_json('/nodes/%s' % self.node.uuid,
                                   [{'path': '/chassis_uuid',
                                     'value': self.chassis.uuid,
                                     'op': 'replace'}])
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(200, response.status_code)
    def test_add_chassis_uuid(self):
        """chassis_uuid may also be set via an 'add' operation."""
        self.mock_update_node.return_value = self.node
        response = self.patch_json('/nodes/%s' % self.node.uuid,
                                   [{'path': '/chassis_uuid',
                                     'value': self.chassis.uuid,
                                     'op': 'add'}])
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(200, response.status_code)
    def test_add_chassis_id(self):
        """The internal chassis_id must not be settable through the API."""
        response = self.patch_json('/nodes/%s' % self.node.uuid,
                                   [{'path': '/chassis_id',
                                     'value': '1',
                                     'op': 'add'}],
                                   expect_errors=True)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(400, response.status_code)
        self.assertTrue(response.json['error_message'])
    def test_replace_chassis_id(self):
        """The internal chassis_id must not be replaceable."""
        response = self.patch_json('/nodes/%s' % self.node.uuid,
                                   [{'path': '/chassis_id',
                                     'value': '1',
                                     'op': 'replace'}],
                                   expect_errors=True)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(400, response.status_code)
        self.assertTrue(response.json['error_message'])
    def test_remove_chassis_id(self):
        """The internal chassis_id must not be removable."""
        response = self.patch_json('/nodes/%s' % self.node.uuid,
                                   [{'path': '/chassis_id',
                                     'op': 'remove'}],
                                   expect_errors=True)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(400, response.status_code)
        self.assertTrue(response.json['error_message'])
    def test_replace_non_existent_chassis_uuid(self):
        """Pointing chassis_uuid at an unknown chassis is a 400."""
        response = self.patch_json('/nodes/%s' % self.node['uuid'],
                                   [{'path': '/chassis_uuid',
                                     'value': 'eeeeeeee-dddd-cccc-bbbb-aaaaaaaaaaaa',
                                     'op': 'replace'}], expect_errors=True)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(400, response.status_code)
        self.assertTrue(response.json['error_message'])
    def test_remove_internal_field(self):
        """Internal (read-only) fields such as last_error are not removable."""
        response = self.patch_json('/nodes/%s' % self.node['uuid'],
                                   [{'path': '/last_error', 'op': 'remove'}],
                                   expect_errors=True)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(400, response.status_code)
        self.assertTrue(response.json['error_message'])
    def test_replace_internal_field(self):
        """power_state is managed by the conductor and not patchable."""
        response = self.patch_json('/nodes/%s' % self.node['uuid'],
                                   [{'path': '/power_state', 'op': 'replace',
                                     'value': 'fake-state'}],
                                   expect_errors=True)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(400, response.status_code)
        self.assertTrue(response.json['error_message'])
    def test_replace_maintenance(self):
        """maintenance IS patchable, unlike the other managed fields."""
        self.mock_update_node.return_value = self.node
        response = self.patch_json('/nodes/%s' % self.node.uuid,
                                   [{'path': '/maintenance', 'op': 'replace',
                                     'value': 'true'}])
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(200, response.status_code)
        self.mock_update_node.assert_called_once_with(
            mock.ANY, mock.ANY, 'test-topic')
    def test_replace_consoled_enabled(self):
        """console_enabled must be changed via /states/console, not PATCH."""
        response = self.patch_json('/nodes/%s' % self.node['uuid'],
                                   [{'path': '/console_enabled',
                                     'op': 'replace', 'value': True}],
                                   expect_errors=True)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(400, response.status_code)
        self.assertTrue(response.json['error_message'])
    def test_replace_provision_updated_at(self):
        """provision_updated_at is conductor-managed and not patchable."""
        test_time = '2000-01-01 00:00:00'
        response = self.patch_json('/nodes/%s' % self.node['uuid'],
                                   [{'path': '/provision_updated_at',
                                     'op': 'replace', 'value': test_time}],
                                   expect_errors=True)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(400, response.status_code)
        self.assertTrue(response.json['error_message'])
class TestPost(base.FunctionalTest):
    """Tests for POST /nodes (node creation) and vendor passthru POSTs."""
    def setUp(self):
        super(TestPost, self).setUp()
        self.chassis = obj_utils.create_test_chassis(self.context)
        # Topic resolution is stubbed so no conductor is required.
        p = mock.patch.object(rpcapi.ConductorAPI, 'get_topic_for')
        self.mock_gtf = p.start()
        self.mock_gtf.return_value = 'test-topic'
        self.addCleanup(p.stop)
    @mock.patch.object(timeutils, 'utcnow')
    def test_create_node(self, mock_utcnow):
        """Creating a node returns 201, sets created_at and a Location."""
        ndict = post_get_test_node()
        test_time = datetime.datetime(2000, 1, 1, 0, 0)
        mock_utcnow.return_value = test_time
        response = self.post_json('/nodes', ndict)
        self.assertEqual(201, response.status_int)
        result = self.get_json('/nodes/%s' % ndict['uuid'])
        self.assertEqual(ndict['uuid'], result['uuid'])
        self.assertFalse(result['updated_at'])
        return_created_at = timeutils.parse_isotime(
            result['created_at']).replace(tzinfo=None)
        self.assertEqual(test_time, return_created_at)
        # Check location header
        self.assertIsNotNone(response.location)
        expected_location = '/v1/nodes/%s' % ndict['uuid']
        self.assertEqual(urlparse.urlparse(response.location).path,
                         expected_location)
    def test_create_node_doesnt_contain_id(self):
        """The database-internal 'id' must not leak into create calls."""
        # FIXME(comstud): I'd like to make this test not use the
        # dbapi, however, no matter what I do when trying to mock
        # Node.create(), the API fails to convert the objects.Node
        # into the API Node object correctly (it leaves all fields
        # as Unset).
        with mock.patch.object(self.dbapi, 'create_node',
                               wraps=self.dbapi.create_node) as cn_mock:
            ndict = post_get_test_node(extra={'foo': 123})
            self.post_json('/nodes', ndict)
            result = self.get_json('/nodes/%s' % ndict['uuid'])
            self.assertEqual(ndict['extra'], result['extra'])
            cn_mock.assert_called_once_with(mock.ANY)
            # Check that 'id' is not in first arg of positional args
            self.assertNotIn('id', cn_mock.call_args[0][0])
    def test_create_node_valid_extra(self):
        """extra accepts string/int values and round-trips unchanged."""
        ndict = post_get_test_node(extra={'foo': 123})
        self.post_json('/nodes', ndict)
        result = self.get_json('/nodes/%s' % ndict['uuid'])
        self.assertEqual(ndict['extra'], result['extra'])
    def test_create_node_invalid_extra(self):
        """extra with a float value is rejected with 400."""
        ndict = post_get_test_node(extra={'foo': 0.123})
        response = self.post_json('/nodes', ndict, expect_errors=True)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(400, response.status_code)
        self.assertTrue(response.json['error_message'])
    def test_vendor_passthru_ok(self):
        """A vendor passthru call forwards to the conductor and returns 202."""
        node = obj_utils.create_test_node(self.context)
        uuid = node.uuid
        info = {'foo': 'bar'}
        with mock.patch.object(
                rpcapi.ConductorAPI, 'vendor_passthru') as mock_vendor:
            mock_vendor.return_value = 'OK'
            response = self.post_json('/nodes/%s/vendor_passthru/test' % uuid,
                                      info, expect_errors=False)
            mock_vendor.assert_called_once_with(
                mock.ANY, uuid, 'test', info, 'test-topic')
            self.assertEqual('"OK"', response.body)
            self.assertEqual(202, response.status_code)
    def test_vendor_passthru_no_such_method(self):
        """UnsupportedDriverExtension from the conductor maps to 400."""
        node = obj_utils.create_test_node(self.context)
        uuid = node.uuid
        info = {'foo': 'bar'}
        with mock.patch.object(
                rpcapi.ConductorAPI, 'vendor_passthru') as mock_vendor:
            mock_vendor.side_effect = exception.UnsupportedDriverExtension(
                {'driver': node.driver,
                 'node': uuid,
                 'extension': 'test'})
            response = self.post_json('/nodes/%s/vendor_passthru/test' % uuid,
                                      info, expect_errors=True)
            mock_vendor.assert_called_once_with(
                mock.ANY, uuid, 'test', info, 'test-topic')
            self.assertEqual(400, response.status_code)
    def test_vendor_passthru_without_method(self):
        """A passthru POST without a method name is a 400."""
        node = obj_utils.create_test_node(self.context)
        response = self.post_json('/nodes/%s/vendor_passthru' % node.uuid,
                                  {'foo': 'bar'}, expect_errors=True)
        self.assertEqual('application/json', response.content_type, )
        self.assertEqual(400, response.status_code)
        self.assertTrue(response.json['error_message'])
    def test_post_ports_subresource(self):
        """POST on the ports sub-resource is forbidden (403)."""
        node = obj_utils.create_test_node(self.context)
        pdict = apiutils.port_post_data(node_id=None)
        pdict['node_uuid'] = node.uuid
        response = self.post_json('/nodes/ports', pdict,
                                  expect_errors=True)
        self.assertEqual(403, response.status_int)
    def test_create_node_no_mandatory_field_driver(self):
        """Omitting the mandatory driver field is a 400."""
        ndict = post_get_test_node()
        del ndict['driver']
        response = self.post_json('/nodes', ndict, expect_errors=True)
        self.assertEqual(400, response.status_int)
        self.assertEqual('application/json', response.content_type)
        self.assertTrue(response.json['error_message'])
    def test_create_node_invalid_driver(self):
        """NoValidHost for the requested driver maps to 400."""
        ndict = post_get_test_node()
        self.mock_gtf.side_effect = exception.NoValidHost('Fake Error')
        response = self.post_json('/nodes', ndict, expect_errors=True)
        self.assertEqual(400, response.status_int)
        self.assertEqual('application/json', response.content_type)
        self.assertTrue(response.json['error_message'])
    def test_create_node_no_chassis_uuid(self):
        """chassis_uuid is optional; creation still returns 201 + Location."""
        ndict = post_get_test_node()
        del ndict['chassis_uuid']
        response = self.post_json('/nodes', ndict)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(201, response.status_int)
        # Check location header
        self.assertIsNotNone(response.location)
        expected_location = '/v1/nodes/%s' % ndict['uuid']
        self.assertEqual(urlparse.urlparse(response.location).path,
                         expected_location)
    def test_create_node_with_chassis_uuid(self):
        """A valid chassis_uuid is stored and returned on GET."""
        ndict = post_get_test_node(chassis_uuid=self.chassis.uuid)
        response = self.post_json('/nodes', ndict)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(201, response.status_int)
        result = self.get_json('/nodes/%s' % ndict['uuid'])
        self.assertEqual(ndict['chassis_uuid'], result['chassis_uuid'])
        # Check location header
        self.assertIsNotNone(response.location)
        expected_location = '/v1/nodes/%s' % ndict['uuid']
        self.assertEqual(urlparse.urlparse(response.location).path,
                         expected_location)
    def test_create_node_chassis_uuid_not_found(self):
        """An unknown chassis_uuid is a 400 at creation time."""
        ndict = post_get_test_node(
            chassis_uuid='1a1a1a1a-2b2b-3c3c-4d4d-5e5e5e5e5e5e')
        response = self.post_json('/nodes', ndict, expect_errors=True)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(400, response.status_int)
        self.assertTrue(response.json['error_message'])
    def test_create_node_with_internal_field(self):
        """Internal fields such as reservation may not be set by clients."""
        ndict = post_get_test_node()
        ndict['reservation'] = 'fake'
        response = self.post_json('/nodes', ndict, expect_errors=True)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(400, response.status_int)
        self.assertTrue(response.json['error_message'])
class TestDelete(base.FunctionalTest):
    """Tests for DELETE /nodes/<uuid>."""
    def setUp(self):
        super(TestDelete, self).setUp()
        self.chassis = obj_utils.create_test_chassis(self.context)
        # No conductor is running; stub out topic resolution.
        patcher = mock.patch.object(rpcapi.ConductorAPI, 'get_topic_for')
        self.mock_gtf = patcher.start()
        self.mock_gtf.return_value = 'test-topic'
        self.addCleanup(patcher.stop)
    @mock.patch.object(rpcapi.ConductorAPI, 'destroy_node')
    def test_delete_node(self, destroy_mock):
        """A plain DELETE forwards to ConductorAPI.destroy_node."""
        test_node = obj_utils.create_test_node(self.context)
        self.delete('/nodes/%s' % test_node.uuid)
        destroy_mock.assert_called_once_with(mock.ANY, test_node.uuid,
                                             'test-topic')
    @mock.patch.object(objects.Node, 'get_by_uuid')
    def test_delete_node_not_found(self, get_mock):
        """Deleting a nonexistent node yields 404 with an error body."""
        missing = obj_utils.get_test_node(context.get_admin_context())
        get_mock.side_effect = exception.NodeNotFound(node=missing.uuid)
        response = self.delete('/nodes/%s' % missing.uuid, expect_errors=True)
        self.assertEqual(404, response.status_int)
        self.assertEqual('application/json', response.content_type)
        self.assertTrue(response.json['error_message'])
        get_mock.assert_called_once_with(mock.ANY, missing.uuid)
    def test_delete_ports_subresource(self):
        """DELETE on the ports sub-resource is forbidden (403)."""
        test_node = obj_utils.create_test_node(self.context)
        response = self.delete('/nodes/%s/ports' % test_node.uuid,
                               expect_errors=True)
        self.assertEqual(403, response.status_int)
    @mock.patch.object(rpcapi.ConductorAPI, 'destroy_node')
    def test_delete_associated(self, destroy_mock):
        """Deleting a node with an instance attached is a 409 Conflict."""
        test_node = obj_utils.create_test_node(
            self.context,
            instance_uuid='aaaaaaaa-1111-bbbb-2222-cccccccccccc')
        destroy_mock.side_effect = exception.NodeAssociated(
            node=test_node.uuid, instance=test_node.instance_uuid)
        response = self.delete('/nodes/%s' % test_node.uuid,
                               expect_errors=True)
        self.assertEqual(409, response.status_int)
        destroy_mock.assert_called_once_with(mock.ANY, test_node.uuid,
                                             'test-topic')
class TestPut(base.FunctionalTest):
    """Tests for PUT on the node state sub-resources: power, provision,
    console and boot_device."""
    def setUp(self):
        super(TestPut, self).setUp()
        self.chassis = obj_utils.create_test_chassis(self.context)
        self.node = obj_utils.create_test_node(self.context)
        # Stub topic resolution and every conductor RPC these tests hit.
        p = mock.patch.object(rpcapi.ConductorAPI, 'get_topic_for')
        self.mock_gtf = p.start()
        self.mock_gtf.return_value = 'test-topic'
        self.addCleanup(p.stop)
        p = mock.patch.object(rpcapi.ConductorAPI, 'change_node_power_state')
        self.mock_cnps = p.start()
        self.addCleanup(p.stop)
        p = mock.patch.object(rpcapi.ConductorAPI, 'do_node_deploy')
        self.mock_dnd = p.start()
        self.addCleanup(p.stop)
        p = mock.patch.object(rpcapi.ConductorAPI, 'do_node_tear_down')
        self.mock_dntd = p.start()
        self.addCleanup(p.stop)
    def test_power_state(self):
        """A power-on request is accepted (202) and forwarded via RPC."""
        response = self.put_json('/nodes/%s/states/power' % self.node['uuid'],
                                 {'target': states.POWER_ON})
        self.assertEqual(202, response.status_code)
        self.assertEqual('', response.body)
        self.mock_cnps.assert_called_once_with(mock.ANY,
                                               self.node['uuid'],
                                               states.POWER_ON,
                                               'test-topic')
        # Check location header
        self.assertIsNotNone(response.location)
        expected_location = '/v1/nodes/%s/states' % self.node.uuid
        self.assertEqual(urlparse.urlparse(response.location).path,
                         expected_location)
    def test_power_invalid_state_request(self):
        """An unknown power-state target is a 400."""
        ret = self.put_json('/nodes/%s/states/power' % self.node.uuid,
                            {'target': 'not-supported'}, expect_errors=True)
        self.assertEqual(400, ret.status_code)
    def test_provision_with_deploy(self):
        """target=ACTIVE triggers do_node_deploy (rebuild flag False)."""
        ret = self.put_json('/nodes/%s/states/provision' % self.node.uuid,
                            {'target': states.ACTIVE})
        self.assertEqual(202, ret.status_code)
        self.assertEqual('', ret.body)
        self.mock_dnd.assert_called_once_with(
            mock.ANY, self.node.uuid, False, 'test-topic')
        # Check location header
        self.assertIsNotNone(ret.location)
        expected_location = '/v1/nodes/%s/states' % self.node.uuid
        self.assertEqual(urlparse.urlparse(ret.location).path,
                         expected_location)
    def test_provision_with_tear_down(self):
        """target=DELETED triggers do_node_tear_down."""
        ret = self.put_json('/nodes/%s/states/provision' % self.node.uuid,
                            {'target': states.DELETED})
        self.assertEqual(202, ret.status_code)
        self.assertEqual('', ret.body)
        self.mock_dntd.assert_called_once_with(
            mock.ANY, self.node.uuid, 'test-topic')
        # Check location header
        self.assertIsNotNone(ret.location)
        expected_location = '/v1/nodes/%s/states' % self.node.uuid
        self.assertEqual(urlparse.urlparse(ret.location).path,
                         expected_location)
    def test_provision_invalid_state_request(self):
        """An unknown provision target is a 400."""
        ret = self.put_json('/nodes/%s/states/provision' % self.node.uuid,
                            {'target': 'not-supported'}, expect_errors=True)
        self.assertEqual(400, ret.status_code)
    def test_provision_already_in_progress(self):
        """A second provision request while one is pending is a 409."""
        node = obj_utils.create_test_node(self.context, id=1,
                                          uuid=utils.generate_uuid(),
                                          target_provision_state=states.ACTIVE)
        ret = self.put_json('/nodes/%s/states/provision' % node.uuid,
                            {'target': states.ACTIVE},
                            expect_errors=True)
        self.assertEqual(409, ret.status_code)  # Conflict
    def test_provision_with_tear_down_in_progress_deploywait(self):
        """Tear-down is allowed while a deploy is waiting (DEPLOYWAIT)."""
        node = obj_utils.create_test_node(
            self.context, id=1, uuid=utils.generate_uuid(),
            provision_state=states.DEPLOYWAIT,
            target_provision_state=states.DEPLOYDONE)
        ret = self.put_json('/nodes/%s/states/provision' % node.uuid,
                            {'target': states.DELETED})
        self.assertEqual(202, ret.status_code)
        self.assertEqual('', ret.body)
        self.mock_dntd.assert_called_once_with(
            mock.ANY, node.uuid, 'test-topic')
        # Check location header
        self.assertIsNotNone(ret.location)
        expected_location = '/v1/nodes/%s/states' % node.uuid
        self.assertEqual(urlparse.urlparse(ret.location).path,
                         expected_location)
    def test_provision_already_in_state(self):
        """Requesting the provision state the node is already in is a 400."""
        node = obj_utils.create_test_node(
            self.context, id=1, uuid=utils.generate_uuid(),
            target_provision_state=states.NOSTATE,
            provision_state=states.ACTIVE)
        ret = self.put_json('/nodes/%s/states/provision' % node.uuid,
                            {'target': states.ACTIVE},
                            expect_errors=True)
        self.assertEqual(400, ret.status_code)
    def test_set_console_mode_enabled(self):
        """enabled="true" is coerced to boolean True for the RPC call."""
        with mock.patch.object(rpcapi.ConductorAPI, 'set_console_mode') \
                as mock_scm:
            ret = self.put_json('/nodes/%s/states/console' % self.node.uuid,
                                {'enabled': "true"})
            self.assertEqual(202, ret.status_code)
            self.assertEqual('', ret.body)
            mock_scm.assert_called_once_with(mock.ANY, self.node.uuid,
                                             True, 'test-topic')
            # Check location header
            self.assertIsNotNone(ret.location)
            expected_location = '/v1/nodes/%s/states/console' % self.node.uuid
            self.assertEqual(urlparse.urlparse(ret.location).path,
                             expected_location)
    def test_set_console_mode_disabled(self):
        """enabled="false" is coerced to boolean False for the RPC call."""
        with mock.patch.object(rpcapi.ConductorAPI, 'set_console_mode') \
                as mock_scm:
            ret = self.put_json('/nodes/%s/states/console' % self.node.uuid,
                                {'enabled': "false"})
            self.assertEqual(202, ret.status_code)
            self.assertEqual('', ret.body)
            mock_scm.assert_called_once_with(mock.ANY, self.node.uuid,
                                             False, 'test-topic')
            # Check location header
            self.assertIsNotNone(ret.location)
            expected_location = '/v1/nodes/%s/states/console' % self.node.uuid
            self.assertEqual(urlparse.urlparse(ret.location).path,
                             expected_location)
    def test_set_console_mode_bad_request(self):
        """A non-boolean enabled value is rejected before any RPC."""
        with mock.patch.object(rpcapi.ConductorAPI, 'set_console_mode') \
                as mock_scm:
            ret = self.put_json('/nodes/%s/states/console' % self.node.uuid,
                                {'enabled': "invalid-value"},
                                expect_errors=True)
            self.assertEqual(400, ret.status_code)
            # assert set_console_mode wasn't called
            assert not mock_scm.called
    def test_set_console_mode_bad_request_missing_parameter(self):
        """A missing enabled parameter is rejected before any RPC."""
        with mock.patch.object(rpcapi.ConductorAPI, 'set_console_mode') \
                as mock_scm:
            ret = self.put_json('/nodes/%s/states/console' % self.node.uuid,
                                {}, expect_errors=True)
            self.assertEqual(400, ret.status_code)
            # assert set_console_mode wasn't called
            assert not mock_scm.called
    def test_set_console_mode_console_not_supported(self):
        """UnsupportedDriverExtension from the conductor maps to 400."""
        with mock.patch.object(rpcapi.ConductorAPI, 'set_console_mode') \
                as mock_scm:
            mock_scm.side_effect = exception.UnsupportedDriverExtension(
                extension='console', driver='test-driver')
            ret = self.put_json('/nodes/%s/states/console' % self.node.uuid,
                                {'enabled': "true"}, expect_errors=True)
            self.assertEqual(400, ret.status_code)
            mock_scm.assert_called_once_with(mock.ANY, self.node.uuid,
                                             True, 'test-topic')
    def test_provision_node_in_maintenance_fail(self):
        """Provisioning a node in maintenance mode is a 400."""
        with mock.patch.object(rpcapi.ConductorAPI, 'do_node_deploy') as dnd:
            node = obj_utils.create_test_node(self.context, id=1,
                                              uuid=utils.generate_uuid(),
                                              maintenance=True)
            dnd.side_effect = exception.NodeInMaintenance(op='provisioning',
                                                          node=node.uuid)
            ret = self.put_json('/nodes/%s/states/provision' % node.uuid,
                                {'target': states.ACTIVE},
                                expect_errors=True)
            self.assertEqual(400, ret.status_code)
            self.assertTrue(ret.json['error_message'])
    @mock.patch.object(rpcapi.ConductorAPI, 'set_boot_device')
    def test_set_boot_device(self, mock_sbd):
        """Setting a boot device returns 204; persistent defaults to False."""
        device = boot_devices.PXE
        ret = self.put_json('/nodes/%s/management/boot_device'
                            % self.node.uuid, {'boot_device': device})
        self.assertEqual(204, ret.status_code)
        self.assertEqual('', ret.body)
        mock_sbd.assert_called_once_with(mock.ANY, self.node.uuid,
                                         device, persistent=False,
                                         topic='test-topic')
    @mock.patch.object(rpcapi.ConductorAPI, 'set_boot_device')
    def test_set_boot_device_not_supported(self, mock_sbd):
        """Missing management interface support surfaces as a 400."""
        mock_sbd.side_effect = exception.UnsupportedDriverExtension(
            extension='management', driver='test-driver')
        device = boot_devices.PXE
        ret = self.put_json('/nodes/%s/management/boot_device'
                            % self.node.uuid, {'boot_device': device},
                            expect_errors=True)
        self.assertEqual(400, ret.status_code)
        self.assertTrue(ret.json['error_message'])
        mock_sbd.assert_called_once_with(mock.ANY, self.node.uuid,
                                         device, persistent=False,
                                         topic='test-topic')
    @mock.patch.object(rpcapi.ConductorAPI, 'set_boot_device')
    def test_set_boot_device_persistent(self, mock_sbd):
        """The ?persistent=True query flag is forwarded to the RPC."""
        device = boot_devices.PXE
        ret = self.put_json('/nodes/%s/management/boot_device?persistent=True'
                            % self.node.uuid, {'boot_device': device})
        self.assertEqual(204, ret.status_code)
        self.assertEqual('', ret.body)
        mock_sbd.assert_called_once_with(mock.ANY, self.node.uuid,
                                         device, persistent=True,
                                         topic='test-topic')
    @mock.patch.object(rpcapi.ConductorAPI, 'set_boot_device')
    def test_set_boot_device_persistent_invalid_value(self, mock_sbd):
        """A non-boolean persistent query value is a 400."""
        device = boot_devices.PXE
        ret = self.put_json('/nodes/%s/management/boot_device?persistent=blah'
                            % self.node.uuid, {'boot_device': device},
                            expect_errors=True)
        self.assertEqual('application/json', ret.content_type)
        self.assertEqual(400, ret.status_code)
| faizan-barmawer/openstack_ironic | ironic/tests/api/v1/test_nodes.py | Python | apache-2.0 | 56,300 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Licensed under the GNU General Public License, version 3.
# See the file http://www.gnu.org/copyleft/gpl.txt
from pisi.actionsapi import shelltools
from pisi.actionsapi import get
from pisi.actionsapi import pisitools
from pisi.actionsapi import autotools
def setup():
    """Configure libfm via autogen.sh with the distribution's options.

    GTK frontend, gtk-doc, the demo app and desktop actions are disabled;
    udisks support is enabled.  The whole command is a single shell string,
    so the flag list below is what actually reaches configure.
    """
    shelltools.system("./autogen.sh --libdir=/usr/lib \
                                    --sysconfdir=/etc \
                                    --prefix=/usr \
                                    --enable-shared \
                                    --disable-static \
                                    --without-gtk \
                                    --disable-gtk-doc \
                                    --enable-udisks \
                                    --disable-actions \
                                    --disable-demo \
                                    --disable-dependency-tracking \
                                    --enable-fast-install=autogen")
def build():
    """Compile the source tree with the default make target."""
    autotools.make()
def install():
    """Install into the package image directory and ship license docs."""
    autotools.rawInstall("DESTDIR=%s" % get.installDIR())
    pisitools.dodoc("AUTHORS", "COPYING")
from django.db import models
from Profiler.models import *
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
class NewsManager(models.Manager):
    """Manager with helpers to create, edit, query, page and delete News."""

    def addNews(self, request):
        """Create a news item from a request dict.

        ``request`` must carry either ``rollNo`` (student publisher) or
        ``facultyId`` (faculty publisher), plus ``headline`` and
        ``description``; ``image`` and ``link`` are optional.
        Raises ValueError when no publisher key is present.
        """
        # Resolve the publisher into a local variable.  The previous
        # implementation stashed it in a module-level ``global obj``,
        # which leaked state between calls and raised NameError when
        # neither key was present; now that case fails fast and clearly.
        if "rollNo" in request.keys():
            publisher = Student.objects.getStudentByRollNo(request)
        elif "facultyId" in request.keys():
            publisher = Faculty.objects.getFacultyByFacultyId(request)
        else:
            raise ValueError(
                "request must contain either 'rollNo' or 'facultyId'")
        N = News(
            headline=request['headline'],
            description=request['description'],
            publishedBy=publisher.name
        )
        if "image" in request.keys():
            N.image = request['image']
        if "link" in request.keys():
            N.link = request['link']
        N.save()
        return N

    def editNews(self, request):
        """Update the news item with ``request['id']``.

        Only headline and description are editable.
        """
        N = News.objects.get(id=request['id'])
        N.headline = request['headline']
        N.description = request['description']
        N.save()
        return N

    def getNewsById(self, request):
        """Return the news item identified by ``request['id']``."""
        return News.objects.get(id=request['id'])

    def retrieveNews(self, request):
        """Return all news, optionally filtered.

        Optional request keys: ``headline``, ``date``, ``publishedBy``.
        """
        N = News.objects.all()
        if 'headline' in request.keys():
            N = N.filter(headline=request['headline'])
        if 'date' in request.keys():
            N = N.filter(date=request['date'])
        if 'publishedBy' in request.keys():
            N = N.filter(publishedBy=request['publishedBy'])
        return N

    def retrieveLatestNews(self, request):
        """Return up to the ten newest items dated on/after ``request['since']``."""
        lastTen = News.objects.filter(
            date__gte=request['since']).order_by('-date')[:10]
        return lastTen

    def retrieveMoreNews(self, request):
        """Return one page of news; ``pageNo`` and ``rowsPerPage`` in ``request``."""
        N = News.objects.all()
        paginator = Paginator(N, request['rowsPerPage'])
        page = request['pageNo']
        try:
            news = paginator.page(page)
        except PageNotAnInteger:
            # If page is not an integer, deliver first page.
            news = paginator.page(1)
        except EmptyPage:
            # If page is out of range (e.g. 9999), deliver last page of results.
            news = paginator.page(paginator.num_pages)
        return news

    def deleteNews(self, request):
        """Delete the news item identified by ``request['id']``."""
        N = News.objects.get(id=request['id'])
        return N.delete()
class News(models.Model):
    """A single news-feed entry."""
    # Headline
    headline = models.CharField(max_length=250, null=False, blank=False)
    # Description
    description = models.CharField(max_length=4000, null=False, blank=False)
    # Image
    image = models.URLField(null=True, blank=True)
    # Link
    link = models.URLField(null=True, blank=True)
    # Date -- auto_now_add: set once at creation, never on later saves
    date = models.DateField(auto_now=False, auto_now_add=True)
    # Published By
    # NOTE(review): declared as a ForeignKey to Name, yet
    # NewsManager.addNews assigns the publisher's ``name`` attribute --
    # confirm the two actually agree (a Name instance vs. a raw value).
    publishedBy = models.ForeignKey(Name, related_name="news_published_by", null=False, blank=False)
    objects = NewsManager()
    def __str__(self):
        return self.headline
| IEEEDTU/CMS | NewsFeed/models/News.py | Python | mit | 3,319 |
import os, sys
dirname = os.path.dirname(__file__)
lib_path = os.path.join(dirname, "python_speech_features")
sys.path.append(lib_path)
import features as speechfeatures
import numpy as np
def filter(samplerate, signal, winlen=0.02, winstep=0.01,
           nfilt=40, nfft=512, lowfreq=100, highfreq=5000, preemph=0.97):
    """Extract mel filterbank energies from a signal.

    NOTE: this function shadows the built-in ``filter`` -- kept for
    backward compatibility with existing callers.

    Args:
        samplerate (int): samples taken per second
        signal (1d numpy array): sample values
        winlen (float): sliding window size in seconds
        winstep (float): overlap of sliding windows in seconds
        nfilt (int): number of mel filters to apply
        nfft (int): size of the discrete fourier transform to use
        lowfreq (int): lowest frequency to collect
        highfreq (int): highest frequency to collect
        preemph (float): preemphasis factor

    Returns:
        2d numpy array: filterbank energies, shape (nfilt, n_frames)
    """
    fbank_options = dict(winlen=winlen, winstep=winstep, nfilt=nfilt,
                         nfft=nfft, lowfreq=lowfreq, highfreq=highfreq,
                         preemph=preemph)
    # fbank also returns total frame energies, which we do not need here.
    energies, _total_energy = speechfeatures.fbank(
        np.array(signal), samplerate, **fbank_options)
    # Transpose so rows are filter channels and columns are frames.
    return energies.T
def logfilter(samplerate, signal, *args, **kwargs):
    """Extract log mel filterbank energies from a signal.

    Args:
        samplerate (int): samples taken per second
        signal (1d numpy array): sample values
        *args: forwarded to ``filter``
        **kwargs: forwarded to ``filter``

    Returns:
        2d numpy array: natural logarithm of the filterbank energies
    """
    return np.log(filter(samplerate, signal, *args, **kwargs))
return np.log(feat) | twerkmeister/iLID | preprocessing/audio/melfilterbank.py | Python | mit | 1,553 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-04 14:04
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django 1.10.1.  Alters Battle.note to a 300-char
    # CharField whose default is the literal string 'null' (not SQL NULL --
    # presumably intentional, but worth confirming against the model).
    dependencies = [
        ('api', '0004_auto_20160904_1214'),
    ]
    operations = [
        migrations.AlterField(
            model_name='battle',
            name='note',
            field=models.CharField(default='null', max_length=300),
        ),
    ]
| kushsharma/GotAPI | api/migrations/0005_auto_20160904_1934.py | Python | apache-2.0 | 462 |
# coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import serialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
class BindingList(ListResource):
    """ PLEASE NOTE that this class contains beta products that are subject to
    change. Use them with caution. """
    # NOTE: auto-generated from the Twilio API definitions; keep manual edits
    # minimal so regeneration stays trivial.
    def __init__(self, version, service_sid):
        """
        Initialize the BindingList

        :param Version version: Version that contains the resource
        :param service_sid: The SID of the Service that the resource is associated with

        :returns: twilio.rest.notify.v1.service.binding.BindingList
        :rtype: twilio.rest.notify.v1.service.binding.BindingList
        """
        super(BindingList, self).__init__(version)
        # Path Solution
        self._solution = {'service_sid': service_sid, }
        self._uri = '/Services/{service_sid}/Bindings'.format(**self._solution)
    def create(self, identity, binding_type, address, tag=values.unset,
               notification_protocol_version=values.unset,
               credential_sid=values.unset, endpoint=values.unset):
        """
        Create the BindingInstance

        :param unicode identity: The `identity` value that identifies the new resource's User
        :param BindingInstance.BindingType binding_type: The type of the Binding
        :param unicode address: The channel-specific address
        :param unicode tag: A tag that can be used to select the Bindings to notify
        :param unicode notification_protocol_version: The protocol version to use to send the notification
        :param unicode credential_sid: The SID of the Credential resource to be used to send notifications to this Binding
        :param unicode endpoint: Deprecated

        :returns: The created BindingInstance
        :rtype: twilio.rest.notify.v1.service.binding.BindingInstance
        """
        # values.of() drops unset entries; serialize.map handles list-valued tags.
        data = values.of({
            'Identity': identity,
            'BindingType': binding_type,
            'Address': address,
            'Tag': serialize.map(tag, lambda e: e),
            'NotificationProtocolVersion': notification_protocol_version,
            'CredentialSid': credential_sid,
            'Endpoint': endpoint,
        })
        payload = self._version.create(method='POST', uri=self._uri, data=data, )
        return BindingInstance(self._version, payload, service_sid=self._solution['service_sid'], )
    def stream(self, start_date=values.unset, end_date=values.unset,
               identity=values.unset, tag=values.unset, limit=None, page_size=None):
        """
        Streams BindingInstance records from the API as a generator stream.
        This operation lazily loads records as efficiently as possible until the limit
        is reached.
        The results are returned as a generator, so this operation is memory efficient.

        :param date start_date: Only include usage that has occurred on or after this date
        :param date end_date: Only include usage that occurred on or before this date
        :param unicode identity: The `identity` value of the resources to read
        :param unicode tag: Only list Bindings that have all of the specified Tags
        :param int limit: Upper limit for the number of records to return. stream()
                          guarantees to never return more than limit. Default is no limit
        :param int page_size: Number of records to fetch per request, when not set will use
                              the default value of 50 records. If no page_size is defined
                              but a limit is defined, stream() will attempt to read the
                              limit with the most efficient page size, i.e. min(limit, 1000)

        :returns: Generator that will yield up to limit results
        :rtype: list[twilio.rest.notify.v1.service.binding.BindingInstance]
        """
        # read_limits picks the most efficient page size for the requested limit.
        limits = self._version.read_limits(limit, page_size)
        page = self.page(
            start_date=start_date,
            end_date=end_date,
            identity=identity,
            tag=tag,
            page_size=limits['page_size'],
        )
        return self._version.stream(page, limits['limit'])
    def list(self, start_date=values.unset, end_date=values.unset,
             identity=values.unset, tag=values.unset, limit=None, page_size=None):
        """
        Lists BindingInstance records from the API as a list.
        Unlike stream(), this operation is eager and will load `limit` records into
        memory before returning.

        :param date start_date: Only include usage that has occurred on or after this date
        :param date end_date: Only include usage that occurred on or before this date
        :param unicode identity: The `identity` value of the resources to read
        :param unicode tag: Only list Bindings that have all of the specified Tags
        :param int limit: Upper limit for the number of records to return. list() guarantees
                          never to return more than limit. Default is no limit
        :param int page_size: Number of records to fetch per request, when not set will use
                              the default value of 50 records. If no page_size is defined
                              but a limit is defined, list() will attempt to read the limit
                              with the most efficient page size, i.e. min(limit, 1000)

        :returns: Generator that will yield up to limit results
        :rtype: list[twilio.rest.notify.v1.service.binding.BindingInstance]
        """
        return list(self.stream(
            start_date=start_date,
            end_date=end_date,
            identity=identity,
            tag=tag,
            limit=limit,
            page_size=page_size,
        ))
    def page(self, start_date=values.unset, end_date=values.unset,
             identity=values.unset, tag=values.unset, page_token=values.unset,
             page_number=values.unset, page_size=values.unset):
        """
        Retrieve a single page of BindingInstance records from the API.
        Request is executed immediately

        :param date start_date: Only include usage that has occurred on or after this date
        :param date end_date: Only include usage that occurred on or before this date
        :param unicode identity: The `identity` value of the resources to read
        :param unicode tag: Only list Bindings that have all of the specified Tags
        :param str page_token: PageToken provided by the API
        :param int page_number: Page Number, this value is simply for client state
        :param int page_size: Number of records to return, defaults to 50

        :returns: Page of BindingInstance
        :rtype: twilio.rest.notify.v1.service.binding.BindingPage
        """
        # Dates go over the wire as ISO 8601 strings; identity/tag may be lists.
        data = values.of({
            'StartDate': serialize.iso8601_date(start_date),
            'EndDate': serialize.iso8601_date(end_date),
            'Identity': serialize.map(identity, lambda e: e),
            'Tag': serialize.map(tag, lambda e: e),
            'PageToken': page_token,
            'Page': page_number,
            'PageSize': page_size,
        })
        response = self._version.page(method='GET', uri=self._uri, params=data, )
        return BindingPage(self._version, response, self._solution)
    def get_page(self, target_url):
        """
        Retrieve a specific page of BindingInstance records from the API.
        Request is executed immediately

        :param str target_url: API-generated URL for the requested results page

        :returns: Page of BindingInstance
        :rtype: twilio.rest.notify.v1.service.binding.BindingPage
        """
        response = self._version.domain.twilio.request(
            'GET',
            target_url,
        )
        return BindingPage(self._version, response, self._solution)
    def get(self, sid):
        """
        Constructs a BindingContext

        :param sid: The unique string that identifies the resource

        :returns: twilio.rest.notify.v1.service.binding.BindingContext
        :rtype: twilio.rest.notify.v1.service.binding.BindingContext
        """
        return BindingContext(self._version, service_sid=self._solution['service_sid'], sid=sid, )
    def __call__(self, sid):
        """
        Constructs a BindingContext

        :param sid: The unique string that identifies the resource

        :returns: twilio.rest.notify.v1.service.binding.BindingContext
        :rtype: twilio.rest.notify.v1.service.binding.BindingContext
        """
        # Mirrors get() so bindings(sid) and bindings.get(sid) are equivalent.
        return BindingContext(self._version, service_sid=self._solution['service_sid'], sid=sid, )
    def __repr__(self):
        """
        Provide a friendly representation

        :returns: Machine friendly representation
        :rtype: str
        """
        return '<Twilio.Notify.V1.BindingList>'
class BindingPage(Page):
    """ PLEASE NOTE that this class contains beta products that are subject to
    change. Use them with caution. """

    def __init__(self, version, response, solution):
        """
        Wrap one page of Binding results returned by the API.

        :param Version version: Version that contains the resource
        :param Response response: Response from the API
        :param solution: Path solution carrying the parent Service SID

        :returns: twilio.rest.notify.v1.service.binding.BindingPage
        :rtype: twilio.rest.notify.v1.service.binding.BindingPage
        """
        super(BindingPage, self).__init__(version, response)
        # Remember the path solution so each instance can resolve its context.
        self._solution = solution

    def get_instance(self, payload):
        """
        Build a BindingInstance from a raw API payload.

        :param dict payload: Payload response from the API

        :returns: twilio.rest.notify.v1.service.binding.BindingInstance
        :rtype: twilio.rest.notify.v1.service.binding.BindingInstance
        """
        service_sid = self._solution['service_sid']
        return BindingInstance(self._version, payload, service_sid=service_sid)

    def __repr__(self):
        """
        Provide a friendly representation

        :returns: Machine friendly representation
        :rtype: str
        """
        return '<Twilio.Notify.V1.BindingPage>'
class BindingContext(InstanceContext):
    """ PLEASE NOTE that this class contains beta products that are subject to
    change. Use them with caution. """

    def __init__(self, version, service_sid, sid):
        """
        Build a context addressing one Binding within one Service.

        :param Version version: Version that contains the resource
        :param service_sid: The SID of the Service to fetch the resource from
        :param sid: The unique string that identifies the resource

        :returns: twilio.rest.notify.v1.service.binding.BindingContext
        :rtype: twilio.rest.notify.v1.service.binding.BindingContext
        """
        super(BindingContext, self).__init__(version)
        # Both SIDs are required to address the resource.
        self._solution = {'service_sid': service_sid, 'sid': sid, }
        self._uri = '/Services/{service_sid}/Bindings/{sid}'.format(
            service_sid=service_sid, sid=sid)

    def fetch(self):
        """
        Fetch the BindingInstance

        :returns: The fetched BindingInstance
        :rtype: twilio.rest.notify.v1.service.binding.BindingInstance
        """
        payload = self._version.fetch(method='GET', uri=self._uri, )
        service_sid = self._solution['service_sid']
        sid = self._solution['sid']
        return BindingInstance(self._version, payload, service_sid=service_sid, sid=sid)

    def delete(self):
        """
        Deletes the BindingInstance

        :returns: True if delete succeeds, False otherwise
        :rtype: bool
        """
        return self._version.delete(method='DELETE', uri=self._uri, )

    def __repr__(self):
        """
        Provide a friendly representation

        :returns: Machine friendly representation
        :rtype: str
        """
        details = ['{}={}'.format(key, value)
                   for key, value in self._solution.items()]
        return '<Twilio.Notify.V1.BindingContext {}>'.format(' '.join(details))
class BindingInstance(InstanceResource):
    """ PLEASE NOTE that this class contains beta products that are subject to
    change. Use them with caution. """
    # NOTE: auto-generated from the Twilio API definitions.
    class BindingType(object):
        # Allowed values for the resource's binding_type field.
        APN = "apn"
        GCM = "gcm"
        SMS = "sms"
        FCM = "fcm"
        FACEBOOK_MESSENGER = "facebook-messenger"
        ALEXA = "alexa"
    def __init__(self, version, payload, service_sid, sid=None):
        """
        Initialize the BindingInstance

        :returns: twilio.rest.notify.v1.service.binding.BindingInstance
        :rtype: twilio.rest.notify.v1.service.binding.BindingInstance
        """
        super(BindingInstance, self).__init__(version)
        # Marshaled Properties
        self._properties = {
            'sid': payload.get('sid'),
            'account_sid': payload.get('account_sid'),
            'service_sid': payload.get('service_sid'),
            'credential_sid': payload.get('credential_sid'),
            'date_created': deserialize.iso8601_datetime(payload.get('date_created')),
            'date_updated': deserialize.iso8601_datetime(payload.get('date_updated')),
            'notification_protocol_version': payload.get('notification_protocol_version'),
            'endpoint': payload.get('endpoint'),
            'identity': payload.get('identity'),
            'binding_type': payload.get('binding_type'),
            'address': payload.get('address'),
            'tags': payload.get('tags'),
            'url': payload.get('url'),
            'links': payload.get('links'),
        }
        # Context is created lazily by _proxy on first use.
        self._context = None
        # Prefer an explicitly supplied sid (from a Context) over the payload's.
        self._solution = {'service_sid': service_sid, 'sid': sid or self._properties['sid'], }
    @property
    def _proxy(self):
        """
        Generate an instance context for the instance, the context is capable of
        performing various actions. All instance actions are proxied to the context

        :returns: BindingContext for this BindingInstance
        :rtype: twilio.rest.notify.v1.service.binding.BindingContext
        """
        if self._context is None:
            self._context = BindingContext(
                self._version,
                service_sid=self._solution['service_sid'],
                sid=self._solution['sid'],
            )
        return self._context
    @property
    def sid(self):
        """
        :returns: The unique string that identifies the resource
        :rtype: unicode
        """
        return self._properties['sid']
    @property
    def account_sid(self):
        """
        :returns: The SID of the Account that created the resource
        :rtype: unicode
        """
        return self._properties['account_sid']
    @property
    def service_sid(self):
        """
        :returns: The SID of the Service that the resource is associated with
        :rtype: unicode
        """
        return self._properties['service_sid']
    @property
    def credential_sid(self):
        """
        :returns: The SID of the Credential resource to be used to send notifications to this Binding
        :rtype: unicode
        """
        return self._properties['credential_sid']
    @property
    def date_created(self):
        """
        :returns: The RFC 2822 date and time in GMT when the resource was created
        :rtype: datetime
        """
        return self._properties['date_created']
    @property
    def date_updated(self):
        """
        :returns: The RFC 2822 date and time in GMT when the resource was last updated
        :rtype: datetime
        """
        return self._properties['date_updated']
    @property
    def notification_protocol_version(self):
        """
        :returns: The protocol version to use to send the notification
        :rtype: unicode
        """
        return self._properties['notification_protocol_version']
    @property
    def endpoint(self):
        """
        :returns: Deprecated
        :rtype: unicode
        """
        return self._properties['endpoint']
    @property
    def identity(self):
        """
        :returns: The `identity` value that identifies the new resource's User
        :rtype: unicode
        """
        return self._properties['identity']
    @property
    def binding_type(self):
        """
        :returns: The type of the Binding
        :rtype: unicode
        """
        return self._properties['binding_type']
    @property
    def address(self):
        """
        :returns: The channel-specific address
        :rtype: unicode
        """
        return self._properties['address']
    @property
    def tags(self):
        """
        :returns: The list of tags associated with this Binding
        :rtype: unicode
        """
        return self._properties['tags']
    @property
    def url(self):
        """
        :returns: The absolute URL of the Binding resource
        :rtype: unicode
        """
        return self._properties['url']
    @property
    def links(self):
        """
        :returns: The URLs of related resources
        :rtype: unicode
        """
        return self._properties['links']
    def fetch(self):
        """
        Fetch the BindingInstance

        :returns: The fetched BindingInstance
        :rtype: twilio.rest.notify.v1.service.binding.BindingInstance
        """
        return self._proxy.fetch()
    def delete(self):
        """
        Deletes the BindingInstance

        :returns: True if delete succeeds, False otherwise
        :rtype: bool
        """
        return self._proxy.delete()
    def __repr__(self):
        """
        Provide a friendly representation

        :returns: Machine friendly representation
        :rtype: str
        """
        context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
        return '<Twilio.Notify.V1.BindingInstance {}>'.format(context)
| Vagab0nd/SiCKRAGE | lib3/twilio/rest/notify/v1/service/binding.py | Python | gpl-3.0 | 18,216 |
#
# Author: Pearu Peterson, March 2002
#
# additions by Travis Oliphant, March 2002
# additions by Eric Jones, June 2002
# additions by Johannes Loehnert, June 2006
# additions by Bart Vandereycken, June 2006
# additions by Andrew D Straw, May 2007
# additions by Tiziano Zito, November 2008
#
# April 2010: Functions for LU, QR, SVD, Schur and Cholesky decompositions were
# moved to their own files. Still in this file are functions for eigenstuff
# and for the Hessenberg form.
# Public API of this module (eigvals_banded/hessenberg are defined later).
__all__ = ['eig','eigh','eig_banded','eigvals','eigvalsh', 'eigvals_banded',
           'hessenberg']
import numpy
from numpy import array, asarray_chkfinite, asarray, diag, zeros, ones, \
     isfinite, inexact, nonzero, iscomplexobj, cast, flatnonzero, conj
# Local imports
from scipy.linalg import calc_lwork
from misc import LinAlgError, _datacopied
from lapack import get_lapack_funcs
from blas import get_blas_funcs
_I = cast['F'](1j)  # single-precision imaginary unit used to build w = wr + i*wi
def _make_complex_eigvecs(w, vin, dtype):
    """
    Produce complex-valued eigenvectors from LAPACK DGGEV real-valued output
    """
    # LAPACK packs each conjugate eigenpair into two adjacent real columns
    # (real part, imaginary part) -- see the ALPHAI note in the DGGEV man
    # page.  Rebuild the complex columns in place.
    v = numpy.array(vin, dtype=dtype)
    pair_start = (w.imag > 0)
    # workaround for LAPACK bug, cf. ticket #709
    pair_start[:-1] |= (w.imag[1:] < 0)
    for col in flatnonzero(pair_start):
        v.imag[:, col] = vin[:, col + 1]
        # second column of the pair is the complex conjugate of the first
        conj(v[:, col], v[:, col + 1])
    return v
def _geneig(a1, b, left, right, overwrite_a, overwrite_b):
    """Solve the generalized eigenproblem a1 x = w b x via LAPACK ?ggev.

    Helper for eig(); returns the eigenvalues and, depending on the
    `left`/`right` flags, the corresponding eigenvector matrices.
    """
    b1 = asarray(b)
    overwrite_b = overwrite_b or _datacopied(b1, b)
    if len(b1.shape) != 2 or b1.shape[0] != b1.shape[1]:
        raise ValueError('expected square matrix')
    ggev, = get_lapack_funcs(('ggev',), (a1, b1))
    cvl, cvr = left, right
    # ggev is only wrapped in the Fortran (flapack) bindings
    if ggev.module_name[:7] == 'clapack':
        raise NotImplementedError('calling ggev from %s' % ggev.module_name)
    # workspace query: lwork=-1 makes LAPACK report the optimal size
    res = ggev(a1, b1, lwork=-1)
    lwork = res[-2][0].real.astype(numpy.int)
    if ggev.prefix in 'cz':
        # complex drivers return the eigenvalues as alpha/beta directly
        alpha, beta, vl, vr, work, info = ggev(a1, b1, cvl, cvr, lwork,
                                               overwrite_a, overwrite_b)
        w = alpha / beta
    else:
        # real drivers split the eigenvalues into real/imaginary parts
        alphar, alphai, beta, vl, vr, work, info = ggev(a1, b1, cvl, cvr, lwork,
                                                        overwrite_a,overwrite_b)
        w = (alphar + _I * alphai) / beta
    if info < 0:
        raise ValueError('illegal value in %d-th argument of internal ggev'
                         % -info)
    if info > 0:
        raise LinAlgError("generalized eig algorithm did not converge (info=%d)"
                          % info)
    # real output may still encode complex conjugate eigenpairs: unpack them
    only_real = numpy.logical_and.reduce(numpy.equal(w.imag, 0.0))
    if not (ggev.prefix in 'cz' or only_real):
        t = w.dtype.char
        if left:
            vl = _make_complex_eigvecs(w, vl, t)
        if right:
            vr = _make_complex_eigvecs(w, vr, t)
    if not (left or right):
        return w
    if left:
        if right:
            return w, vl, vr
        return w, vl
    return w, vr
def eig(a, b=None, left=False, right=True, overwrite_a=False, overwrite_b=False):
    """Solve an ordinary or generalized eigenvalue problem of a square matrix.

    Find eigenvalues w and right or left eigenvectors of a general matrix::

        a   vr[:,i] = w[i]        b   vr[:,i]
        a.H vl[:,i] = w[i].conj() b.H vl[:,i]

    where .H is the Hermitean conjugation.

    Parameters
    ----------
    a : array, shape (M, M)
        A complex or real matrix whose eigenvalues and eigenvectors
        will be computed.
    b : array, shape (M, M)
        Right-hand side matrix in a generalized eigenvalue problem.
        If omitted, identity matrix is assumed.
    left : boolean
        Whether to calculate and return left eigenvectors
    right : boolean
        Whether to calculate and return right eigenvectors
    overwrite_a : boolean
        Whether to overwrite data in a (may improve performance)
    overwrite_b : boolean
        Whether to overwrite data in b (may improve performance)

    Returns
    -------
    w : double or complex array, shape (M,)
        The eigenvalues, each repeated according to its multiplicity.
    vl : double or complex array, shape (M, M)
        (if left == True) The normalized left eigenvector corresponding
        to the eigenvalue w[i] is the column v[:,i].
    vr : double or complex array, shape (M, M)
        (if right == True) The normalized right eigenvector corresponding
        to the eigenvalue w[i] is the column vr[:,i].

    Raises LinAlgError if eigenvalue computation does not converge

    See Also
    --------
    eigh : eigenvalues and right eigenvectors for symmetric/Hermitian arrays
    """
    a1 = asarray_chkfinite(a)
    if len(a1.shape) != 2 or a1.shape[0] != a1.shape[1]:
        raise ValueError('expected square matrix')
    overwrite_a = overwrite_a or (_datacopied(a1, a))
    if b is not None:
        b = asarray_chkfinite(b)
        if b.shape != a1.shape:
            raise ValueError('a and b must have the same shape')
        # generalized problem is handled by the ?ggev-based helper
        return _geneig(a1, b, left, right, overwrite_a, overwrite_b)
    geev, = get_lapack_funcs(('geev',), (a1,))
    compute_vl, compute_vr = left, right
    if geev.module_name[:7] == 'flapack':
        # Fortran LAPACK: determine the optimal workspace size up front
        lwork = calc_lwork.geev(geev.prefix, a1.shape[0],
                                compute_vl, compute_vr)[1]
        if geev.prefix in 'cz':
            w, vl, vr, info = geev(a1, lwork=lwork,
                                    compute_vl=compute_vl,
                                    compute_vr=compute_vr,
                                    overwrite_a=overwrite_a)
        else:
            wr, wi, vl, vr, info = geev(a1, lwork=lwork,
                                        compute_vl=compute_vl,
                                        compute_vr=compute_vr,
                                        overwrite_a=overwrite_a)
            # NOTE(review): this 't' is recomputed below before it is used
            t = {'f':'F','d':'D'}[wr.dtype.char]
            # combine the split real/imaginary parts into complex eigenvalues
            w = wr + _I * wi
    else: # 'clapack'
        if geev.prefix in 'cz':
            w, vl, vr, info = geev(a1,
                                    compute_vl=compute_vl,
                                    compute_vr=compute_vr,
                                    overwrite_a=overwrite_a)
        else:
            wr, wi, vl, vr, info = geev(a1,
                                        compute_vl=compute_vl,
                                        compute_vr=compute_vr,
                                        overwrite_a=overwrite_a)
            t = {'f':'F','d':'D'}[wr.dtype.char]
            w = wr + _I * wi
    if info < 0:
        raise ValueError('illegal value in %d-th argument of internal geev'
                         % -info)
    if info > 0:
        raise LinAlgError("eig algorithm did not converge (only eigenvalues "
                          "with order >= %d have converged)" % info)
    # real output may still encode complex conjugate eigenpairs: unpack them
    only_real = numpy.logical_and.reduce(numpy.equal(w.imag, 0.0))
    if not (geev.prefix in 'cz' or only_real):
        t = w.dtype.char
        if left:
            vl = _make_complex_eigvecs(w, vl, t)
        if right:
            vr = _make_complex_eigvecs(w, vr, t)
    if not (left or right):
        return w
    if left:
        if right:
            return w, vl, vr
        return w, vl
    return w, vr
def eigh(a, b=None, lower=True, eigvals_only=False, overwrite_a=False,
         overwrite_b=False, turbo=True, eigvals=None, type=1):
    """Solve an ordinary or generalized eigenvalue problem for a complex
    Hermitian or real symmetric matrix.

    Find eigenvalues w and optionally eigenvectors v of matrix a, where
    b is positive definite::

        a v[:,i] = w[i] b v[:,i]
        v[i,:].conj() a v[:,i] = w[i]
        v[i,:].conj() b v[:,i] = 1

    Parameters
    ----------
    a : array, shape (M, M)
        A complex Hermitian or real symmetric matrix whose eigenvalues and
        eigenvectors will be computed.
    b : array, shape (M, M)
        A complex Hermitian or real symmetric definite positive matrix in.
        If omitted, identity matrix is assumed.
    lower : boolean
        Whether the pertinent array data is taken from the lower or upper
        triangle of a. (Default: lower)
    eigvals_only : boolean
        Whether to calculate only eigenvalues and no eigenvectors.
        (Default: both are calculated)
    turbo : boolean
        Use divide and conquer algorithm (faster but expensive in memory,
        only for generalized eigenvalue problem and if eigvals=None)
    eigvals : tuple (lo, hi)
        Indexes of the smallest and largest (in ascending order) eigenvalues
        and corresponding eigenvectors to be returned: 0 <= lo < hi <= M-1.
        If omitted, all eigenvalues and eigenvectors are returned.
    type: integer
        Specifies the problem type to be solved:
            type = 1: a   v[:,i] = w[i] b v[:,i]
            type = 2: a b v[:,i] = w[i]   v[:,i]
            type = 3: b a v[:,i] = w[i]   v[:,i]
    overwrite_a : boolean
        Whether to overwrite data in a (may improve performance)
    overwrite_b : boolean
        Whether to overwrite data in b (may improve performance)

    Returns
    -------
    w : real array, shape (N,)
        The N (1<=N<=M) selected eigenvalues, in ascending order, each
        repeated according to its multiplicity.
    v : complex array, shape (M, N)
        (if eigvals_only == False) The normalized selected eigenvector
        corresponding to the eigenvalue w[i] is the column v[:,i].
        Normalization:
            type 1 and 3:       v.conj() a      v  = w
            type 2:        inv(v).conj() a  inv(v) = w
            type = 1 or 2:      v.conj() b      v  = I
            type = 3     :      v.conj() inv(b) v  = I

    Raises LinAlgError if eigenvalue computation does not converge,
    an error occurred, or b matrix is not definite positive. Note that
    if input matrices are not symmetric or hermitian, no error is reported
    but results will be wrong.

    See Also
    --------
    eig : eigenvalues and right eigenvectors for non-symmetric arrays
    """
    a1 = asarray_chkfinite(a)
    if len(a1.shape) != 2 or a1.shape[0] != a1.shape[1]:
        raise ValueError('expected square matrix')
    overwrite_a = overwrite_a or (_datacopied(a1, a))
    # complex input selects the Hermitian ('he*') LAPACK drivers below
    cplx = iscomplexobj(a1)
    if b is not None:
        b1 = asarray_chkfinite(b)
        overwrite_b = overwrite_b or _datacopied(b1, b)
        if len(b1.shape) != 2 or b1.shape[0] != b1.shape[1]:
            raise ValueError('expected square matrix')
        if b1.shape != a1.shape:
            raise ValueError("wrong b dimensions %s, should "
                             "be %s" % (str(b1.shape), str(a1.shape)))
        cplx = cplx or iscomplexobj(b1)
    else:
        b1 = None
    # Set job for fortran routines: 'V' computes eigenvectors too
    _job = (eigvals_only and 'N') or 'V'
    # port eigenvalue range from python (0-based) to fortran (1-based)
    if eigvals is not None:
        lo, hi = eigvals
        if lo < 0 or hi >= a1.shape[0]:
            raise ValueError('The eigenvalue range specified is not valid.\n'
                             'Valid range is [%s,%s]' % (0, a1.shape[0]-1))
        lo += 1
        hi += 1
        eigvals = (lo, hi)
    # set lower
    if lower:
        uplo = 'L'
    else:
        uplo = 'U'
    # fix prefix for lapack routines: Hermitian vs. symmetric drivers
    if cplx:
        pfx = 'he'
    else:
        pfx = 'sy'
    # Standard Eigenvalue Problem
    # Use '*evr' routines
    # FIXME: implement calculation of optimal lwork
    #        for all lapack routines
    if b1 is None:
        (evr,) = get_lapack_funcs((pfx+'evr',), (a1,))
        if eigvals is None:
            w, v, info = evr(a1, uplo=uplo, jobz=_job, range="A", il=1,
                             iu=a1.shape[0], overwrite_a=overwrite_a)
        else:
            (lo, hi) = eigvals
            w_tot, v, info = evr(a1, uplo=uplo, jobz=_job, range="I",
                                 il=lo, iu=hi, overwrite_a=overwrite_a)
            w = w_tot[0:hi-lo+1]
    # Generalized Eigenvalue Problem
    else:
        # Use '*gvx' routines if range is specified
        if eigvals is not None:
            (gvx,) = get_lapack_funcs((pfx+'gvx',), (a1,b1))
            (lo, hi) = eigvals
            w_tot, v, ifail, info = gvx(a1, b1, uplo=uplo, iu=hi,
                                        itype=type,jobz=_job, il=lo,
                                        overwrite_a=overwrite_a,
                                        overwrite_b=overwrite_b)
            w = w_tot[0:hi-lo+1]
        # Use '*gvd' routine if turbo is on and no eigvals are specified
        elif turbo:
            (gvd,) = get_lapack_funcs((pfx+'gvd',), (a1,b1))
            v, w, info = gvd(a1, b1, uplo=uplo, itype=type, jobz=_job,
                             overwrite_a=overwrite_a,
                             overwrite_b=overwrite_b)
        # Use '*gv' routine if turbo is off and no eigvals are specified
        else:
            (gv,) = get_lapack_funcs((pfx+'gv',), (a1,b1))
            v, w, info = gv(a1, b1, uplo=uplo, itype= type, jobz=_job,
                            overwrite_a=overwrite_a,
                            overwrite_b=overwrite_b)
    # Check if we had a successful exit
    if info == 0:
        if eigvals_only:
            return w
        else:
            return w, v
    elif info < 0:
        raise LinAlgError("illegal value in %i-th argument of internal"
                          " fortran routine." % (-info))
    elif info > 0 and b1 is None:
        raise LinAlgError("unrecoverable internal error.")
    # The algorithm failed to converge.
    elif info > 0 and info <= b1.shape[0]:
        if eigvals is not None:
            # BUG FIX: the original wrote '"..." % nonzero(ifail)-1', which
            # formats first and then subtracts 1 from the *string* (TypeError).
            # Convert LAPACK's 1-based failure indices to 0-based before
            # formatting the message.
            raise LinAlgError("the eigenvectors %s failed to"
                              " converge." % (nonzero(ifail)[0] - 1))
        else:
            raise LinAlgError("internal fortran routine failed to converge: "
                              "%i off-diagonal elements of an "
                              "intermediate tridiagonal form did not converge"
                              " to zero." % info)
    # This occurs when b is not positive definite
    else:
        raise LinAlgError("the leading minor of order %i"
                          " of 'b' is not positive definite. The"
                          " factorization of 'b' could not be completed"
                          " and no eigenvalues or eigenvectors were"
                          " computed." % (info-b1.shape[0]))
def eig_banded(a_band, lower=False, eigvals_only=False, overwrite_a_band=False,
               select='a', select_range=None, max_ev = 0):
    """Solve real symmetric or complex hermitian band matrix eigenvalue problem.

    Find eigenvalues w and optionally right eigenvectors v of a::

        a v[:,i] = w[i] v[:,i]
        v.H v    = identity

    The matrix a is stored in ab either in lower diagonal or upper
    diagonal ordered form:

        ab[u + i - j, j] == a[i,j]        (if upper form; i <= j)
        ab[    i - j, j] == a[i,j]        (if lower form; i >= j)

    Example of ab (shape of a is (6,6), u=2)::

        upper form:
        *   *   a02 a13 a24 a35
        *   a01 a12 a23 a34 a45
        a00 a11 a22 a33 a44 a55

        lower form:
        a00 a11 a22 a33 a44 a55
        a10 a21 a32 a43 a54 *
        a20 a31 a42 a53 *   *

    Cells marked with * are not used.

    Parameters
    ----------
    a_band : array, shape (M, u+1)
        Banded matrix whose eigenvalues to calculate
    lower : boolean
        Is the matrix in the lower form. (Default is upper form)
    eigvals_only : boolean
        Compute only the eigenvalues and no eigenvectors.
        (Default: calculate also eigenvectors)
    overwrite_a_band:
        Discard data in a_band (may enhance performance)
    select: {'a', 'v', 'i'}
        Which eigenvalues to calculate

        ======  ========================================
        select  calculated
        ======  ========================================
        'a'     All   eigenvalues
        'v'     Eigenvalues in the interval (min, max]
        'i'     Eigenvalues with indices min <= i <= max
        ======  ========================================
    select_range : (min, max)
        Range of selected eigenvalues
    max_ev : integer
        For select=='v', maximum number of eigenvalues expected.
        For other values of select, has no meaning.
        In doubt, leave this parameter untouched.

    Returns
    -------
    w : array, shape (M,)
        The eigenvalues, in ascending order, each repeated according to its
        multiplicity.
    v : double or complex double array, shape (M, M)
        The normalized eigenvector corresponding to the eigenvalue w[i] is
        the column v[:,i].

    Raises LinAlgError if eigenvalue computation does not converge
    """
    if eigvals_only or overwrite_a_band:
        a1 = asarray_chkfinite(a_band)
        overwrite_a_band = overwrite_a_band or (_datacopied(a1, a_band))
    else:
        a1 = array(a_band)
        if issubclass(a1.dtype.type, inexact) and not isfinite(a1).all():
            raise ValueError("array must not contain infs or NaNs")
        overwrite_a_band = 1
    if len(a1.shape) != 2:
        raise ValueError('expected two-dimensional array')
    # NOTE(review): the membership lists include the ints 0/1/2, but an int
    # `select` would crash on .lower() before ever being matched -- in
    # practice only the string forms are usable; confirm before relying on
    # integer selects.
    if select.lower() not in [0, 1, 2, 'a', 'v', 'i', 'all', 'value', 'index']:
        raise ValueError('invalid argument for select')
    if select.lower() in [0, 'a', 'all']:
        # compute the full spectrum with the divide-and-conquer ?bevd drivers
        if a1.dtype.char in 'GFD':
            bevd, = get_lapack_funcs(('hbevd',), (a1,))
            # FIXME: implement this somewhen, for now go with builtin values
            # FIXME: calc optimal lwork by calling ?hbevd(lwork=-1)
            #        or by using calc_lwork.f ???
            # lwork = calc_lwork.hbevd(bevd.prefix, a1.shape[0], lower)
            internal_name = 'hbevd'
        else: # a1.dtype.char in 'fd':
            bevd, = get_lapack_funcs(('sbevd',), (a1,))
            # FIXME: implement this somewhen, for now go with builtin values
            #        see above
            # lwork = calc_lwork.sbevd(bevd.prefix, a1.shape[0], lower)
            internal_name = 'sbevd'
        w,v,info = bevd(a1, compute_v=not eigvals_only,
                        lower=lower,
                        overwrite_ab=overwrite_a_band)
    if select.lower() in [1, 2, 'i', 'v', 'index', 'value']:
        # calculate certain range only
        if select.lower() in [2, 'i', 'index']:
            select = 2
            vl, vu, il, iu = 0.0, 0.0, min(select_range), max(select_range)
            if min(il, iu) < 0 or max(il, iu) >= a1.shape[1]:
                raise ValueError('select_range out of bounds')
            max_ev = iu - il + 1
        else: # 1, 'v', 'value'
            select = 1
            vl, vu, il, iu = min(select_range), max(select_range), 0, 0
            if max_ev == 0:
                max_ev = a_band.shape[1]
            if eigvals_only:
                max_ev = 1
        # calculate optimal abstol for dsbevx (see manpage)
        if a1.dtype.char in 'fF': # single precision
            lamch, = get_lapack_funcs(('lamch',), (array(0, dtype='f'),))
        else:
            lamch, = get_lapack_funcs(('lamch',), (array(0, dtype='d'),))
        abstol = 2 * lamch('s')
        if a1.dtype.char in 'GFD':
            bevx, = get_lapack_funcs(('hbevx',), (a1,))
            internal_name = 'hbevx'
        else: # a1.dtype.char in 'gfd'
            bevx, = get_lapack_funcs(('sbevx',), (a1,))
            internal_name = 'sbevx'
        # il+1, iu+1: translate python indexing (0 ... N-1) into Fortran
        # indexing (1 ... N)
        w, v, m, ifail, info = bevx(a1, vl, vu, il+1, iu+1,
                                    compute_v=not eigvals_only,
                                    mmax=max_ev,
                                    range=select, lower=lower,
                                    overwrite_ab=overwrite_a_band,
                                    abstol=abstol)
        # crop off w and v: only m eigenvalues were actually found
        w = w[:m]
        if not eigvals_only:
            v = v[:, :m]
    if info < 0:
        raise ValueError('illegal value in %d-th argument of internal %s'
                         % (-info, internal_name))
    if info > 0:
        raise LinAlgError("eig algorithm did not converge")
    if eigvals_only:
        return w
    return w, v
def eigvals(a, b=None, overwrite_a=False):
    """Compute eigenvalues from an ordinary or generalized eigenvalue problem.

    Thin convenience wrapper around :func:`eig` that requests neither left
    nor right eigenvectors::

        a vr[:,i] = w[i] b vr[:,i]

    Parameters
    ----------
    a : array, shape (M, M)
        A complex or real matrix whose eigenvalues will be computed.
    b : array, shape (M, M)
        Right-hand side matrix in a generalized eigenvalue problem.
        If omitted, identity matrix is assumed.
    overwrite_a : boolean
        Whether to overwrite data in a (may improve performance)

    Returns
    -------
    w : double or complex array, shape (M,)
        The eigenvalues, each repeated according to its multiplicity,
        but not in any specific order.

    Raises LinAlgError if eigenvalue computation does not converge

    See Also
    --------
    eigvalsh : eigenvalues of symmetric or Hemitiean arrays
    eig : eigenvalues and right eigenvectors of general arrays
    eigh : eigenvalues and eigenvectors of symmetric/Hermitean arrays.
    """
    # Delegate to eig() with both eigenvector flags switched off.
    return eig(a, b=b, left=False, right=False, overwrite_a=overwrite_a)
def eigvalsh(a, b=None, lower=True, overwrite_a=False,
             overwrite_b=False, turbo=True, eigvals=None, type=1):
    """Compute eigenvalues of a complex Hermitian or real symmetric matrix.

    Solves the ordinary or generalized problem, where `b` is positive
    definite::

        a v[:,i] = w[i] b v[:,i]
        v[i,:].conj() a v[:,i] = w[i]
        v[i,:].conj() b v[:,i] = 1

    Parameters
    ----------
    a : array, shape (M, M)
        Complex Hermitian or real symmetric matrix.
    b : array, shape (M, M), optional
        Complex Hermitian or real symmetric positive definite matrix.
        The identity matrix is assumed when omitted.
    lower : boolean
        Whether the pertinent triangle of `a` is the lower one
        (default) or the upper one.
    turbo : boolean
        Use the divide-and-conquer algorithm (faster but memory-hungry;
        only for the generalized problem and when ``eigvals`` is None).
    eigvals : tuple (lo, hi), optional
        Indices of the smallest and largest (in ascending order)
        eigenvalues to return: 0 <= lo < hi <= M-1.  All eigenvalues
        are returned when omitted.
    type : integer
        Problem type: 1 for ``a v = w b v``, 2 for ``a b v = w v``,
        3 for ``b a v = w v``.
    overwrite_a : boolean
        Allow overwriting data in `a` (may improve performance).
    overwrite_b : boolean
        Allow overwriting data in `b` (may improve performance).

    Returns
    -------
    w : real array, shape (N,)
        The N (1 <= N <= M) selected eigenvalues, in ascending order,
        each repeated according to its multiplicity.

    Raises LinAlgError if the computation does not converge, an error
    occurred, or `b` is not positive definite.  Note that no error is
    reported for non-symmetric/non-Hermitian input, but the results
    will be wrong.

    See Also
    --------
    eigvals : eigenvalues of general arrays
    eigh : eigenvalues and right eigenvectors for symmetric/Hermitian arrays
    eig : eigenvalues and right eigenvectors for non-symmetric arrays
    """
    # Delegate to eigh() with eigenvector computation switched off.
    return eigh(a, b=b, lower=lower, eigvals_only=True,
                overwrite_a=overwrite_a, overwrite_b=overwrite_b,
                turbo=turbo, eigvals=eigvals, type=type)
def eigvals_banded(a_band, lower=False, overwrite_a_band=False,
                   select='a', select_range=None):
    """Eigenvalues of a real symmetric or complex Hermitian band matrix.

    Solves ``a v[:,i] = w[i] v[:,i]`` (with ``v.H v = identity``) for
    the eigenvalues only.

    The matrix `a` is stored in `a_band` in lower or upper diagonal
    ordered form::

        a_band[u + i - j, j] == a[i,j]   (upper form; i <= j)
        a_band[i - j, j]     == a[i,j]   (lower form; i >= j)

    Example of `a_band` (shape of `a` is (6,6), u=2; cells marked *
    are not used)::

        upper form:
        *   *   a02 a13 a24 a35
        *   a01 a12 a23 a34 a45
        a00 a11 a22 a33 a44 a55

        lower form:
        a00 a11 a22 a33 a44 a55
        a10 a21 a32 a43 a54 *
        a20 a31 a42 a53 *   *

    Parameters
    ----------
    a_band : array, shape (M, u+1)
        Banded matrix whose eigenvalues to calculate.
    lower : boolean
        Whether the matrix is in the lower form (default is upper form).
    overwrite_a_band : boolean
        Discard data in `a_band` (may enhance performance).
    select : {'a', 'v', 'i'}
        Which eigenvalues to calculate: 'a' for all, 'v' for those in
        the interval (min, max], 'i' for those with indices
        min <= i <= max.
    select_range : (min, max)
        Range of selected eigenvalues.

    Returns
    -------
    w : array, shape (M,)
        The eigenvalues, in ascending order, each repeated according
        to its multiplicity.

    Raises LinAlgError if eigenvalue computation does not converge.

    See Also
    --------
    eig_banded : eigenvalues and right eigenvectors for symmetric/Hermitian band matrices
    eigvals : eigenvalues of general arrays
    eigh : eigenvalues and right eigenvectors for symmetric/Hermitian arrays
    eig : eigenvalues and right eigenvectors for non-symmetric arrays
    """
    # Delegate to eig_banded() with eigenvector computation switched off.
    return eig_banded(a_band, lower=lower, eigvals_only=True,
                      overwrite_a_band=overwrite_a_band, select=select,
                      select_range=select_range)
# Typecodes whose LAPACK work-size computations use the double-precision
# routines ('i'/'l' integer inputs are upcast to double).
_double_precision = ['i','l','d']
def hessenberg(a, calc_q=False, overwrite_a=False):
    """Compute Hessenberg form of a matrix.

    The Hessenberg decomposition is::

        A = Q H Q^H

    where Q is unitary/orthogonal and H has only zero elements below the first
    subdiagonal.

    Parameters
    ----------
    a : array, shape (M,M)
        Matrix to bring into Hessenberg form
    calc_q : boolean
        Whether to compute the transformation matrix
    overwrite_a : boolean
        Whether to ovewrite data in a (may improve performance)

    Returns
    -------
    H : array, shape (M,M)
        Hessenberg form of A

    (If calc_q == True)

    Q : array, shape (M,M)
        Unitary/orthogonal similarity transformation matrix s.t. A = Q H Q^H
    """
    a1 = asarray(a)
    if len(a1.shape) != 2 or (a1.shape[0] != a1.shape[1]):
        raise ValueError('expected square matrix')
    overwrite_a = overwrite_a or (_datacopied(a1, a))
    # gebal balances/permutes the matrix first; gehrd then reduces the
    # balanced matrix to upper Hessenberg form via Householder reflectors.
    gehrd,gebal = get_lapack_funcs(('gehrd','gebal'), (a1,))
    ba, lo, hi, pivscale, info = gebal(a1, permute=1, overwrite_a=overwrite_a)
    if info < 0:
        raise ValueError('illegal value in %d-th argument of internal gebal '
                         '(hessenberg)' % -info)
    n = len(a1)
    lwork = calc_lwork.gehrd(gehrd.prefix, n, lo, hi)
    hq, tau, info = gehrd(ba, lo=lo, hi=hi, lwork=lwork, overwrite_a=1)
    if info < 0:
        raise ValueError('illegal value in %d-th argument of internal gehrd '
                         '(hessenberg)' % -info)
    if not calc_q:
        # Zero out the Householder vectors that gehrd stored below the first
        # subdiagonal, so only the Hessenberg matrix itself is returned.
        for i in range(lo, hi):
            hq[i+2:hi+1, i] = 0.0
        return hq
    # XXX: Use ORGHR routines to compute q.
    typecode = hq.dtype
    ger,gemm = get_blas_funcs(('ger','gemm'), dtype=typecode)
    q = None
    for i in range(lo, hi):
        # tau[i] == 0 means the i-th column needed no reflection.
        if tau[i]==0.0:
            continue
        # Reconstruct the i-th Householder vector from the subdiagonal
        # storage of hq, then clear that storage from the result.
        v = zeros(n, dtype=typecode)
        v[i+1] = 1.0
        v[i+2:hi+1] = hq[i+2:hi+1, i]
        hq[i+2:hi+1, i] = 0.0
        # h = I - tau * v v^T; accumulate Q as the product of reflectors.
        h = ger(-tau[i], v, v,a=diag(ones(n, dtype=typecode)), overwrite_a=1)
        if q is None:
            q = h
        else:
            q = gemm(1.0, q, h)
    if q is None:
        # No reflections at all: Q is the identity.
        q = diag(ones(n, dtype=typecode))
    return hq, q
| jrversteegh/softsailor | deps/scipy-0.10.0b2/scipy/linalg/decomp.py | Python | gpl-3.0 | 28,803 |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A runner implementation that submits a job for remote execution.
"""
import time
import uuid
from concurrent import futures
import grpc
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.pipeline import Pipeline
from apache_beam.portability.api import beam_job_api_pb2
from apache_beam.portability.api import beam_job_api_pb2_grpc
from apache_beam.runners.runner import PipelineState
# Sleep interval for the serve() keep-alive loop.
_ONE_DAY_IN_SECONDS = 60 * 60 * 24
class JobService(beam_job_api_pb2_grpc.JobServiceServicer):
  """In-process Job API service that executes pipelines on the DirectRunner.

  Submitted jobs are held in memory, keyed by a generated hex job id.
  """

  def __init__(self):
    # Maps job id -> PipelineResult of the submitted pipeline.
    self.jobs = {}

  def run(self, request, context):
    """Runs the submitted pipeline and returns its generated job id."""
    # Fix: uuid.UUID.get_hex() is Python 2 only and was removed in Python 3;
    # the equivalent .hex property exists on both 2 and 3.
    job_id = uuid.uuid4().hex
    pipeline_result = Pipeline.from_runner_api(
        request.pipeline,
        'DirectRunner',
        PipelineOptions()).run()
    self.jobs[job_id] = pipeline_result
    return beam_job_api_pb2.SubmitJobResponse(jobId=job_id)

  def getState(self, request, context):
    """Returns the current state of the job with the requested id."""
    pipeline_result = self.jobs[request.jobId]
    return beam_job_api_pb2.GetJobStateResponse(
        state=self._map_state_to_jobState(pipeline_result.state))

  def cancel(self, request, context):
    """Cancels the requested job and returns its resulting state."""
    pipeline_result = self.jobs[request.jobId]
    pipeline_result.cancel()
    return beam_job_api_pb2.CancelJobResponse(
        state=self._map_state_to_jobState(pipeline_result.state))

  def getMessageStream(self, request, context):
    """Yields a single terminal-state message once the job finishes.

    NOTE: blocks until the pipeline terminates.
    """
    pipeline_result = self.jobs[request.jobId]
    pipeline_result.wait_until_finish()
    yield beam_job_api_pb2.JobMessagesResponse(
        stateResponse=beam_job_api_pb2.GetJobStateResponse(
            state=self._map_state_to_jobState(pipeline_result.state)))

  def getStateStream(self, request, context):
    """Not implemented for the direct runner; reports UNIMPLEMENTED."""
    context.set_details('Not Implemented for direct runner!')
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    return

  @staticmethod
  def _map_state_to_jobState(state):
    """Translates a runner PipelineState into a Job API JobState value."""
    if state == PipelineState.UNKNOWN:
      return beam_job_api_pb2.JobState.UNKNOWN
    elif state == PipelineState.STOPPED:
      return beam_job_api_pb2.JobState.STOPPED
    elif state == PipelineState.RUNNING:
      return beam_job_api_pb2.JobState.RUNNING
    elif state == PipelineState.DONE:
      return beam_job_api_pb2.JobState.DONE
    elif state == PipelineState.FAILED:
      return beam_job_api_pb2.JobState.FAILED
    elif state == PipelineState.CANCELLED:
      return beam_job_api_pb2.JobState.CANCELLED
    elif state == PipelineState.UPDATED:
      return beam_job_api_pb2.JobState.UPDATED
    elif state == PipelineState.DRAINING:
      return beam_job_api_pb2.JobState.DRAINING
    elif state == PipelineState.DRAINED:
      return beam_job_api_pb2.JobState.DRAINED
    else:
      raise ValueError('Unknown pipeline state')
def serve():
  """Start the JobService gRPC server on port 50051 and block until
  interrupted with Ctrl-C."""
  executor = futures.ThreadPoolExecutor(max_workers=10)
  server = grpc.server(executor)
  beam_job_api_pb2_grpc.add_JobServiceServicer_to_server(JobService(), server)
  server.add_insecure_port('[::]:50051')
  server.start()
  # start() does not block; keep the main thread alive until interrupted.
  try:
    while True:
      time.sleep(_ONE_DAY_IN_SECONDS)
  except KeyboardInterrupt:
    server.stop(0)
# Script entry point: run the job service until interrupted.
if __name__ == '__main__':
  serve()
| eljefe6a/incubator-beam | sdks/python/apache_beam/runners/experimental/python_rpc_direct/server.py | Python | apache-2.0 | 3,835 |
# -*- coding: utf-8 -*-
import time
from toxicbuild.ui import settings
from behave import given, then, when
from tests.webui import take_screenshot
@take_screenshot
def logged_in_webui(context):
    """Log the test user into the web UI (no-op if already logged in) and
    wait until the logout link is visible."""
    browser = context.browser
    base_url = 'http://{}:{}/'.format(settings.TEST_WEB_HOST,
                                      settings.TORNADO_PORT)
    url = base_url + 'login'
    browser.get(url)
    if not browser.is_logged:
        browser.do_login(url, 'someguy', '123')
    el = browser.find_element_by_class_name('logout-link-container')
    browser.wait_element_become_visible(el)


# Register the function as a behave "given" step.
given_logged_in_webui = given('the user is logged in the web interface')(
    logged_in_webui)
@take_screenshot
def sees_message(context, msg):
    """Assert that *msg* eventually becomes present on the page."""
    browser = context.browser
    is_present = browser.wait_text_become_present(msg)
    assert is_present
    # for btn in browser.find_elements_by_class_name('close-msg'):
    #     browser.click(btn)


# Register the function as a behave "then" step.
then_sees_message = then('he sees the "{msg}" message')(sees_message)
@take_screenshot
def navigate2settings(context):
    """Open the repositories settings page via its navigation link."""
    browser = context.browser
    # Fix: 'a[href="..."]' is a CSS selector, not XPath — as XPath it tests
    # for a child *element* named "href" and therefore never matches.  Use a
    # proper XPath attribute predicate instead.
    btn = browser.find_element_by_xpath(
        '//a[@href="/settings/repositories"]')
    browser.click(btn)
    browser.wait_text_become_present('Manage repositories')


# Register the function as a behave "when" step.
when_navigate2settings = when('he navigates to the settings page')(
    navigate2settings)
@then('he sees the main page')
@take_screenshot
def user_sees_main_main_page_login(context):
    """After logging in, the main page must show the 'Logout' link."""
    assert context.browser.wait_text_become_present('Logout')
@when('clicks in the save button')
@when('clicks in the add button')
@take_screenshot
def click_add_button(context):
    """Click the save/add button on a settings form."""
    browser = context.browser
    # HACK: fixed pause to let the button become clickable; an explicit wait
    # on the element would be more robust.
    time.sleep(0.5)
    btn = browser.find_element_by_id('btn-save-obj')
    browser.click(btn)
| jucacrispim/toxicbuild | tests/webui/steps/base_steps.py | Python | agpl-3.0 | 1,816 |
# Copyright (c) 2017 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
from UM.Backend.Backend import Backend, BackendState
from UM.Application import Application
from UM.Scene.SceneNode import SceneNode
from UM.Preferences import Preferences
from UM.Signal import Signal
from UM.Logger import Logger
from UM.Message import Message, MessageType
from UM.PluginRegistry import PluginRegistry
from UM.Resources import Resources
from UM.Settings.Validator import ValidatorState #To find if a setting is in an error state. We can't slice then.
from UM.Platform import Platform
from UM.Scene.Iterator.DepthFirstIterator import DepthFirstIterator
from UM.Qt.Duration import DurationFormat
from PyQt5.QtCore import QObject, pyqtSlot
from UM.Qt.Duration import DurationFormat
from collections import defaultdict
from cura.Settings.ExtruderManager import ExtruderManager
from . import ProcessSlicedLayersJob
from . import StartSliceJob
import os
import sys
from time import time
from PyQt5.QtCore import QTimer
import Arcus
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
class CuraEngineBackend(QObject, Backend):
## Starts the back-end plug-in.
#
# This registers all the signal listeners and prepares for communication
# with the back-end in general.
# CuraEngineBackend is exposed to qml as well.
def __init__(self, parent = None):
    """Locate the CuraEngine executable, register back-end message handlers
    and connect all scene/setting/tool listeners that drive (auto)slicing."""
    super().__init__(parent = parent)
    # Find out where the engine is located, and how it is called.
    # This depends on how Cura is packaged and which OS we are running on.
    executable_name = "CuraEngine"
    if Platform.isWindows():
        executable_name += ".exe"
    default_engine_location = executable_name
    if os.path.exists(os.path.join(Application.getInstallPrefix(), "bin", executable_name)):
        default_engine_location = os.path.join(Application.getInstallPrefix(), "bin", executable_name)
    if hasattr(sys, "frozen"):
        # Frozen (packaged) build: engine sits next to the executable.
        default_engine_location = os.path.join(os.path.dirname(os.path.abspath(sys.executable)), executable_name)
    if Platform.isLinux() and not default_engine_location:
        if not os.getenv("PATH"):
            raise OSError("There is something wrong with your Linux installation.")
        for pathdir in os.getenv("PATH").split(os.pathsep):
            execpath = os.path.join(pathdir, executable_name)
            if os.path.exists(execpath):
                default_engine_location = execpath
                break
    if not default_engine_location:
        raise EnvironmentError("Could not find CuraEngine")
    Logger.log("i", "Found CuraEngine at: %s" %(default_engine_location))
    default_engine_location = os.path.abspath(default_engine_location)
    Preferences.getInstance().addPreference("backend/location", default_engine_location)
    # Workaround to disable layer view processing if layer view is not active.
    self._layer_view_active = False
    Application.getInstance().getController().activeViewChanged.connect(self._onActiveViewChanged)
    Application.getInstance().getBuildPlateModel().activeBuildPlateChanged.connect(self._onActiveViewChanged)
    self._onActiveViewChanged()
    self._stored_layer_data = []
    self._stored_optimized_layer_data = {} # key is build plate number, then arrays are stored until they go to the ProcessSlicesLayersJob
    self._scene = Application.getInstance().getController().getScene()
    self._scene.sceneChanged.connect(self._onSceneChanged)
    # Triggers for auto-slicing. Auto-slicing is triggered as follows:
    #  - auto-slicing is started with a timer
    #  - whenever there is a value change, we start the timer
    #  - sometimes an error check can get scheduled for a value change, in that case, we ONLY want to start the
    #    auto-slicing timer when that error check is finished
    # If there is an error check, it will set the "_is_error_check_scheduled" flag, stop the auto-slicing timer,
    # and only wait for the error check to be finished to start the auto-slicing timer again.
    self._global_container_stack = None
    Application.getInstance().globalContainerStackChanged.connect(self._onGlobalStackChanged)
    Application.getInstance().getExtruderManager().extrudersAdded.connect(self._onGlobalStackChanged)
    self._onGlobalStackChanged()
    Application.getInstance().stacksValidationFinished.connect(self._onStackErrorCheckFinished)
    # A flag indicating if an error check was scheduled
    # If so, we will stop the auto-slice timer and start upon the error check
    self._is_error_check_scheduled = False
    # Listeners for receiving messages from the back-end.
    self._message_handlers["cura.proto.Layer"] = self._onLayerMessage
    self._message_handlers["cura.proto.LayerOptimized"] = self._onOptimizedLayerMessage
    self._message_handlers["cura.proto.Progress"] = self._onProgressMessage
    self._message_handlers["cura.proto.GCodeLayer"] = self._onGCodeLayerMessage
    self._message_handlers["cura.proto.GCodePrefix"] = self._onGCodePrefixMessage
    self._message_handlers["cura.proto.PrintTimeMaterialEstimates"] = self._onPrintTimeMaterialEstimates
    self._message_handlers["cura.proto.SlicingFinished"] = self._onSlicingFinishedMessage
    self._start_slice_job = None
    self._start_slice_job_build_plate = None
    self._slicing = False # Are we currently slicing?
    self._restart = False # Back-end is currently restarting?
    self._tool_active = False # If a tool is active, some tasks do not have to do anything
    self._always_restart = True # Always restart the engine when starting a new slice. Don't keep the process running. TODO: Fix engine statelessness.
    self._process_layers_job = None # The currently active job to process layers, or None if it is not processing layers.
    self._build_plates_to_be_sliced = [] # what needs slicing?
    self._engine_is_fresh = True # Is the newly started engine used before or not?
    self._backend_log_max_lines = 20000 # Maximum number of lines to buffer
    self._error_message = None # Pop-up message that shows errors.
    self._last_num_objects = defaultdict(int) # Count number of objects to see if there is something changed
    self._postponed_scene_change_sources = [] # scene change is postponed (by a tool)
    self.backendQuit.connect(self._onBackendQuit)
    self.backendConnected.connect(self._onBackendConnected)
    # When a tool operation is in progress, don't slice. So we need to listen for tool operations.
    Application.getInstance().getController().toolOperationStarted.connect(self._onToolOperationStarted)
    Application.getInstance().getController().toolOperationStopped.connect(self._onToolOperationStopped)
    self._slice_start_time = None
    Preferences.getInstance().addPreference("general/auto_slice", True)
    self._use_timer = False
    # When you update a setting and other settings get changed through inheritance, many propertyChanged signals are fired.
    # This timer will group them up, and only slice for the last setting changed signal.
    # TODO: Properly group propertyChanged signals by whether they are triggered by the same user interaction.
    self._change_timer = QTimer()
    self._change_timer.setSingleShot(True)
    self._change_timer.setInterval(500)
    self.determineAutoSlicing()
    Preferences.getInstance().preferenceChanged.connect(self._onPreferencesChanged)
## Terminate the engine process.
#
# This function should terminate the engine process.
# Called when closing the application.
def close(self):
    """Shut the backend down; kills any CuraEngine process still running."""
    self._terminate()
## Get the command that is used to call the engine.
# This is useful for debugging and used to actually start the engine.
# \return list of commands and args / parameters.
def getEngineCommand(self):
    """Return the argv list used to launch CuraEngine.

    The engine is told to connect back to us on the local socket port
    and is given the printer definition file to load.
    """
    json_path = Resources.getPath(Resources.DefinitionContainers, "fdmprinter.def.json")
    engine_executable = Preferences.getInstance().getValue("backend/location")
    return [engine_executable, "connect", "127.0.0.1:{0}".format(self._port), "-j", json_path, ""]
## Emitted when we get a message containing print duration and material amount.
#  This also implies the slicing has finished.
#  \param time The amount of time the print will take.
#  \param material_amount The amount of material the print will use.
printDurationMessage = Signal()

## Emitted when the slicing process starts.
slicingStarted = Signal()

## Emitted when the slicing process is aborted forcefully.
slicingCancelled = Signal()
def setPrintTime(self, times, amounts):
    """Publish print-time and material estimates for the plate being sliced."""
    build_plate = self._start_slice_job_build_plate
    self.printDurationMessage.emit(build_plate, times, amounts)
@pyqtSlot()
def stopSlicing(self):
    """Abort the current slice and any running layer-processing job."""
    self.backendStateChange.emit(BackendState.NotStarted)
    if self._slicing: # We were already slicing. Stop the old job.
        self._terminate()
        # Reopen the socket so the next slice can start a fresh engine.
        self._createSocket()
    if self._process_layers_job: # We were processing layers. Stop that, the layers are going to change soon.
        Logger.log("d", "Aborting process layers job...")
        self._process_layers_job.abort()
        self._process_layers_job = None
    if self._error_message:
        self._error_message.hide()
## Manually triggers a reslice
@pyqtSlot()
def forceSlice(self):
    """Manually trigger a reslice (via the debounce timer when auto-slicing)."""
    if not self._use_timer:
        self.slice()
    else:
        self._change_timer.start()
## Perform a slice of the scene.
def slice(self):
    """Start slicing the next queued build plate, if any.

    Pops one build plate off the queue, skips it (and recurses) when it
    holds no sliceable objects, otherwise kicks off a StartSliceJob.
    """
    Logger.log("d", "Starting slice...")
    self._slice_start_time = time()
    if not self._build_plates_to_be_sliced:
        self.processingProgress.emit(1.0)
        Logger.log("w", "Slice unnecessary, nothing has changed that needs reslicing.")
        return
    if self._process_layers_job:
        # Layer processing for the previous slice is still running; retry later.
        Logger.log("d", " ## Process layers job still busy, trying later")
        return
    if not hasattr(self._scene, "gcode_dict"):
        # Lazily created: g-code storage per build plate.
        self._scene.gcode_dict = {}
    # see if we really have to slice
    active_build_plate = Application.getInstance().getBuildPlateModel().activeBuildPlate
    build_plate_to_be_sliced = self._build_plates_to_be_sliced.pop(0)
    Logger.log("d", "Going to slice build plate [%s]!" % build_plate_to_be_sliced)
    num_objects = self._numObjects()
    if build_plate_to_be_sliced not in num_objects or num_objects[build_plate_to_be_sliced] == 0:
        self._scene.gcode_dict[build_plate_to_be_sliced] = []
        Logger.log("d", "Build plate %s has no objects to be sliced, skipping", build_plate_to_be_sliced)
        if self._build_plates_to_be_sliced:
            # Immediately try the next queued build plate.
            self.slice()
        return
    self._stored_layer_data = []
    self._stored_optimized_layer_data[build_plate_to_be_sliced] = []
    if Application.getInstance().getPrintInformation() and build_plate_to_be_sliced == active_build_plate:
        Application.getInstance().getPrintInformation().setToZeroPrintInformation(build_plate_to_be_sliced)
    if self._process is None:
        self._createSocket()
    self.stopSlicing()
    self._engine_is_fresh = False # Yes we're going to use the engine
    self.processingProgress.emit(0.0)
    self.backendStateChange.emit(BackendState.NotStarted)
    self._scene.gcode_dict[build_plate_to_be_sliced] = [] #[] indexed by build plate number
    self._slicing = True
    self.slicingStarted.emit()
    self.determineAutoSlicing() # Switch timer on or off if appropriate
    slice_message = self._socket.createMessage("cura.proto.Slice")
    self._start_slice_job = StartSliceJob.StartSliceJob(slice_message)
    self._start_slice_job_build_plate = build_plate_to_be_sliced
    self._start_slice_job.setBuildPlate(self._start_slice_job_build_plate)
    self._start_slice_job.start()
    self._start_slice_job.finished.connect(self._onStartSliceCompleted)
def pauseSlicing(self):
    """Disable the backend and flag slicing as paused (see continueSlicing).

    NOTE(review): this is the only place ``_pause_slicing`` is created; it
    is not initialized in __init__.
    """
    self.close()
    self._pause_slicing = True
    self.backendStateChange.emit(BackendState.Disabled)
def continueSlicing(self):
    """Resume after pauseSlicing(); a no-op when slicing was never paused.

    Fix: ``_pause_slicing`` is only ever created by pauseSlicing(), so a
    direct attribute read raised AttributeError when continueSlicing() was
    called first.  Default it to False instead.
    """
    if getattr(self, "_pause_slicing", False):
        self._pause_slicing = False
        self.backendStateChange.emit(BackendState.NotStarted)
## Terminate the engine process.
# Start the engine process by calling _createSocket()
def _terminate(self):
    """Kill the CuraEngine process and discard partial slice state.

    A new engine process is only started again by _createSocket().
    """
    self._slicing = False
    self._stored_layer_data = []
    if self._start_slice_job_build_plate in self._stored_optimized_layer_data:
        del self._stored_optimized_layer_data[self._start_slice_job_build_plate]
    if self._start_slice_job is not None:
        self._start_slice_job.cancel()
    self.slicingCancelled.emit()
    self.processingProgress.emit(0)
    Logger.log("d", "Attempting to kill the engine process")
    # An externally started backend (debug mode) is not ours to kill.
    if Application.getInstance().getCommandLineOption("external-backend", False):
        return
    if self._process is not None:
        Logger.log("d", "Killing engine process")
        try:
            self._process.terminate()
            Logger.log("d", "Engine process is killed. Received return code %s", self._process.wait())
            self._process = None
        except Exception as e: # terminating a process that is already terminating causes an exception, silently ignore this.
            Logger.log("d", "Exception occurred while trying to kill the engine %s", str(e))
## Event handler to call when the job to initiate the slicing process is
# completed.
#
# When the start slice job is successfully completed, it will be happily
# slicing. This function handles any errors that may occur during the
# bootstrapping of a slice job.
#
# \param job The start slice job that was just finished.
def _onStartSliceCompleted(self, job):
    """Handle completion of the StartSliceJob.

    On success the prepared slice message is sent to the engine; every
    error result shows a message (when there is something on the build
    plate) and returns without contacting the engine.

    \param job The start slice job that was just finished.
    """
    if self._error_message:
        self._error_message.hide()
    # Note that cancelled slice jobs can still call this method.
    if self._start_slice_job is job:
        self._start_slice_job = None
    if job.isCancelled() or job.getError() or job.getResult() == StartSliceJob.StartJobResult.Error:
        return
    if job.getResult() == StartSliceJob.StartJobResult.MaterialIncompatible:
        if Application.getInstance().platformActivity:
            self._error_message = Message(catalog.i18nc("@info:status",
                                          "Unable to slice with the current material as it is incompatible with the selected machine or configuration."),
                                          title = catalog.i18nc("@info:title", "Unable to slice"), type = MessageType.Error)
            self._error_message.show()
            self.backendStateChange.emit(BackendState.Error)
        else:
            self.backendStateChange.emit(BackendState.NotStarted)
        return
    if job.getResult() == StartSliceJob.StartJobResult.SettingError:
        if Application.getInstance().platformActivity:
            # Collect the error keys from all extruders (or the global stack
            # when there are none) and translate them into human labels.
            extruders = list(ExtruderManager.getInstance().getMachineExtruders(self._global_container_stack.getId()))
            error_keys = []
            for extruder in extruders:
                error_keys.extend(extruder.getErrorKeys())
            if not extruders:
                error_keys = self._global_container_stack.getErrorKeys()
            error_labels = set()
            for key in error_keys:
                for stack in [self._global_container_stack] + extruders: #Search all container stacks for the definition of this setting. Some are only in an extruder stack.
                    definitions = stack.getBottom().findDefinitions(key = key)
                    if definitions:
                        break #Found it! No need to continue search.
                else: #No stack has a definition for this setting.
                    Logger.log("w", "When checking settings for errors, unable to find definition for key: {key}".format(key = key))
                    continue
                error_labels.add(definitions[0].label)
            error_labels = ", ".join(error_labels)
            self._error_message = Message(catalog.i18nc("@info:status", "Unable to slice with the current settings. The following settings have errors: {0}").format(error_labels),
                                          title = catalog.i18nc("@info:title", "Unable to slice"), type = MessageType.Error)
            self._error_message.show()
            self.backendStateChange.emit(BackendState.Error)
        else:
            self.backendStateChange.emit(BackendState.NotStarted)
        return
    elif job.getResult() == StartSliceJob.StartJobResult.ObjectSettingError:
        errors = {}
        for node in DepthFirstIterator(Application.getInstance().getController().getScene().getRoot()):
            stack = node.callDecoration("getStack")
            if not stack:
                continue
            for key in stack.getErrorKeys():
                definition = self._global_container_stack.getBottom().findDefinitions(key = key)
                if not definition:
                    Logger.log("e", "When checking settings for errors, unable to find definition for key {key} in per-object stack.".format(key = key))
                    continue
                definition = definition[0]
                errors[key] = definition.label
        error_labels = ", ".join(errors.values())
        self._error_message = Message(catalog.i18nc("@info:status", "Unable to slice due to some per-model settings. The following settings have errors on one or more models: {error_labels}").format(error_labels = error_labels),
                                      title = catalog.i18nc("@info:title", "Unable to slice"))
        self._error_message.show()
        self.backendStateChange.emit(BackendState.Error)
        return
    if job.getResult() == StartSliceJob.StartJobResult.BuildPlateError:
        if Application.getInstance().platformActivity:
            self._error_message = Message(catalog.i18nc("@info:status", "Unable to slice because the prime tower or prime position(s) are invalid."),
                                          title = catalog.i18nc("@info:title", "Unable to slice"), type = MessageType.Error)
            self._error_message.show()
            self.backendStateChange.emit(BackendState.Error)
        else:
            self.backendStateChange.emit(BackendState.NotStarted)
        # Fix: this branch previously fell through and still sent the slice
        # message to the engine; every error path must return here.
        return
    if job.getResult() == StartSliceJob.StartJobResult.NothingToSlice:
        if Application.getInstance().platformActivity:
            self._error_message = Message(catalog.i18nc("@info:status", "Nothing to slice because none of the models fit the build volume. Please scale or rotate models to fit."),
                                          title = catalog.i18nc("@info:title", "Unable to slice"), type = MessageType.Error)
            self._error_message.show()
            self.backendStateChange.emit(BackendState.Error)
        else:
            self.backendStateChange.emit(BackendState.NotStarted)
        # Nothing on this plate; move on to the next queued build plate.
        self._invokeSlice()
        return
    # Preparation completed, send it to the backend.
    self._socket.sendMessage(job.getSliceMessage())
    # Notify the user that it's now up to the backend to do it's job
    self.backendStateChange.emit(BackendState.Processing)
    Logger.log("d", "Sending slice message took %s seconds", time() - self._slice_start_time )
## Determine enable or disable auto slicing. Return True for enable timer and False otherwise.
# It disables when
# - preference auto slice is off
# - decorator isBlockSlicing is found (used in g-code reader)
def determineAutoSlicing(self):
    """Decide whether auto-slicing should be enabled; returns True when the
    auto-slice timer is (now) on, False otherwise.

    Auto-slicing is disabled when the preference is off or when any scene
    node blocks slicing (e.g. loaded g-code).
    """
    enable_timer = True
    if not Preferences.getInstance().getValue("general/auto_slice"):
        enable_timer = False
    for node in DepthFirstIterator(self._scene.getRoot()):
        if node.callDecoration("isBlockSlicing"):
            enable_timer = False
            self.backendStateChange.emit(BackendState.Disabled)
            gcode_list = node.callDecoration("getGCodeList")
            if gcode_list is not None:
                # NOTE(review): assumes self._scene.gcode_dict already exists;
                # slice() creates it lazily — confirm call ordering.
                self._scene.gcode_dict[node.callDecoration("getBuildPlateNumber")] = gcode_list
    if self._use_timer == enable_timer:
        # No change in mode; keep the current timer state.
        return self._use_timer
    if enable_timer:
        self.backendStateChange.emit(BackendState.NotStarted)
        self.enableTimer()
        return True
    else:
        self.disableTimer()
        return False
## Return a dict with number of objects per build plate
def _numObjects(self):
    """Count the sliceable scene nodes per build plate.

    Returns a defaultdict mapping build plate number -> object count.
    """
    counts = defaultdict(int)
    for node in DepthFirstIterator(self._scene.getRoot()):
        # Nodes without the "isSliceable" decoration are ignored.
        if not node.callDecoration("isSliceable"):
            continue
        counts[node.callDecoration("getBuildPlateNumber")] += 1
    return counts
## Listener for when the scene has changed.
#
# This should start a slice if the scene is now ready to slice.
#
# \param source The scene node that was changed.
def _onSceneChanged(self, source):
    """Queue (re)slicing of every build plate affected by *source*.

    \param source The scene node that was changed.
    """
    if not isinstance(source, SceneNode):
        return
    # This case checks if the source node is a node that contains GCode. In this case the
    # current layer data is removed so the previous data is not rendered - CURA-4821
    if source.callDecoration("isBlockSlicing") and source.callDecoration("getLayerData"):
        self._stored_optimized_layer_data = {}
    build_plate_changed = set()
    source_build_plate_number = source.callDecoration("getBuildPlateNumber")
    if source == self._scene.getRoot():
        # we got the root node: compare object counts per plate with the
        # previous counts to find which plates actually changed.
        num_objects = self._numObjects()
        for build_plate_number in list(self._last_num_objects.keys()) + list(num_objects.keys()):
            if build_plate_number not in self._last_num_objects or num_objects[build_plate_number] != self._last_num_objects[build_plate_number]:
                self._last_num_objects[build_plate_number] = num_objects[build_plate_number]
                build_plate_changed.add(build_plate_number)
    else:
        # we got a single scenenode
        if not source.callDecoration("isGroup"):
            # Ignore nodes without (complete) mesh data.
            if source.getMeshData() is None:
                return
            if source.getMeshData().getVertices() is None:
                return
        build_plate_changed.add(source_build_plate_number)
    build_plate_changed.discard(None)
    build_plate_changed.discard(-1) # object not on build plate
    if not build_plate_changed:
        return
    if self._tool_active:
        # do it later, each source only has to be done once
        if source not in self._postponed_scene_change_sources:
            self._postponed_scene_change_sources.append(source)
        return
    self.stopSlicing()
    for build_plate_number in build_plate_changed:
        if build_plate_number not in self._build_plates_to_be_sliced:
            self._build_plates_to_be_sliced.append(build_plate_number)
        self.printDurationMessage.emit(source_build_plate_number, {}, [])
    self.processingProgress.emit(0.0)
    self.backendStateChange.emit(BackendState.NotStarted)
    # if not self._use_timer:
    # With manually having to slice, we want to clear the old invalid layer data.
    self._clearLayerData(build_plate_changed)
    self._invokeSlice()
## Called when an error occurs in the socket connection towards the engine.
#
# \param error The exception that occurred.
def _onSocketError(self, error):
    """Handle an Arcus socket error by restarting the engine connection.

    \param error The exception that occurred.
    """
    if Application.getInstance().isShuttingDown():
        return
    super()._onSocketError(error)
    if error.getErrorCode() == Arcus.ErrorCode.Debug:
        # Debug "errors" are informational only; no restart needed.
        return
    self._terminate()
    self._createSocket()
    if error.getErrorCode() not in [Arcus.ErrorCode.BindFailedError, Arcus.ErrorCode.ConnectionResetError, Arcus.ErrorCode.Debug]:
        Logger.log("w", "A socket error caused the connection to be reset")
## Remove old layer data (if any).
#
#   \param build_plate_numbers Build plate numbers whose layer data should be
#          removed. When empty/None, layer data on *all* build plates is removed.
def _clearLayerData(self, build_plate_numbers = None):
    # Fix: the default used to be a mutable ``set()`` literal, which Python
    # evaluates once and shares across every call (classic mutable-default
    # pitfall). ``None`` as sentinel keeps the observable behavior identical
    # (an empty set and None are both falsy below).
    if build_plate_numbers is None:
        build_plate_numbers = set()
    for node in DepthFirstIterator(self._scene.getRoot()):
        if node.callDecoration("getLayerData"):
            # Remove either everything (no filter) or only nodes belonging
            # to one of the requested build plates.
            if not build_plate_numbers or node.callDecoration("getBuildPlateNumber") in build_plate_numbers:
                node.getParent().removeChild(node)
## Queue every existing build plate for (re)slicing.
def markSliceAll(self):
    highest_plate = Application.getInstance().getBuildPlateModel().maxBuildPlate
    for plate_nr in range(highest_plate + 1):
        # Avoid queueing the same plate twice.
        if plate_nr not in self._build_plates_to_be_sliced:
            self._build_plates_to_be_sliced.append(plate_nr)
## Convenience function: mark everything to slice, emit state and clear layer data.
def needsSlicing(self):
    self.stopSlicing()
    self.markSliceAll()
    self.processingProgress.emit(0.0)
    self.backendStateChange.emit(BackendState.NotStarted)
    if self._use_timer:
        return
    # Manual slicing mode: the old (now invalid) layer data would otherwise
    # linger on screen, so clear it right away.
    self._clearLayerData()
## A setting has changed, so check if we must reslice.
#
#   \param instance The setting instance that has changed.
#   \param property The property of the setting instance that has changed.
def _onSettingChanged(self, instance, property):
    if property == "value":
        # Only an actual value change invalidates the current slice result.
        self.needsSlicing()
        self._onChanged()
    elif property == "validationState" and self._use_timer:
        # Hold off auto-slicing until the scheduled error check finishes.
        self._is_error_check_scheduled = True
        self._change_timer.stop()
## The stack error check completed; resume any slicing that was waiting on it.
def _onStackErrorCheckFinished(self):
    self._is_error_check_scheduled = False
    if self._slicing or not self._build_plates_to_be_sliced:
        return
    self.needsSlicing()
    self._onChanged()
## Called when a sliced layer data message is received from the engine.
#
#   \param message The protobuf message containing sliced layer data.
def _onLayerMessage(self, message):
    # Buffer raw layer messages; they are consumed later when the sliced
    # layers are processed for display.
    self._stored_layer_data.append(message)
## Called when an optimized sliced layer data message is received from the engine.
#
#   \param message The protobuf message containing sliced layer data.
def _onOptimizedLayerMessage(self, message):
    # Optimized layer data is kept per build plate; the plate currently
    # being sliced is tracked in _start_slice_job_build_plate.
    self._stored_optimized_layer_data[self._start_slice_job_build_plate].append(message)
## Called when a progress message is received from the engine.
#
#   \param message The protobuf message containing the slicing progress.
def _onProgressMessage(self, message):
    # message.amount is the fraction of slicing done (0.0 .. 1.0).
    self.processingProgress.emit(message.amount)
    self.backendStateChange.emit(BackendState.Processing)
## Kick off (or defer) an automatic slice via the change timer.
def _invokeSlice(self):
    if not self._use_timer:
        return
    if self._is_error_check_scheduled:
        # An error check is pending; its finished-signal will trigger the
        # auto-slice instead. Otherwise business as usual.
        self._change_timer.stop()
    else:
        self._change_timer.start()
## Called when the engine sends a message that slicing is finished.
#
#   \param message The protobuf message signalling that slicing is finished.
def _onSlicingFinishedMessage(self, message):
    self.backendStateChange.emit(BackendState.Done)
    self.processingProgress.emit(1.0)

    # Post-process the g-code of the plate that was just sliced: fill in the
    # job placeholders ({print_time}, {filament_*}, {jobname}) and rewrite
    # "M190 R0"/"M190 S0" to "M105;" (presumably to avoid the printer
    # blocking on a wait-for-bed-temperature-0 command — TODO confirm).
    gcode_list = self._scene.gcode_dict[self._start_slice_job_build_plate]
    for index, line in enumerate(gcode_list):
        replaced = line.replace("{print_time}", str(Application.getInstance().getPrintInformation().currentPrintTime.getDisplayString(DurationFormat.Format.ISO8601)))
        replaced = replaced.replace("{filament_amount}", str(Application.getInstance().getPrintInformation().materialLengths))
        replaced = replaced.replace("{filament_weight}", str(Application.getInstance().getPrintInformation().materialWeights))
        replaced = replaced.replace("{filament_cost}", str(Application.getInstance().getPrintInformation().materialCosts))
        replaced = replaced.replace("{jobname}", str(Application.getInstance().getPrintInformation().jobName))
        replaced = replaced.replace("M190 R0", "M105;")
        replaced = replaced.replace("M190 S0", "M105;")

        gcode_list[index] = replaced

    self._slicing = False
    Logger.log("d", "Slicing took %s seconds", time() - self._slice_start_time )

    # See if we need to process the sliced layers job.
    active_build_plate = Application.getInstance().getBuildPlateModel().activeBuildPlate
    if self._layer_view_active and (self._process_layers_job is None or not self._process_layers_job.isRunning()) and active_build_plate == self._start_slice_job_build_plate:
        self._startProcessSlicedLayersJob(active_build_plate)
        # self._onActiveViewChanged()
        self._start_slice_job_build_plate = None

    Logger.log("d", "See if there is more to slice...")
    # Somehow this results in an Arcus Error
    # self.slice()
    # Call slice again using the timer, allowing the backend to restart
    if self._build_plates_to_be_sliced:
        self.enableTimer()  # manually enable timer to be able to invoke slice, also when in manual slice mode
        self._invokeSlice()
## Called when a g-code message is received from the engine.
#
#   \param message The protobuf message containing g-code, encoded as UTF-8.
def _onGCodeLayerMessage(self, message):
    # Append the decoded chunk to the g-code list of the plate being sliced;
    # "replace" keeps malformed bytes from raising UnicodeDecodeError.
    self._scene.gcode_dict[self._start_slice_job_build_plate].append(message.data.decode("utf-8", "replace"))
## Called when a g-code prefix message is received from the engine.
#
#   \param message The protobuf message containing the g-code prefix,
#   encoded as UTF-8.
def _onGCodePrefixMessage(self, message):
    # The prefix must precede all layer g-code, hence insert at position 0.
    self._scene.gcode_dict[self._start_slice_job_build_plate].insert(0, message.data.decode("utf-8", "replace"))
## Creates a new socket connection.
def _createSocket(self):
    # Hand the engine the Cura protobuf definition shipped with this plugin
    # so both sides agree on the message format.
    super()._createSocket(os.path.abspath(os.path.join(PluginRegistry.getInstance().getPluginPath(self.getPluginId()), "Cura.proto")))
    self._engine_is_fresh = True  # freshly started engine has not sliced anything yet
## Called when anything has changed to the stuff that needs to be sliced.
#
#   This indicates that we should probably re-slice soon.
def _onChanged(self, *args, **kwargs):
    self.needsSlicing()
    if not self._use_timer:
        return
    if self._is_error_check_scheduled:
        # An error check is pending; its finished-signal will trigger the
        # auto-slice instead. Otherwise business as usual.
        self._change_timer.stop()
    else:
        self._change_timer.start()
## Called when a print time message is received from the engine.
#
#   \param message The protobuf message containing the print time per feature
#   and material amount per extruder.
def _onPrintTimeMaterialEstimates(self, message):
    estimate_count = message.repeatedMessageCount("materialEstimates")
    material_amounts = [
        message.getRepeatedMessage("materialEstimates", idx).material_amount
        for idx in range(estimate_count)
    ]
    times = self._parseMessagePrintTimes(message)
    self.printDurationMessage.emit(self._start_slice_job_build_plate, times, material_amounts)
## Called for parsing message to retrieve estimated time per feature.
#
#   \param message The protobuf message containing the print time per feature.
#   \return dict mapping feature name -> estimated time, read from the
#           message's ``time_<feature>`` fields.
def _parseMessagePrintTimes(self, message):
    features = ("inset_0", "inset_x", "skin", "infill", "support_infill",
                "support_interface", "support", "skirt", "travel",
                "retract", "none")
    return {feature: getattr(message, "time_" + feature) for feature in features}
## Called when the back-end connects to the front-end.
def _onBackendConnected(self):
    if self._restart:
        # The engine was restarted (e.g. after a socket error); now that it
        # is available again, trigger a reslice.
        self._restart = False
        self._onChanged()
## Called when the user starts using some tool.
#
#   When the user starts using a tool, we should pause slicing to prevent
#   continuously slicing while the user is dragging some tool handle.
#
#   \param tool The tool that the user is using.
def _onToolOperationStarted(self, tool):
    self._tool_active = True  # ignore scene changes while the tool is active
    self.disableTimer()
    # A reslice after the tool operation is inevitable, so restart a stale
    # engine as early as possible.
    if not self._engine_is_fresh:
        self._terminate()
        self._createSocket()
## Called when the user stops using some tool.
#
#   This indicates that we can safely start slicing again.
#
#   \param tool The tool that the user was using.
def _onToolOperationStopped(self, tool):
    self._tool_active = False   # react on scene changes again
    self.determineAutoSlicing()  # switch timer on if appropriate
    # Replay, in order, every scene change that was postponed while the
    # tool was active.
    pending = self._postponed_scene_change_sources
    while pending:
        self._onSceneChanged(pending.pop(0))
## Start a background job that converts the stored optimized layer data of
#   the given build plate into displayable layer data.
def _startProcessSlicedLayersJob(self, build_plate_number):
    job = ProcessSlicedLayersJob.ProcessSlicedLayersJob(self._stored_optimized_layer_data[build_plate_number])
    job.setBuildPlate(build_plate_number)
    job.finished.connect(self._onProcessLayersFinished)
    self._process_layers_job = job
    job.start()
## Called when the user changes the active view mode.
def _onActiveViewChanged(self):
    application = Application.getInstance()
    view = application.getController().getActiveView()
    if view:
        active_build_plate = application.getBuildPlateModel().activeBuildPlate
        if view.getPluginId() == "SimulationView":  # If switching to layer view, we should process the layers if that hasn't been done yet.
            self._layer_view_active = True
            # There is data and we're not slicing at the moment
            # if we are slicing, there is no need to re-calculate the data as it will be invalid in a moment.
            # TODO: what build plate I am slicing
            if active_build_plate in self._stored_optimized_layer_data and not self._slicing and not self._process_layers_job:
                self._startProcessSlicedLayersJob(active_build_plate)
        else:
            self._layer_view_active = False
## Called when the back-end self-terminates.
#
#   We should reset our state and start listening for new connections.
def _onBackendQuit(self):
    if not self._restart:
        # Unexpected exit (not a restart we initiated): log the exit code
        # and drop the process handle so a new engine can be spawned.
        if self._process:
            Logger.log("d", "Backend quit with return code %s. Resetting process and socket.", self._process.wait())
            self._process = None
## Called when the global container stack changes.
def _onGlobalStackChanged(self):
    # First detach all our signal handlers from the previous stack (and its
    # extruders) so changes on an abandoned stack no longer trigger reslices.
    if self._global_container_stack:
        self._global_container_stack.propertyChanged.disconnect(self._onSettingChanged)
        self._global_container_stack.containersChanged.disconnect(self._onChanged)

        extruders = list(self._global_container_stack.extruders.values())
        for extruder in extruders:
            extruder.propertyChanged.disconnect(self._onSettingChanged)
            extruder.containersChanged.disconnect(self._onChanged)

    self._global_container_stack = Application.getInstance().getGlobalContainerStack()

    # Then attach to the new stack (if any) and force a reslice for it.
    if self._global_container_stack:
        self._global_container_stack.propertyChanged.connect(self._onSettingChanged)  # Note: Only starts slicing when the value changed.
        self._global_container_stack.containersChanged.connect(self._onChanged)

        extruders = list(self._global_container_stack.extruders.values())
        for extruder in extruders:
            extruder.propertyChanged.connect(self._onSettingChanged)
            extruder.containersChanged.connect(self._onChanged)

        self._onChanged()
## The ProcessSlicedLayersJob for one build plate completed.
def _onProcessLayersFinished(self, job):
    # The raw optimized layer data has been converted; free it.
    del self._stored_optimized_layer_data[job.getBuildPlate()]
    self._process_layers_job = None
    Logger.log("d", "See if there is more to slice(2)...")
    self._invokeSlice()
## Connect the slice function to the change timer (enables auto-slicing).
def enableTimer(self):
    if self._use_timer:
        return  # already connected; connecting again would slice twice per timeout
    self._change_timer.timeout.connect(self.slice)
    self._use_timer = True
## Disconnect the slice function from the change timer.
#   Slicing will then no longer be triggered automatically.
def disableTimer(self):
    if not self._use_timer:
        return
    self._use_timer = False
    self._change_timer.timeout.disconnect(self.slice)
## React to preference changes; only the auto-slice toggle matters here.
def _onPreferencesChanged(self, preference):
    if preference != "general/auto_slice":
        return
    if self.determineAutoSlicing():
        # Auto-slicing was just (re)enabled; start the timer immediately.
        self._change_timer.start()
## Tickle the backend so in case of auto slicing, it starts the timer.
def tickle(self):
    if not self._use_timer:
        return
    self._change_timer.start()
| alephobjects/Cura2 | plugins/CuraEngineBackend/CuraEngineBackend.py | Python | lgpl-3.0 | 38,736 |
# MIT License
#
# Copyright (c) 2016 David Sandberg
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import unittest
import tensorflow as tf
import numpy as np
import facenet
class CenterLossTest(unittest.TestCase):
    """Unit test for facenet.center_loss.

    Feeds features that sit exactly on a known grid of class centers; after
    many update steps the estimated centers must converge to the true ones
    and the center loss must go to zero.
    """

    def testCenterLoss(self):
        batch_size = 16
        nrof_features = 2
        nrof_classes = 16
        alfa = 0.5  # center update rate passed to facenet.center_loss

        with tf.Graph().as_default():

            features = tf.placeholder(tf.float32, shape=(batch_size, nrof_features), name='features')
            labels = tf.placeholder(tf.int32, shape=(batch_size,), name='labels')

            # Define center loss
            center_loss, centers = facenet.center_loss(features, labels, alfa, nrof_classes)

            # Ground-truth center for each of the 16 classes: a 4x4 grid in 2-D.
            label_to_center = np.array( [
                 [-3,-3], [-3,-1], [-3,1], [-3,3],
                 [-1,-3], [-1,-1], [-1,1], [-1,3],
                 [ 1,-3], [ 1,-1], [ 1,1], [ 1,3],
                 [ 3,-3], [ 3,-1], [ 3,1], [ 3,3]
                 ])

            sess = tf.Session()
            with sess.as_default():
                sess.run(tf.global_variables_initializer())
                np.random.seed(seed=666)

                for _ in range(0,100):
                    # Create array of random labels
                    lbls = np.random.randint(low=0, high=nrof_classes, size=(batch_size,))
                    feats = create_features(label_to_center, batch_size, nrof_features, lbls)

                    center_loss_, centers_ = sess.run([center_loss, centers], feed_dict={features:feats, labels:lbls})

                # After a large number of updates the estimated centers should be close to the true ones
                np.testing.assert_almost_equal(centers_, label_to_center, decimal=5, err_msg='Incorrect estimated centers')
                np.testing.assert_almost_equal(center_loss_, 0.0, decimal=5, err_msg='Incorrect center loss')
def create_features(label_to_center, batch_size, nrof_features, labels):
# Map label to center
# label_to_center_dict = {
# 0:(-3,-3), 1:(-3,-1), 2:(-3,1), 3:(-3,3),
# 4:(-1,-3), 5:(-1,-1), 6:(-1,1), 7:(-1,3),
# 8:( 1,-3), 9:( 1,-1), 10:( 1,1), 11:( 1,3),
# 12:( 3,-3), 13:( 3,-1), 14:( 3,1), 15:( 3,3),
# }
# Create array of features corresponding to the labels
feats = np.zeros((batch_size, nrof_features))
for i in range(batch_size):
cntr = label_to_center[labels[i]]
for j in range(nrof_features):
feats[i,j] = cntr[j]
return feats
# Allow running this test module directly: ``python center_loss_test.py``.
if __name__ == "__main__":
    unittest.main()
| liuzz1983/open_vision | test/center_loss_test.py | Python | mit | 3,706 |
""" $lic$
Copyright (C) 2016-2020 by Tsinghua University and The Board of Trustees of
Stanford University
This program is free software: you can redistribute it and/or modify it under
the terms of the Modified BSD-3 License as published by the Open Source
Initiative.
This program is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
PARTICULAR PURPOSE. See the BSD-3 License for more details.
You should have received a copy of the Modified BSD-3 License along with this
program. If not, see <https://opensource.org/licenses/BSD-3-Clause>.
"""
from nn_dataflow.core import Network
from nn_dataflow.core import InputLayer, ConvLayer, FCLayer, \
PoolingLayer, EltwiseLayer
'''
ResNet-50
He, Zhang, Ren, and Sun, 2015
'''
NN = Network('ResNet')

# 224x224 RGB input.
NN.set_input_layer(InputLayer(3, 224))

# Stem: 7x7 stride-2 conv followed by 3x3 stride-2 max pooling.
NN.add('conv1', ConvLayer(3, 64, 112, 7, 2))
NN.add('pool1', PoolingLayer(64, 56, 3, 2))

# Name of the layer currently feeding the residual (shortcut) path.
RES_PREV = 'pool1'

# Stage conv2_x: 3 bottleneck blocks at 56x56, channels 64 -> 64 -> 256.
for i in range(3):
    NN.add('conv2_{}_a'.format(i), ConvLayer(64 if i == 0 else 256, 64, 56, 1))
    NN.add('conv2_{}_b'.format(i), ConvLayer(64, 64, 56, 3))
    NN.add('conv2_{}_c'.format(i), ConvLayer(64, 256, 56, 1))

    # With residual shortcut.
    if i == 0:
        # First block of the stage: 1x1 projection shortcut to match the
        # output channel count.
        NN.add('conv2_br', ConvLayer(64, 256, 56, 1), prevs=(RES_PREV,))
        RES_PREV = 'conv2_br'

    NN.add('conv2_{}_res'.format(i), EltwiseLayer(256, 56, 2),
           prevs=(RES_PREV, 'conv2_{}_c'.format(i)))
    RES_PREV = 'conv2_{}_res'.format(i)

# Stage conv3_x: 4 bottleneck blocks at 28x28, channels 128 -> 128 -> 512;
# the first block downsamples with stride 2.
for i in range(4):
    NN.add('conv3_{}_a'.format(i),
           ConvLayer(256, 128, 28, 1, 2) if i == 0
           else ConvLayer(512, 128, 28, 1))
    NN.add('conv3_{}_b'.format(i), ConvLayer(128, 128, 28, 3))
    NN.add('conv3_{}_c'.format(i), ConvLayer(128, 512, 28, 1))

    # With residual shortcut.
    if i == 0:
        NN.add('conv3_br', ConvLayer(256, 512, 28, 1, 2), prevs=(RES_PREV,))
        RES_PREV = 'conv3_br'

    NN.add('conv3_{}_res'.format(i), EltwiseLayer(512, 28, 2),
           prevs=(RES_PREV, 'conv3_{}_c'.format(i)))
    RES_PREV = 'conv3_{}_res'.format(i)

# Stage conv4_x: 6 bottleneck blocks at 14x14, channels 256 -> 256 -> 1024.
for i in range(6):
    NN.add('conv4_{}_a'.format(i),
           ConvLayer(512, 256, 14, 1, 2) if i == 0
           else ConvLayer(1024, 256, 14, 1))
    NN.add('conv4_{}_b'.format(i), ConvLayer(256, 256, 14, 3))
    NN.add('conv4_{}_c'.format(i), ConvLayer(256, 1024, 14, 1))

    # With residual shortcut.
    if i == 0:
        NN.add('conv4_br', ConvLayer(512, 1024, 14, 1, 2), prevs=(RES_PREV,))
        RES_PREV = 'conv4_br'

    NN.add('conv4_{}_res'.format(i), EltwiseLayer(1024, 14, 2),
           prevs=(RES_PREV, 'conv4_{}_c'.format(i)))
    RES_PREV = 'conv4_{}_res'.format(i)

# Stage conv5_x: 3 bottleneck blocks at 7x7, channels 512 -> 512 -> 2048.
for i in range(3):
    NN.add('conv5_{}_a'.format(i),
           ConvLayer(1024, 512, 7, 1, 2) if i == 0
           else ConvLayer(2048, 512, 7, 1))
    NN.add('conv5_{}_b'.format(i), ConvLayer(512, 512, 7, 3))
    NN.add('conv5_{}_c'.format(i), ConvLayer(512, 2048, 7, 1))

    # With residual shortcut.
    if i == 0:
        NN.add('conv5_br', ConvLayer(1024, 2048, 7, 1, 2), prevs=(RES_PREV,))
        RES_PREV = 'conv5_br'

    NN.add('conv5_{}_res'.format(i), EltwiseLayer(2048, 7, 2),
           prevs=(RES_PREV, 'conv5_{}_c'.format(i)))
    RES_PREV = 'conv5_{}_res'.format(i)

# Head: global 7x7 average pooling and the 1000-way classifier.
NN.add('pool5', PoolingLayer(2048, 1, 7))
NN.add('fc', FCLayer(2048, 1000))
| stanford-mast/nn_dataflow | nn_dataflow/nns/resnet50.py | Python | bsd-3-clause | 3,367 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from azure.cli.core import AzCommandsLoader
# pylint: disable=unused-import
# pylint: disable=line-too-long
from ._help import helps
class EventhubCommandsLoader(AzCommandsLoader):
    """Command loader for the ``az eventhubs`` command module."""

    def __init__(self, cli_ctx=None):
        from azure.cli.core import ModExtensionSuppress
        from azure.cli.core.commands import CliCommandType
        from azure.cli.core.profiles import ResourceType
        # Custom command implementations live in this module's custom.py.
        eventhub_custom = CliCommandType(operations_tmpl='azure.cli.command_modules.eventhubs.custom#{}')
        # Suppress the legacy 'eventhubs' CLI extension: its commands were
        # folded into the core CLI and would otherwise conflict.
        super(EventhubCommandsLoader, self).__init__(cli_ctx=cli_ctx,
                                                     custom_command_type=eventhub_custom,
                                                     resource_type=ResourceType.MGMT_EVENTHUB,
                                                     suppress_extension=ModExtensionSuppress(__name__, 'eventhubs', '0.0.1',
                                                                                             reason='These commands are now in the CLI.',
                                                                                             recommend_remove=True))

    def load_command_table(self, args):
        # Populate self.command_table from commands.py.
        from azure.cli.command_modules.eventhubs.commands import load_command_table
        load_command_table(self, args)
        return self.command_table

    def load_arguments(self, command):
        # Register argument metadata for the given command.
        from azure.cli.command_modules.eventhubs._params import load_arguments_eh
        load_arguments_eh(self, command)


# Entry point the CLI uses to discover this module's command loader.
COMMAND_LOADER_CLS = EventhubCommandsLoader
| yugangw-msft/azure-cli | src/azure-cli/azure/cli/command_modules/eventhubs/__init__.py | Python | mit | 1,890 |
"""Config flow to configure flood monitoring gauges."""
from aioeafm import get_stations
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DOMAIN
class UKFloodsFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
    """Handle a UK Environment Agency flood monitoring config flow."""

    VERSION = 1

    def __init__(self):
        """Handle a UK Floods config flow."""
        # Maps a station's display label -> its stationReference id.
        self.stations = {}

    async def async_step_user(self, user_input=None):
        """Handle a flow start."""
        errors = {}

        if user_input is not None:
            station = self.stations[user_input["station"]]

            # One config entry per measuring station.
            await self.async_set_unique_id(station, raise_on_progress=False)
            self._abort_if_unique_id_configured()

            return self.async_create_entry(
                title=user_input["station"],
                data={"station": station},
            )

        # No selection yet: fetch all stations and present a pick list.
        session = async_get_clientsession(hass=self.hass)
        stations = await get_stations(session)

        self.stations = {}
        for station in stations:
            label = station["label"]

            # API annoyingly sometimes returns a list and some times returns a string
            # E.g. L3121 has a label of ['Scurf Dyke', 'Scurf Dyke Dyke Level']
            if isinstance(label, list):
                label = label[-1]

            self.stations[label] = station["stationReference"]

        if not self.stations:
            return self.async_abort(reason="no_stations")

        return self.async_show_form(
            step_id="user",
            errors=errors,
            data_schema=vol.Schema(
                {vol.Required("station"): vol.In(sorted(self.stations))}
            ),
        )
| jawilson/home-assistant | homeassistant/components/eafm/config_flow.py | Python | apache-2.0 | 1,816 |
from __future__ import division
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
import os
import sys
# Project root; simulation output and figures live under this directory.
mydir = os.path.expanduser('~/GitHub/Micro-Encounter')
sys.path.append(mydir+'/tools')
mydir2 = os.path.expanduser("~/")

dat = pd.read_csv(mydir + '/results/simulated_data/SimData.csv')
# NOTE(review): DataFrame.convert_objects is deprecated and removed in modern
# pandas; pd.to_numeric(...) is the replacement — TODO confirm pandas version.
dat = dat.convert_objects(convert_numeric=True).dropna()

#-------------------------DATA FILTERS------------------------------------------

#dat = dat[dat['SpatialComplexityLevel'] == 3]
#dat = dat[dat['ResourceComplexityLevel'] == 1]
# Drop trophic complexity level 2 throughout; the saved figure names below
# reflect this ("NoTrophicLevel2").
dat = dat[dat['TrophicComplexityLevel'] != 2]
#dat = dat[dat['IncomingResAgg'] < 0.2]
#dat = dat[dat['MaxMetMaint'] < 0.005]
#dat = dat[dat['ResInflow'] < 18]

#-------------------------------------------------------------------------------
# One figure per complexity axis; each figure shows the same four panels
# contrasting the three levels of that axis.
ComplexityLevels = ['res', 'troph', 'spatial']

for level in ComplexityLevels:

    #### plot figure ###########################################################
    lims = 'y'  # 'y' = apply the fixed axis limits set below
    fs = 8 # fontsize
    fig = plt.figure()

    # Partition the (pre-filtered) data by the current complexity axis and
    # pick matching legend labels.
    if level == 'res':
        dat1 = dat[dat['ResourceComplexityLevel'] == 1]
        dat2 = dat[dat['ResourceComplexityLevel'] == 2]
        dat3 = dat[dat['ResourceComplexityLevel'] == 3]
        label1 = 'No diversity, No complexity'
        label2 = 'No complexity'
        label3 = 'Diversity + Complexity'

    if level == 'troph':
        dat1 = dat[dat['TrophicComplexityLevel'] == 1]
        dat2 = dat[dat['TrophicComplexityLevel'] == 2]
        dat3 = dat[dat['TrophicComplexityLevel'] == 3]
        label1 = 'No trophic complexity'
        label2 = 'Trophic complexity'
        label3 = 'Recycling'

    if level == 'spatial':
        dat1 = dat[dat['SpatialComplexityLevel'] == 1]
        dat2 = dat[dat['SpatialComplexityLevel'] == 2]
        dat3 = dat[dat['SpatialComplexityLevel'] == 3]
        label1 = 'White noise'
        label2 = 'Aggregated w/ Random walks'
        label3 = 'Aggregated w/ chemotaxis'

    #### PLOT 1: resource aggregation vs. resource concentration ##############
    fig.add_subplot(2, 2, 1)
    xlab = 'Resource concentration'
    ylab = 'Resource aggregation'

    plt.scatter(dat1['MeanResourceConcentration'], dat1['MeanResAgg'], color = 'm', alpha = 0.7 , s = 10, linewidths = 0.0, edgecolor = 'k', label=label1)
    plt.scatter(dat2['MeanResourceConcentration'], dat2['MeanResAgg'], color = 'steelblue', alpha = 0.7 , s = 10, linewidths = 0.0, edgecolor = 'k', label=label2)
    plt.scatter(dat3['MeanResourceConcentration'], dat3['MeanResAgg'], color = 'goldenrod', alpha = 0.7 , s = 10, linewidths = 0.0, edgecolor = 'k', label=label3)

    plt.ylabel(ylab, fontsize=fs+5)
    plt.xlabel(xlab, fontsize=fs+5)
    plt.yscale('log')
    plt.xscale('log')
    if lims == 'y':
        plt.ylim(0.1, 1000)
        plt.xlim(0.001, 4.0)
    plt.tick_params(axis='both', which='major', labelsize=fs)
    # Single legend shared by all four panels, drawn above the first panel.
    plt.legend(bbox_to_anchor=(-0.04, 1.05, 2.48, .2), loc=10, ncol=3, mode="expand",prop={'size':fs})

    # NOTE(review): leftover inset-axes experiment kept as an inert string.
    '''
    a = plt.axes([0.17, 0.78, 0.1, 0.1], axisbg='w')
    plt.scatter(dat1['VarResourceConcentration'], dat1['VarIndAgg'], color = 'm', alpha = 0.7 )
    plt.scatter(dat2['VarResourceConcentration'], dat2['VarIndAgg'], color = 'steelblue', alpha = 0.8)
    plt.scatter(dat3['VarResourceConcentration'], dat3['VarIndAgg'], color = 'goldenrod', alpha = 0.8)
    #plt.set_xlim(0.5*min(dat1['VarResourceConcentration']), 2*max(dat1['VarResourceConcentration']))
    #plt.set_ylim(0.5*min(dat1['VarIndAgg']), 2*max(dat1['VarIndAgg']))
    plt.title('Probability')
    plt.xticks([])
    plt.yticks([])
    '''

    #### PLOT 2: average encounters vs. resource aggregation ##################
    fig.add_subplot(2, 2, 2)
    ylab = 'Avg Encounters'
    xlab = 'Resource aggregation'

    plt.scatter(dat1['MeanResAgg'], dat1['MeanEncounter'], color = 'm', alpha = 0.7 , s = 10, linewidths = 0.0, edgecolor = 'k')
    plt.scatter(dat2['MeanResAgg'], dat2['MeanEncounter'], color = 'steelblue', alpha = 0.7 , s = 10, linewidths = 0.0, edgecolor = 'k')
    plt.scatter(dat3['MeanResAgg'], dat3['MeanEncounter'], color = 'goldenrod', alpha = 0.7 , s = 10, linewidths = 0.0, edgecolor = 'k')

    plt.ylabel(ylab, fontsize=fs+5)
    plt.xlabel(xlab, fontsize=fs+5)
    plt.xscale('log')
    plt.yscale('log')
    if lims == 'y':
        plt.ylim(0.01, 60)
        plt.xlim(0.1, 1000)
    plt.tick_params(axis='both', which='major', labelsize=fs)

    #### PLOT 3: dormancy vs. resource aggregation #############################
    fig.add_subplot(2, 2, 3)
    ylab = 'Percent Dormant'
    xlab = 'Resource aggregation'

    plt.scatter(dat1['MeanResAgg'], dat1['MeanDormFreq'], color = 'm', alpha = 0.7 , s = 10, linewidths = 0.0, edgecolor = 'k')
    plt.scatter(dat2['MeanResAgg'], dat2['MeanDormFreq'], color = 'steelblue', alpha = 0.7 , s = 10, linewidths = 0.0, edgecolor = 'k')
    plt.scatter(dat3['MeanResAgg'], dat3['MeanDormFreq'], color = 'goldenrod', alpha = 0.7 , s = 10, linewidths = 0.0, edgecolor = 'k')

    plt.ylabel(ylab, fontsize=fs+5)
    plt.xlabel(xlab, fontsize=fs+5)
    plt.xscale('log')
    if lims == 'y':
        plt.ylim(0.0, 1.05)
        plt.xlim(0.1, 1000)
    plt.tick_params(axis='both', which='major', labelsize=fs)

    #### PLOT 4: productivity vs. resource aggregation #########################
    fig.add_subplot(2, 2, 4)
    ylab = 'Productivity'
    xlab = 'Resource aggregation'

    plt.scatter(dat1['MeanResAgg'], dat1['MeanIndProduction'], color = 'm', alpha = 0.7 , s = 10, linewidths = 0.0, edgecolor = 'k')
    plt.scatter(dat2['MeanResAgg'], dat2['MeanIndProduction'], color = 'steelblue', alpha = 0.7 , s = 10, linewidths = 0.0, edgecolor = 'k')
    plt.scatter(dat3['MeanResAgg'], dat3['MeanIndProduction'], color = 'goldenrod', alpha = 0.7 , s = 10, linewidths = 0.0, edgecolor = 'k')

    plt.ylabel(ylab, fontsize=fs+5)
    plt.xlabel(xlab, fontsize=fs+5)
    plt.yscale('log')
    plt.xscale('log')
    if lims == 'y':
        plt.ylim(0.01, 40)
        plt.xlim(0.1, 1000)
    plt.tick_params(axis='both', which='major', labelsize=fs)

    #### Final Format and Save #####################################################
    plt.subplots_adjust(wspace=0.4, hspace=0.4)
    if level == 'spatial':
        plt.savefig(mydir + '/results/figures/Aggregation-SpatialComplexity-NoTrophicLevel2.png', dpi=600, bbox_inches = "tight")
    elif level == 'res':
        plt.savefig(mydir + '/results/figures/Aggregation-ResourceComplexity-NoTrophicLevel2.png', dpi=600, bbox_inches = "tight")
    elif level == 'troph':
        plt.savefig(mydir + '/results/figures/Aggregation-TrophicComplexity-NoTrophicLevel2.png', dpi=600, bbox_inches = "tight")

    #plt.show()
    plt.close()
| LennonLab/Micro-Encounter | fig-scripts/OLD-fig-scripts/AggFig.py | Python | gpl-3.0 | 6,688 |
# -*- coding: utf-8 -*-
#
# Copyright © 2013-2014 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions
# of the GNU General Public License v.2, or (at your option) any later
# version. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY expressed or implied, including the
# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details. You
# should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Any Red Hat trademarks that are incorporated in the source
# code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission
# of Red Hat, Inc.
#
'''
pkgdb tests.
'''
__requires__ = ['SQLAlchemy >= 0.7']
import pkg_resources
import unittest
import sys
import os
from contextlib import contextmanager
from flask import appcontext_pushed, g
sys.path.insert(0, os.path.join(os.path.dirname(
os.path.abspath(__file__)), '..'))
from pkgdb2 import APP, FAS, LOG
from pkgdb2.lib import model
#DB_PATH = 'sqlite:///:memory:'
## A file database is required to check the integrity, don't ask
DB_PATH = 'sqlite:////tmp/test.sqlite'
FAITOUT_URL = 'http://faitout.fedorainfracloud.org/'
if os.environ.get('BUILD_ID'):
try:
import requests
req = requests.get('%s/new' % FAITOUT_URL)
if req.status_code == 200:
DB_PATH = req.text
print 'Using faitout at: %s' % DB_PATH
except:
pass
LOG.handlers = []
class FakeFasUser(object):
    """ Fake FAS user used for the tests. """
    # Mirrors the attributes pkgdb2 reads from a real FAS account object.
    id = 100
    username = 'pingou'
    cla_done = True
    groups = ['packager', 'cla_done']
    bugzilla_email = 'pingou@pingoured.fr'
class FakeFasUserAdmin(object):
    """ Fake FAS user used for the tests. """
    # 'sysadmin-cvs' membership is what grants admin rights in pkgdb2.
    id = 1000
    username = 'admin'
    cla_done = True
    groups = ['packager', 'cla_done', 'sysadmin-cvs']
class FakeFasGroupValid(object):
    """ Fake FAS Group used for the tests. """
    # group_type 'pkgdb' marks a group that may own/maintain packages.
    id = 10000
    name = 'perl-sig'
    group_type = 'pkgdb'
class FakeFasGroupInvalid(object):
    """ Fake FAS Group used for the tests. """
    # group_type 'tracking' is not an acceptable packager-group type.
    id = 10001
    name = 'perl'
    group_type = 'tracking'
@contextmanager
def user_set(APP, user):
    """ Set the provided user as fas_user in the provided application."""

    # Hack used to remove the before_request function set by
    # flask.ext.fas_openid.FAS which otherwise kills our effort to set a
    # flask.g.fas_user.
    APP.before_request_funcs[None] = []

    def handler(sender, **kwargs):
        # Runs whenever an app context is pushed; injects the fake user.
        g.fas_user = user

    with appcontext_pushed.connected_to(handler, APP):
        yield
class Modeltests(unittest.TestCase):
    """ Model tests. """

    def __init__(self, method_name='runTest'):
        """ Constructor. """
        unittest.TestCase.__init__(self, method_name)
        # SQLAlchemy session, (re)created per test in setUp().
        self.session = None

    # pylint: disable=C0103
    def setUp(self):
        """ Set up the environment, run before every test. """
        # For a file-backed sqlite URL, remove any stale database file so
        # each test starts from a clean schema.
        if '///' in DB_PATH:
            dbfile = DB_PATH.split('///')[1]
            if os.path.exists(dbfile):
                os.unlink(dbfile)
        self.session = model.create_tables(DB_PATH, debug=False)

        # Create the docker namespace
        obj = model.Namespace('docker')
        self.session.add(obj)
        self.session.commit()

        APP.before_request(FAS._check_session)

    # pylint: disable=C0103
    def tearDown(self):
        """ Remove the test.db database if there is one. """
        if '///' in DB_PATH:
            dbfile = DB_PATH.split('///')[1]
            if os.path.exists(dbfile):
                os.unlink(dbfile)

        self.session.rollback()
        self.session.close()

        # PostgreSQL back-ends: drop the tables locally, or ask faitout to
        # clean the remote throw-away database.
        if DB_PATH.startswith('postgres'):
            if 'localhost' in DB_PATH:
                model.drop_tables(DB_PATH, self.session.bind)
            else:
                db_name = DB_PATH.rsplit('/', 1)[1]
                req = requests.get(
                    '%s/clean/%s' % (FAITOUT_URL, db_name))
                print req.text
def create_collection(session):
    """ Create some basic collection for testing. """
    # (name, version, status, owner, branchname, dist_tag, extra kwargs)
    rows = [
        ('Fedora', '17', 'Active', 'toshio', 'f17', '.fc17', {}),
        ('Fedora', '18', 'Active', 'toshio', 'f18', '.fc18', {}),
        ('Fedora', 'devel', 'Under Development', 'kevin', 'master', 'devel',
         {'allow_retire': True}),
        ('Fedora EPEL', '6', 'Active', 'kevin', 'el6', '.el6', {}),
        ('Fedora EPEL', '4', 'EOL', 'kevin', 'el4', '.el4', {}),
    ]
    for name, version, status, owner, branchname, dist_tag, extra in rows:
        session.add(model.Collection(
            name=name,
            version=version,
            status=status,
            owner=owner,
            branchname=branchname,
            dist_tag=dist_tag,
            **extra
        ))
    session.commit()
def create_package(session):
    """ Create some basic package for testing. """
    # (name, namespace, summary, description, review_url, upstream_url)
    rows = [
        ('guake', 'rpms', 'Top down terminal for GNOME',
         'Top down terminal...',
         'https://bugzilla.redhat.com/450189', 'http://guake.org'),
        ('fedocal', 'rpms', 'A web-based calendar for Fedora',
         'Web calendar ...',
         'https://bugzilla.redhat.com/915074', 'http://fedorahosted.org/fedocal'),
        ('geany', 'rpms', 'A fast and lightweight IDE using GTK2',
         'Lightweight GNOME IDE...',
         None, None),
        ('offlineimap', 'docker',
         'Powerful IMAP/Maildir synchronization and reader support',
         'Powerful IMAP/Maildir synchronization...',
         None, None),
    ]
    for name, namespace, summary, description, review_url, upstream_url in rows:
        session.add(model.Package(
            name=name,
            namespace=namespace,
            summary=summary,
            description=description,
            status='Approved',
            review_url=review_url,
            upstream_url=upstream_url,
            monitor=False,
        ))
    session.commit()
def create_package_listing(session):
    """ Link the test packages to the test collections.

    Calls create_collection() and create_package() first, then creates
    one PackageListing row per (package, collection) pair below.
    """
    create_collection(session)
    create_package(session)

    guake_pkg = model.Package.by_name(session, 'rpms', 'guake')
    fedocal_pkg = model.Package.by_name(session, 'rpms', 'fedocal')
    geany_pkg = model.Package.by_name(session, 'rpms', 'geany')
    offlineimap_pkg = model.Package.by_name(session, 'docker', 'offlineimap')

    f17_collec = model.Collection.by_name(session, 'f17')
    f18_collec = model.Collection.by_name(session, 'f18')
    devel_collec = model.Collection.by_name(session, 'master')
    el4_collec = model.Collection.by_name(session, 'el4')

    # (point of contact, status, package, collection)
    listings = [
        ('pingou', 'Approved', guake_pkg, f18_collec),
        ('pingou', 'Approved', guake_pkg, devel_collec),
        ('pingou', 'Approved', fedocal_pkg, f17_collec),
        ('orphan', 'Orphaned', fedocal_pkg, f18_collec),
        ('orphan', 'Retired', fedocal_pkg, devel_collec),
        ('pingou', 'Approved', geany_pkg, f18_collec),
        ('group::gtk-sig', 'Approved', geany_pkg, devel_collec),
        ('dodji', 'Approved', offlineimap_pkg, el4_collec),
        ('josef', 'Approved', offlineimap_pkg, devel_collec),
    ]
    for poc, status, package, collection in listings:
        session.add(model.PackageListing(
            point_of_contact=poc,
            status=status,
            package_id=package.id,
            collection_id=collection.id,
        ))
    session.commit()
def create_package_critpath(session):
    """ Create a critical-path package (kernel) listed on f18 and master. """
    package = model.Package(
        name='kernel',
        namespace='rpms',
        summary='The Linux kernel',
        description='The kernel',
        status='Approved',
        review_url='https://bugzilla.redhat.com/123',
        upstream_url='http://www.kernel.org/',
        monitor=True,
        koschei=True,
    )
    session.add(package)

    f18_collec = model.Collection.by_name(session, 'f18')
    devel_collec = model.Collection.by_name(session, 'master')

    # Both listings are approved and marked critpath.
    for poc, collection in [('kernel-maint', f18_collec),
                            ('group::kernel-maint', devel_collec)]:
        session.add(model.PackageListing(
            point_of_contact=poc,
            status='Approved',
            package_id=package.id,
            collection_id=collection.id,
            critpath=True,
        ))
    session.commit()
def create_package_acl(session):
    """ Grant ACLs to packagers on the test package listings. """
    create_package_listing(session)

    guake_pkg = model.Package.by_name(session, 'rpms', 'guake')
    geany_pkg = model.Package.by_name(session, 'rpms', 'geany')
    offlineimap_pkg = model.Package.by_name(session, 'docker', 'offlineimap')

    el4_collec = model.Collection.by_name(session, 'el4')
    f18_collec = model.Collection.by_name(session, 'f18')
    devel_collec = model.Collection.by_name(session, 'master')

    guake_f18 = model.PackageListing.by_pkgid_collectionid(
        session, guake_pkg.id, f18_collec.id)
    guake_devel = model.PackageListing.by_pkgid_collectionid(
        session, guake_pkg.id, devel_collec.id)
    geany_devel = model.PackageListing.by_pkgid_collectionid(
        session, geany_pkg.id, devel_collec.id)
    offlineimap_el4 = model.PackageListing.by_pkgid_collectionid(
        session, offlineimap_pkg.id, el4_collec.id)

    # (fas name, package listing, acl, status)
    acls = [
        ('pingou', guake_f18, 'commit', 'Approved'),
        ('pingou', guake_f18, 'watchcommits', 'Approved'),
        ('pingou', guake_devel, 'commit', 'Approved'),
        ('pingou', guake_devel, 'approveacls', 'Approved'),
        ('pingou', guake_devel, 'watchcommits', 'Approved'),
        ('toshio', guake_devel, 'commit', 'Awaiting Review'),
        ('ralph', guake_devel, 'approveacls', 'Awaiting Review'),
        ('group::gtk-sig', geany_devel, 'commit', 'Approved'),
        ('josef', geany_devel, 'commit', 'Approved'),
        ('josef', geany_devel, 'approveacls', 'Approved'),
        ('josef', geany_devel, 'watchcommits', 'Approved'),
        ('dodji', offlineimap_el4, 'commit', 'Approved'),
        ('dodji', offlineimap_el4, 'approveacls', 'Approved'),
        ('dodji', offlineimap_el4, 'watchcommits', 'Approved'),
    ]
    for fas_name, listing, acl, status in acls:
        session.add(model.PackageListingAcl(
            fas_name=fas_name,
            packagelisting_id=listing.id,
            acl=acl,
            status=status,
        ))
    session.commit()
def create_package_acl2(session):
    """ Grant a wider set of ACLs on the test package listings.

    Differs from create_package_acl() by covering fedocal listings and
    the 'watchbugzilla' ACL as well.
    """
    create_package_listing(session)

    guake_pkg = model.Package.by_name(session, 'rpms', 'guake')
    fedocal_pkg = model.Package.by_name(session, 'rpms', 'fedocal')
    geany_pkg = model.Package.by_name(session, 'rpms', 'geany')

    f17_collec = model.Collection.by_name(session, 'f17')
    f18_collec = model.Collection.by_name(session, 'f18')
    devel_collec = model.Collection.by_name(session, 'master')

    guake_f18 = model.PackageListing.by_pkgid_collectionid(
        session, guake_pkg.id, f18_collec.id)
    guake_devel = model.PackageListing.by_pkgid_collectionid(
        session, guake_pkg.id, devel_collec.id)
    geany_devel = model.PackageListing.by_pkgid_collectionid(
        session, geany_pkg.id, devel_collec.id)
    fedocal_devel = model.PackageListing.by_pkgid_collectionid(
        session, fedocal_pkg.id, devel_collec.id)
    fedocal_f18 = model.PackageListing.by_pkgid_collectionid(
        session, fedocal_pkg.id, f18_collec.id)
    fedocal_f17 = model.PackageListing.by_pkgid_collectionid(
        session, fedocal_pkg.id, f17_collec.id)

    # (fas name, package listing, acl, status)
    acls = [
        ('pingou', guake_f18, 'commit', 'Approved'),
        ('pingou', guake_f18, 'watchcommits', 'Approved'),
        ('pingou', guake_devel, 'commit', 'Approved'),
        ('pingou', guake_devel, 'watchcommits', 'Approved'),
        ('pingou', guake_devel, 'watchbugzilla', 'Approved'),
        ('toshio', guake_devel, 'commit', 'Awaiting Review'),
        ('spot', guake_devel, 'commit', 'Approved'),
        ('spot', guake_devel, 'watchbugzilla', 'Approved'),
        ('group::gtk-sig', geany_devel, 'commit', 'Approved'),
        ('group::gtk-sig', geany_devel, 'watchbugzilla', 'Approved'),
        ('pingou', geany_devel, 'commit', 'Approved'),
        ('pingou', fedocal_devel, 'commit', 'Approved'),
        ('toshio', fedocal_devel, 'commit', 'Approved'),
        ('pingou', fedocal_f18, 'commit', 'Approved'),
        ('pingou', fedocal_f18, 'watchbugzilla', 'Approved'),
        ('pingou', fedocal_f17, 'commit', 'Approved'),
        ('pingou', fedocal_f17, 'watchbugzilla', 'Approved'),
    ]
    for fas_name, listing, acl, status in acls:
        session.add(model.PackageListingAcl(
            fas_name=fas_name,
            packagelisting_id=listing.id,
            acl=acl,
            status=status,
        ))
    session.commit()
def create_admin_actions(session, n=1):
    """ Create pending admin (branch request) actions for the tests.

    When ``n`` is greater than 1, an additional branch request against
    f17 is created as well.
    """
    guake_pkg = model.Package.by_name(session, 'rpms', 'guake')
    el6_collec = model.Collection.by_name(session, 'el6')

    # Plain branch request on el6, still pending.
    session.add(model.AdminAction(
        package_id=guake_pkg.id,
        collection_id=el6_collec.id,
        user='ralph',
        _status='Pending',
        action='request.branch',
    ))
    # Branch request carrying a serialized package payload in ``info``
    # (no package_id), awaiting review.
    session.add(model.AdminAction(
        info='{"pkg_summary": "Busybox version suited for Mindi", '
        '"pkg_status": "Approved", "pkg_collection": "master", '
        '"pkg_name": "mindi-busybox", "pkg_review_url": '
        '"https://bugzilla.redhat.com/bugzilla/show_bug.cgi?id=476234", '
        '"pkg_description": "", "pkg_upstream_url": "", "pkg_poc": "pingou", '
        '"pkg_critpath": false}',
        collection_id=el6_collec.id,
        user='toshio',
        _status='Awaiting Review',
        action='request.branch',
    ))

    if n > 1:
        f17_collec = model.Collection.by_name(session, 'f17')
        session.add(model.AdminAction(
            package_id=guake_pkg.id,
            collection_id=f17_collec.id,
            user='ralph',
            _status='Pending',
            action='request.branch',
        ))
    session.commit()
def create_retired_pkgs(session):
    """ Create a mix of retired and active package listings.

    Used by tests exercising retired-package handling: guake is retired
    on el6 only, while fedocal is retired on every branch it has.
    """
    create_collection(session)
    create_package(session)

    # NOTE: the original code also looked up the 'geany' and
    # 'offlineimap' packages here but never used them; those dead
    # lookups have been removed.
    guake_pkg = model.Package.by_name(session, 'rpms', 'guake')
    fedocal_pkg = model.Package.by_name(session, 'rpms', 'fedocal')

    f17_collec = model.Collection.by_name(session, 'f17')
    f18_collec = model.Collection.by_name(session, 'f18')
    devel_collec = model.Collection.by_name(session, 'master')
    el4_collec = model.Collection.by_name(session, 'el4')
    el6_collec = model.Collection.by_name(session, 'el6')

    # (point of contact, status, package, collection)
    listings = [
        ('pingou', 'Approved', guake_pkg, el4_collec),
        ('pingou', 'Retired', guake_pkg, el6_collec),
        ('pingou', 'Approved', guake_pkg, devel_collec),
        ('pingou', 'Retired', fedocal_pkg, f17_collec),
        ('orphan', 'Retired', fedocal_pkg, f18_collec),
        ('orphan', 'Retired', fedocal_pkg, devel_collec),
    ]
    for poc, status, package, collection in listings:
        session.add(model.PackageListing(
            point_of_contact=poc,
            status=status,
            package_id=package.id,
            collection_id=collection.id,
        ))
    session.commit()
if __name__ == '__main__':
    # Allow running the model test suite directly from the command line.
    SUITE = unittest.TestLoader().loadTestsFromTestCase(Modeltests)
    unittest.TextTestRunner(verbosity=2).run(SUITE)
| mizdebsk/pkgdb2 | tests/__init__.py | Python | gpl-2.0 | 23,339 |
# -*- coding: ISO-8859-1 -*-
#############################################
## (C)opyright by Dirk Holtwick, 2002-2007 ##
## All rights reserved ##
#############################################
__reversion__ = "$Revision: 20 $"
__author__ = "$Author: holtwick $"
__date__ = "$Date: 2007-10-09 12:58:24 +0200 (Di, 09 Okt 2007) $"
from pisa_context import pisaContext
from pisa_parser import pisaParser
from pisa_util import *
from pisa_reportlab import *
from pisa_default import DEFAULT_CSS
import os
import types
import cgi
def pisaErrorDocument(dest, c):
    """ Render the log of context *c* (errors then warnings) as a small
    HTML report and convert it to a PDF written to *dest*.

    Returns the context of that second conversion.
    """
    parts = []
    parts.append("<p style='background-color:red;'><strong>%d error(s) occured:</strong><p>" % c.err)
    for mode, line, msg, code in c.log:
        if mode == "error":
            parts.append("<pre>%s in line %d: %s</pre>" % (mode, line, cgi.escape(msg)))

    parts.append("<p><strong>%d warning(s) occured:</strong><p>" % c.warn)
    for mode, line, msg, code in c.log:
        if mode == "warning":
            parts.append("<p>%s in line %d: %s</p>" % (mode, line, cgi.escape(msg)))

    return pisaDocument("".join(parts), dest)
def pisaStory(
        src,
        path = None,
        link_callback = None,
        debug = 0,
        default_css = None,
        xhtml = False,
        encoding = None,
        c = None,
        **kw):
    """ Parse *src* and return a pisaContext whose story holds the
    resulting Reportlab flowables. An existing context can be passed
    in via *c*; otherwise a fresh one is created. """
    # Reuse the caller's context when one was handed in.
    if not c:
        c = pisaContext(path, debug=debug)
        c.pathCallback = link_callback

    # Fall back to the built-in stylesheet so the output looks as expected.
    if default_css is None:
        default_css = DEFAULT_CSS

    # Parse the source and fill the story.
    pisaParser(src, c, default_css, xhtml, encoding)

    # Reportlab cannot build an empty document, so make sure there is at
    # least one (empty) paragraph.
    if not c.story:
        c.addPara(force=True)

    # Drop links to anchors that were never defined (works around a
    # Reportlab bug).
    for frag, anchor in c.anchorFrag:
        if anchor not in c.anchorName:
            frag.link = None

    return c
def pisaDocument(
    src,
    dest,
    path = None,
    link_callback = None,
    debug = 0,
    show_error_as_pdf = False,
    default_css = None,
    xhtml = False,
    encoding = None,
    **kw):
    """
    Convert the HTML/XML in *src* to a PDF written to the file-like
    object *dest*.

    Returns the pisaContext used for the conversion; callers should
    check ``c.err`` to find out whether errors occurred.  When
    *show_error_as_pdf* is true and errors occurred, an error-report
    PDF is written instead.
    """
    try:
        log.debug("pisaDocument options:\n src = %r\n dest = %s\n path = %r\n link_callback = %r\n xhtml = %r",
            src,
            dest,
            path,
            link_callback,
            xhtml)

        # Prepare simple context
        c = pisaContext(path, debug=debug)
        c.pathCallback = link_callback

        # Build story
        c = pisaStory(src, path, link_callback, debug, default_css, xhtml, encoding, c=c)

        # Buffer PDF into memory; the watermark pass below may need to
        # re-read it with pyPdf before it reaches *dest*.
        out = StringIO.StringIO()

        doc = PmlBaseDoc(
            out,
            pagesize = c.pageSize,
            author = c.meta["author"].strip(),
            subject = c.meta["subject"].strip(),
            keywords = [x.strip() for x in c.meta["keywords"].strip().split(",") if x],
            title = c.meta["title"].strip(),
            showBoundary = 0,
            allowSplitting = 1)

        # XXX It is not possible to access PDF info, because it is private in canvas
        # doc.info.producer = "pisa <http://www.holtwick.it>"

        # Prepare templates and their frames: use the parsed "body"
        # template when the document defined one, otherwise build a
        # default full-page frame with 1cm margins.
        if c.templateList.has_key("body"):
            body = c.templateList["body"]
            del c.templateList["body"]
        else:
            x, y, w, h = getBox("1cm 1cm -1cm -1cm", c.pageSize)
            body = PmlPageTemplate(
                id="body",
                frames=[
                    Frame(x, y, w, h,
                        id = "body",
                        leftPadding = 0,
                        rightPadding = 0,
                        bottomPadding = 0,
                        topPadding = 0)],
                pagesize = c.pageSize)

        doc.addPageTemplates([body] + c.templateList.values())

        # Use multibuild e.g. if a TOC has to be created
        if c.multiBuild:
            doc.multiBuild(c.story)
        else:
            doc.build(c.story)

        # Add watermarks: merge each generated page onto its background
        # PDF, if any background was registered while parsing.
        if pyPdf:
            for bgouter in c.pisaBackgroundList:
                # If we have at least one background, then lets do it
                if bgouter:
                    istream = out
                    try:
                        output = pyPdf.PdfFileWriter()
                        input1 = pyPdf.PdfFileReader(istream)
                        ctr = 0
                        for bg in c.pisaBackgroundList:
                            page = input1.getPage(ctr)
                            if bg:
                                if os.path.exists(bg):
                                    bginput = pyPdf.PdfFileReader(open(bg, "rb"))
                                    # Merge the content page on top of the
                                    # background page, then keep the result.
                                    pagebg = bginput.getPage(0)
                                    pagebg.mergePage(page)
                                    page = pagebg
                                else:
                                    log.warn(c.warning("Background PDF %s doesn't exist.", bg))
                            output.addPage(page)
                            ctr += 1
                        # Replace the buffered PDF with the merged one.
                        out = StringIO.StringIO()
                        output.write(out)
                    except Exception:
                        log.exception(c.error("pyPDF error"))
                    # Found a background? So leave loop after first occurence
                    break
        else:
            log.warn(c.warning("pyPDF not installed!"))

        # Get the resulting PDF and write it to the file object
        # passed from the caller
        data = out.getvalue()
        dest.write(data)

        # In web frameworks for debugging purposes maybe an output of
        # errors in a PDF is preferred
        if show_error_as_pdf and c and c.err:
            return pisaErrorDocument(dest, c)

    # NOTE(review): bare except also swallows KeyboardInterrupt and
    # SystemExit; consider narrowing to ``except Exception``.
    except:
        log.exception("Document error")
        c.err += 1

    return c
| pombreda/xhtml2pdf | sx/pisa3/pisa_document.py | Python | gpl-2.0 | 6,471 |
"""
Local settings
- Run in Debug mode
- Use console backend for emails
- Add django-extensions as app
"""
from .base import * # noqa
# DEBUG
# ------------------------------------------------------------------------------
DEBUG = env.bool('DJANGO_DEBUG', default=True)
TEMPLATES[0]['OPTIONS']['debug'] = DEBUG
# SECRET CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Note: This key only used for development and testing.
SECRET_KEY = env('DJANGO_SECRET_KEY', default='cA1b2=|7eYasfVOF.HCs{^1}SmXX]su*O&z43-fy|54{zr?XLn')
# Mail settings
# ------------------------------------------------------------------------------
EMAIL_PORT = 1025
EMAIL_HOST = 'localhost'
EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND',
default='django.core.mail.backends.console.EmailBackend')
# CACHING
# ------------------------------------------------------------------------------
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': ''
}
}
# django-extensions
# ------------------------------------------------------------------------------
INSTALLED_APPS += ['django_extensions', ]
# TESTING
# ------------------------------------------------------------------------------
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
# ######### CELERY
# In development, all tasks will be executed locally by blocking until the task returns
# CELERY_ALWAYS_EAGER = True
# ######### END CELERY
# Don't validate passwords locally
AUTH_PASSWORD_VALIDATORS = []
| zee93/yt_lab | config/settings/local.py | Python | mit | 1,646 |
#!/usr/bin/env python
# vim: et ts=2 sw=2
from bam.app import App
from bam.handler import Handler
from bam.server import Server
| adammck/bam | bam/__init__.py | Python | mit | 129 |
# -*- coding: utf-8 -*-
from collections import defaultdict
from django.contrib.sites.models import Site
from django.utils.translation import get_language
from cms.apphook_pool import apphook_pool
from cms.models.permissionmodels import ACCESS_DESCENDANTS
from cms.models.permissionmodels import ACCESS_PAGE_AND_DESCENDANTS
from cms.models.permissionmodels import ACCESS_CHILDREN
from cms.models.permissionmodels import ACCESS_PAGE_AND_CHILDREN
from cms.models.permissionmodels import ACCESS_PAGE
from cms.models.permissionmodels import PagePermission, GlobalPagePermission
from cms.utils import get_language_from_request
from cms.utils.compat.dj import user_related_name
from cms.utils.conf import get_cms_setting
from cms.utils.i18n import get_fallback_languages, hide_untranslated
from cms.utils.page_resolver import get_page_queryset
from cms.utils.moderator import get_title_queryset, use_draft
from cms.utils.plugins import current_site
from menus.base import Menu, NavigationNode, Modifier
from menus.menu_pool import menu_pool
def get_visible_pages(request, pages, site=None):
    """
    Return the list of primary keys of the *pages* that the current
    request's user may view.

    This code is basically a many-pages-at-once version of
    Page.has_view_permission: it bulk-loads every view-restricting
    PagePermission once, then classifies each page against the
    CMS_PUBLIC_FOR setting, global view permissions and per-page
    restrictions.

    pages contains all published pages

    check if there is ANY restriction
    that needs a permission page visibility calculation
    """
    public_for = get_cms_setting('PUBLIC_FOR')
    is_setting_public_all = public_for == 'all'
    is_setting_public_staff = public_for == 'staff'
    is_auth_user = request.user.is_authenticated()
    visible_page_ids = []
    # Maps page pk -> list of PagePermission objects restricting it.
    restricted_pages = defaultdict(list)
    page_permissions = PagePermission.objects.filter(can_view=True).select_related(
        'page').prefetch_related('group__' + user_related_name)

    for perm in page_permissions:
        # collect the pages that are affected by permissions
        if site and perm.page.site_id != site.pk:
            continue
        if perm is not None and perm not in restricted_pages[perm.page_id]:
            # affective restricted pages gathering
            # add the page with the perm itself
            if perm.grant_on in [ACCESS_PAGE, ACCESS_PAGE_AND_CHILDREN, ACCESS_PAGE_AND_DESCENDANTS]:
                restricted_pages[perm.page_id].append(perm)
                # Also restrict the published counterpart of the draft page.
                restricted_pages[perm.page.publisher_public_id].append(perm)
            # add children
            if perm.grant_on in [ACCESS_CHILDREN, ACCESS_PAGE_AND_CHILDREN] and perm.page.numchild:
                child_ids = perm.page.get_children().values_list('id', 'publisher_public_id')
                for id, public_id in child_ids:
                    restricted_pages[id].append(perm)
                    restricted_pages[public_id].append(perm)
            # add descendants
            elif perm.grant_on in [ACCESS_DESCENDANTS, ACCESS_PAGE_AND_DESCENDANTS] and perm.page.numchild:
                child_ids = perm.page.get_descendants().values_list('id', 'publisher_public_id')
                for id, public_id in child_ids:
                    restricted_pages[id].append(perm)
                    restricted_pages[public_id].append(perm)

    # anonymous
    # no restriction applied at all
    if (not is_auth_user and
            is_setting_public_all and
            not restricted_pages):
        return [page.pk for page in pages]

    if site is None:
        site = current_site(request)

    # authenticated user and global permission
    if is_auth_user:
        global_view_perms = GlobalPagePermission.objects.user_has_view_permission(
            request.user, site.pk).exists()

        #no page perms edge case - all visible
        if ((is_setting_public_all or (
                is_setting_public_staff and request.user.is_staff)) and
                not restricted_pages and
                not global_view_perms):
            return [page.pk for page in pages]
        #no page perms edge case - none visible
        elif (is_setting_public_staff and
              not request.user.is_staff and
              not restricted_pages and
              not global_view_perms):
            return []

    def has_global_perm():
        # Memoized check of the Django 'cms.view_page' permission
        # (-1 = unknown, 0 = no, 1 = yes).
        if has_global_perm.cache < 0:
            has_global_perm.cache = 1 if request.user.has_perm('cms.view_page') else 0
        return bool(has_global_perm.cache)
    has_global_perm.cache = -1

    def has_permission_membership(page):
        """
        PagePermission user group membership tests
        """
        user_pk = request.user.pk
        page_pk = page.pk
        for perm in restricted_pages[page_pk]:
            if perm.user_id == user_pk:
                return True
            if not perm.group_id:
                continue
            user_set = getattr(perm.group, user_related_name)
            # Optimization equivalent to
            # if user_pk in user_set.values_list('pk', flat=True)
            if any(user_pk == user.pk for user in user_set.all()):
                return True
        return False

    for page in pages:
        to_add = False
        # default to false, showing a restricted page is bad
        # explicitly check all the conditions
        # of settings and permissions
        is_restricted = page.pk in restricted_pages
        # restricted_pages contains as key any page.pk that is
        # affected by a permission grant_on
        if is_auth_user:
            # a global permission was given to the request's user
            if global_view_perms:
                to_add = True
            # setting based handling of unrestricted pages
            elif not is_restricted and (
                is_setting_public_all or (
                    is_setting_public_staff and request.user.is_staff)
            ):
                # authenticated user, no restriction and public for all
                # or
                # authenticated staff user, no restriction and public for staff
                to_add = True
            # check group and user memberships to restricted pages
            elif is_restricted and has_permission_membership(page):
                to_add = True
            elif has_global_perm():
                to_add = True
        # anonymous user, no restriction
        elif not is_restricted and is_setting_public_all:
            to_add = True
        # store it
        if to_add:
            visible_page_ids.append(page.pk)
    return visible_page_ids
def page_to_node(page, home, cut):
    """
    Transform a CMS page into a navigation node.

    :param page: the page you wish to transform
    :param home: a reference to the "home" page (the page with path="0001")
    :param cut: Should we cut page from its parent pages? This means the
        node will not have a parent anymore.
    """
    # These attributes are plain copies; the others are computed below.
    attr = {'soft_root': page.soft_root,
            'auth_required': page.login_required,
            'reverse_id': page.reverse_id, }
    parent_id = page.parent_id
    # Detach the node from its parent when it hangs directly below home
    # and the caller asked for cutting.
    if home and page.parent_id == home.pk and cut:
        parent_id = None

    # Fix: compare against None with ``is`` (PEP 8), not ``==``.
    if page.limit_visibility_in_menu is None:
        # No restriction configured: visible to everybody.
        attr['visible_for_authenticated'] = True
        attr['visible_for_anonymous'] = True
    else:
        attr['visible_for_authenticated'] = page.limit_visibility_in_menu == 1
        attr['visible_for_anonymous'] = page.limit_visibility_in_menu == 2
    attr['is_home'] = page.is_home

    # Extenders can be either navigation extenders or from apphooks.
    extenders = []
    if page.navigation_extenders:
        extenders.append(page.navigation_extenders)
    # Is this page an apphook? If so, we need to handle the apphooks's nodes
    lang = get_language()
    # Only run this if we have a translation in the requested language for this
    # object. The title cache should have been prepopulated in CMSMenu.get_nodes
    # but otherwise, just request the title normally
    if not hasattr(page, 'title_cache') or lang in page.title_cache:
        app_name = page.get_application_urls(fallback=False)
        if app_name:  # it means it is an apphook
            app = apphook_pool.get_apphook(app_name)
            for menu in app.menus:
                extenders.append(menu.__name__)
    if extenders:
        attr['navigation_extenders'] = extenders

    # Do we have a redirectURL?
    attr['redirect_url'] = page.get_redirect()  # save redirect URL if any

    # Now finally, build the NavigationNode object and return it.
    return NavigationNode(
        page.get_menu_title(),
        page.get_absolute_url(),
        page.pk,
        parent_id,
        attr=attr,
        visible=page.in_navigation,
    )
class CMSMenu(Menu):
    """ The main django CMS menu: turns the visible, translated pages of
    the current site into NavigationNode objects. """

    def get_nodes(self, request):
        """ Return the list of NavigationNodes for the current request. """
        page_queryset = get_page_queryset(request)
        site = Site.objects.get_current()
        lang = get_language_from_request(request)

        filters = {
            'site': site,
        }
        # Only require a translation when untranslated pages are hidden.
        if hide_untranslated(lang, site.pk):
            filters['title_set__language'] = lang

        if not use_draft(request):
            page_queryset = page_queryset.published()
        pages = page_queryset.filter(**filters).order_by("path")
        ids = {}
        nodes = []
        first = True
        home_cut = False
        home_children = []
        home = None
        actual_pages = []

        # cache view perms
        visible_pages = get_visible_pages(request, pages, site)
        for page in pages:
            # Pages are ordered by path, therefore the first page is the root
            # of the page tree (a.k.a "home")
            if page.pk not in visible_pages:
                # Don't include pages the user doesn't have access to
                continue
            if not home:
                home = page
            if first and page.pk != home.pk:
                home_cut = True
            if (page.parent_id == home.pk or page.parent_id in home_children) and home_cut:
                home_children.append(page.pk)
            if (page.pk == home.pk and home.in_navigation) or page.pk != home.pk:
                first = False
            ids[page.id] = page
            actual_pages.append(page)
            page.title_cache = {}

        # Fetch the titles in the requested language (plus fallbacks when
        # untranslated pages are shown) and prime each page's title cache.
        langs = [lang]
        if not hide_untranslated(lang):
            langs.extend(get_fallback_languages(lang))

        titles = list(get_title_queryset(request).filter(page__in=ids, language__in=langs))
        for title in titles:  # add the title and slugs and some meta data
            page = ids[title.page_id]
            page.title_cache[title.language] = title

        # Only pages with at least one usable title become nodes.
        for page in actual_pages:
            if page.title_cache:
                nodes.append(page_to_node(page, home, home_cut))
        return nodes

menu_pool.register_menu(CMSMenu)
class NavExtender(Modifier):
    """ Menu modifier that attaches extender menus (navigation extenders
    and apphook menus) below the page nodes that declared them, and
    removes extender nodes that were never attached. """

    def modify(self, request, nodes, namespace, root_id, post_cut, breadcrumb):
        if post_cut:
            return nodes
        # Names of all extender menus that got attached somewhere.
        exts = []
        # rearrange the parent relations
        home = None
        for node in nodes:
            if node.attr.get("is_home", False):
                home = node
            extenders = node.attr.get("navigation_extenders", None)
            if extenders:
                for ext in extenders:
                    if not ext in exts:
                        exts.append(ext)
                    # Re-parent every root node of the extender menu
                    # below the current page node.
                    for extnode in nodes:
                        if extnode.namespace == ext and not extnode.parent_id:# if home has nav extenders but home is not visible
                            if node.attr.get("is_home", False) and not node.visible:
                                extnode.parent_id = None
                                extnode.parent_namespace = None
                                extnode.parent = None
                            else:
                                extnode.parent_id = node.id
                                extnode.parent_namespace = node.namespace
                                extnode.parent = node
                                node.children.append(extnode)
        removed = []
        # find all not assigned nodes
        for menu in menu_pool.menus.items():
            if hasattr(menu[1], 'cms_enabled') and menu[1].cms_enabled and not menu[0] in exts:
                for node in nodes:
                    if node.namespace == menu[0]:
                        removed.append(node)
        if breadcrumb:
            # if breadcrumb and home not in navigation add node
            if breadcrumb and home and not home.visible:
                home.visible = True
                if request.path_info == home.get_absolute_url():
                    home.selected = True
                else:
                    home.selected = False
        # remove all nodes that are nav_extenders and not assigned
        for node in removed:
            nodes.remove(node)
        return nodes

menu_pool.register_modifier(NavExtender)
class SoftRootCutter(Modifier):
    """
    Ask evildmp/superdmp if you don't understand softroots!

    Softroot description from the docs:

        A soft root is a page that acts as the root for a menu navigation tree.
        Typically, this will be a page that is the root of a significant new
        section on your site.

        When the soft root feature is enabled, the navigation menu for any page
        will start at the nearest soft root, rather than at the real root of
        the site's page hierarchy.

        This feature is useful when your site has deep page hierarchies (and
        therefore multiple levels in its navigation trees). In such a case, you
        usually don't want to present site visitors with deep menus of nested
        items.

        For example, you're on the page "Introduction to Bleeding", so the menu
        might look like this:

            School of Medicine
                Medical Education
                Departments
                    Department of Lorem Ipsum
                    Department of Donec Imperdiet
                    Department of Cras Eros
                    Department of Mediaeval Surgery
                        Theory
                        Cures
                            Bleeding
                                Introduction to Bleeding <this is the current page>
                                Bleeding - the scientific evidence
                                Cleaning up the mess
                            Cupping
                            Leaches
                            Maggots
                        Techniques
                        Instruments
                    Department of Curabitur a Purus
                    Department of Sed Accumsan
                    Department of Etiam
                Research
                Administration
                Contact us
                Impressum

        which is frankly overwhelming.

        By making "Department of Mediaeval Surgery" a soft root, the menu
        becomes much more manageable:

            Department of Mediaeval Surgery
                Theory
                Cures
                    Bleeding
                        Introduction to Bleeding <current page>
                        Bleeding - the scientific evidence
                        Cleaning up the mess
                    Cupping
                    Leaches
                    Maggots
                Techniques
                Instruments
    """
    def modify(self, request, nodes, namespace, root_id, post_cut, breadcrumb):
        # only apply this modifier if we're pre-cut (since what we do is cut)
        # or if no id argument is provided, indicating {% show_menu_below_id %}
        if post_cut or root_id:
            return nodes
        selected = None
        root_nodes = []
        # find the selected node as well as all the root nodes
        for node in nodes:
            if node.selected:
                selected = node
            if not node.parent:
                root_nodes.append(node)
        # if we found a selected ...
        if selected:
            # and the selected is a softroot
            if selected.attr.get("soft_root", False):
                # get it's descendants
                nodes = selected.get_descendants()
                # remove the link to parent
                selected.parent = None
                # make the selected page the root in the menu
                nodes = [selected] + nodes
            else:
                # if it's not a soft root, walk ancestors (upwards!)
                nodes = self.find_ancestors_and_remove_children(selected, nodes)
        return nodes
    def find_and_remove_children(self, node, nodes):
        # Prune the subtree under any soft-root child of `node`.
        for child in node.children:
            if child.attr.get("soft_root", False):
                self.remove_children(child, nodes)
        return nodes
    def remove_children(self, node, nodes):
        # Recursively remove node's descendants from the flat node list.
        for child in node.children:
            nodes.remove(child)
            self.remove_children(child, nodes)
        node.children = []
    def find_ancestors_and_remove_children(self, node, nodes):
        """
        Check ancestors of node for soft roots
        """
        if node.parent:
            if node.parent.attr.get("soft_root", False):
                # Nearest soft-root ancestor found: re-root the menu on it.
                nodes = node.parent.get_descendants()
                node.parent.parent = None
                nodes = [node.parent] + nodes
            else:
                nodes = self.find_ancestors_and_remove_children(node.parent, nodes)
        else:
            # Reached a real root: cut soft-root subtrees of the other roots.
            for newnode in nodes:
                if newnode != node and not newnode.parent:
                    self.find_and_remove_children(newnode, nodes)
        for child in node.children:
            if child != node:
                self.find_and_remove_children(child, nodes)
        return nodes
menu_pool.register_modifier(SoftRootCutter)
| amaozhao/basecms | cms/menu.py | Python | mit | 18,035 |
import struct
import time
class system_profiler:
    """Reads basic system-information globals out of a memory image.

    Wraps an x86 PAE address-space reader; every symbol address is offset by
    ``base_address`` before reading.
    """

    def __init__(self, x86_mem_pae, base_address):
        # Reader object exposing read(address, length) over the memory image.
        self.x86_mem_pae = x86_mem_pae
        # Load offset added to every symbol address before reading.
        self.base_address = base_address

    def machine_info(self, sym_addr):
        """Unpack the 40-byte _machine_info struct (__DATA.__common)."""
        raw = self.x86_mem_pae.read(sym_addr + self.base_address, 40)
        return struct.unpack('IIIIQIIII', raw)

    def sw_vers(self, sym_addr):
        """Return the 10-byte _osversion field (__DATA.__common) as a 1-tuple."""
        raw = self.x86_mem_pae.read(sym_addr + self.base_address, 10)
        return struct.unpack('10s', raw)

    def get_gmtime(self, sym_addr):
        """Read a 32-bit epoch timestamp and format it as a GMT time string."""
        raw = self.x86_mem_pae.read(sym_addr + self.base_address, 4)
        (epoch,) = struct.unpack('i', raw)
        return time.strftime("%a %b %d %H:%M:%S %Y", time.gmtime(epoch))
#################################### PUBLIC FUNCTIONS ####################################
def get_system_profile(x86_mem_pae, sw_vers, machine_info, boottime, sleeptime, waketime, base_address):
    """Print basic Mac OS X information from a memory image (Python 2 syntax).

    The sw_vers/machine_info/boottime/sleeptime/waketime arguments are kernel
    symbol addresses; base_address is the load offset applied by
    system_profiler before each read.
    """
    Sys_Profile = system_profiler(x86_mem_pae, base_address)
    print '[+] Mac OS X Basic Information'
    # _osversion build string is NUL-padded; strip padding before printing.
    sw_ver_data = Sys_Profile.sw_vers(sw_vers)[0]
    print ' [-] Darwin kernel Build Number: %s'%sw_ver_data.strip('\x00')
    data = Sys_Profile.machine_info(machine_info)
    print ' [-] Darwin Kernel Major Version: %d'%data[0]
    print ' [-] Darwin Kernel Minor Version: %d'%data[1]
    print ' [-] Number of Physical CPUs: %d'%data[2]
    print ' [-] Size of memory in bytes: %d bytes'%data[3]
    print ' [-] Size of physical memory: %d bytes'%data[4]
    print ' [-] Number of physical CPUs now available: %d'%data[5]
    print ' [-] Max number of physical CPUs now possible: %d'%data[6]
    print ' [-] Number of logical CPUs now available: %d'%data[7]
    print ' [-] Max number of logical CPUs now possible: %d'%data[8]
    # NOTE(review): 0 presumably means "boottime symbol unavailable" -- confirm
    # against callers before relying on it.
    if boottime != 0:
        print ' [-] Kernel Boot Time: %s (GMT +0)'%Sys_Profile.get_gmtime(boottime) # n0fate's Idea
        #print ' [-] Kernel Boot Time: %s (GMT +0)'%tsb
    print ' [-] Last Hibernated Sleep Time: %s (GMT +0)'%Sys_Profile.get_gmtime(sleeptime) # CL's Idea
print ' [-] Last Hibernated Wake Time: %s (GMT +0)'%Sys_Profile.get_gmtime(waketime) # CL's Idea | jevinskie/volafox | volafox/plugins/system_profiler.py | Python | gpl-2.0 | 2,335 |
from django.shortcuts import render
from django.contrib.auth.decorators import permission_required
from django.conf import settings
from django.template import RequestContext
from django.template.loader import render_to_string
from wagtail.wagtailadmin import hooks
from wagtail.wagtailcore.models import Page, PageRevision, UserPagePermissionsProxy
from wagtail.wagtaildocs.models import Document
from wagtail.wagtailimages.models import get_image_model
# Panels for the homepage
class SiteSummaryPanel(object):
    """Admin homepage panel showing total counts of pages, images and documents."""
    name = 'site_summary'
    order = 100

    def __init__(self, request):
        self.request = request

    def render(self):
        summary = {
            'total_pages': Page.objects.count() - 1,  # subtract 1 because the root node is not a real page
            'total_images': get_image_model().objects.count(),
            'total_docs': Document.objects.count(),
        }
        return render_to_string('wagtailadmin/home/site_summary.html',
                                summary, RequestContext(self.request))
class PagesForModerationPanel(object):
    """Admin homepage panel listing revisions awaiting this user's moderation."""
    name = 'pages_for_moderation'
    order = 200

    def __init__(self, request):
        self.request = request
        perms = UserPagePermissionsProxy(request.user)
        revisions = perms.revisions_for_moderation()
        self.page_revisions_for_moderation = (
            revisions.select_related('page', 'user').order_by('-created_at')
        )

    def render(self):
        context = {
            'page_revisions_for_moderation': self.page_revisions_for_moderation,
        }
        return render_to_string('wagtailadmin/home/pages_for_moderation.html',
                                context, RequestContext(self.request))
class RecentEditsPanel(object):
    """Admin homepage panel showing the current user's most recent page edits."""
    name = 'recent_edits'
    order = 300
    def __init__(self, request):
        self.request = request
        # Last n edited pages
        # NOTE(review): the user filter sits in the JOIN condition rather than
        # a WHERE clause -- presumably intentional (keeps only this user's
        # latest revision per page), but verify against the expected results.
        self.last_edits = PageRevision.objects.raw(
            """
            select wp.* FROM
                wagtailcore_pagerevision wp JOIN (
                    SELECT max(created_at) as max_created_at, page_id FROM wagtailcore_pagerevision group by page_id
                ) as max_rev on max_rev.max_created_at = wp.created_at and wp.user_id = %s order by wp.created_at desc
            """, [request.user.id])[:5]
    def render(self):
        return render_to_string('wagtailadmin/home/recent_edits.html', {
            'last_edits': self.last_edits,
        }, RequestContext(self.request))
@permission_required('wagtailadmin.access_admin')
def home(request):
    """Render the admin homepage with its hook-extensible dashboard panels."""
    panels = [
        SiteSummaryPanel(request),
        PagesForModerationPanel(request),
        RecentEditsPanel(request),
    ]
    # Third-party apps may add to or modify the panel list via this hook.
    for hook_fn in hooks.get_hooks('construct_homepage_panels'):
        hook_fn(request, panels)
    context = {
        'site_name': settings.WAGTAIL_SITE_NAME,
        'panels': sorted(panels, key=lambda panel: panel.order),
        'user': request.user
    }
    return render(request, "wagtailadmin/home.html", context)
def error_test(request):
    """Raise deliberately so error reporting can be exercised end-to-end."""
    message = "This is a test of the emergency broadcast system."
    raise Exception(message)
| suziesparkle/wagtail | wagtail/wagtailadmin/views/home.py | Python | bsd-3-clause | 2,933 |
import os
import unittest
import xml.etree.ElementTree
from conans.client.generators import VisualStudioLegacyGenerator
from conans.model.build_info import CppInfo
from conans.model.conan_file import ConanFile
from conans.model.env_info import EnvValues
from conans.model.ref import ConanFileReference
from conans.model.settings import Settings
from conans.test.utils.test_files import temp_folder
from conans.test.utils.tools import TestBufferConanOutput
class VisualStudioLegacyGeneratorTest(unittest.TestCase):
    """Checks the legacy Visual Studio generator emits well-formed XML."""

    def valid_xml_test(self):
        conanfile = ConanFile(TestBufferConanOutput(), None)
        conanfile.initialize(Settings({}), EnvValues())
        package_roots = []
        # Register two packages (one with dots/dashes in its name), each with
        # real include/ and lib/ directories on disk.
        for ref_text in ("MyPkg/0.1@user/testing", "My.Fancy-Pkg_2/0.1@user/testing"):
            ref = ConanFileReference.loads(ref_text)
            root = temp_folder().replace("\\", "/")
            os.makedirs(os.path.join(root, "include"))
            os.makedirs(os.path.join(root, "lib"))
            conanfile.deps_cpp_info.update(CppInfo(root), ref.name)
            package_roots.append(root)
        folder1, folder2 = package_roots
        generator = VisualStudioLegacyGenerator(conanfile)
        content = generator.content
        # The generated property sheet must parse as XML.
        xml.etree.ElementTree.fromstring(content)
        self.assertIn('AdditionalIncludeDirectories=""%s/include";"%s/include";"'
                      % (folder1, folder2), content)
        self.assertIn('AdditionalLibraryDirectories=""%s/lib";"%s/lib";"'
                      % (folder1, folder2), content)
| memsharded/conan | conans/test/unittests/client/generators/visual_studio_legacy_test.py | Python | mit | 1,805 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: make Movie.studio an optional (nullable) FK."""
    def forwards(self, orm):
        # Changing field 'Movie.studio'
        # Allow movies with no studio: column becomes NULLable.
        db.alter_column(u'movie_library_movie', 'studio_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['movie_library.Studio'], null=True))
    def backwards(self, orm):
        # Changing field 'Movie.studio'
        # Revert to NOT NULL, backfilling with an empty-string default.
        db.alter_column(u'movie_library_movie', 'studio_id', self.gf('django.db.models.fields.related.ForeignKey')(default='', to=orm['movie_library.Studio']))
    # Frozen ORM snapshot South uses to reconstruct models at migration time.
    models = {
        u'movie_library.actor': {
            'Meta': {'object_name': 'Actor'},
            'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
        },
        u'movie_library.director': {
            'Meta': {'object_name': 'Director'},
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
            'nick_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'})
        },
        u'movie_library.genre': {
            'Meta': {'object_name': 'Genre'},
            'explanation': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'movie_library.movie': {
            'Meta': {'object_name': 'Movie'},
            'actor': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['movie_library.Actor']", 'symmetrical': 'False'}),
            'cover_art': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
            'director': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['movie_library.Director']", 'symmetrical': 'False'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'release_year': ('django.db.models.fields.IntegerField', [], {'blank': 'True'}),
            'studio': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['movie_library.Studio']", 'null': 'True', 'blank': 'True'}),
            'synopsis': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'writer': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['movie_library.Writer']", 'symmetrical': 'False'})
        },
        u'movie_library.studio': {
            'Meta': {'object_name': 'Studio'},
            'address': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'city': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'country': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
            'state_province': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
        },
        u'movie_library.writer': {
            'Meta': {'object_name': 'Writer'},
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        }
    }
complete_apps = ['movie_library'] | atimothee/django-playground | django_playground/movie_library/migrations/0006_auto__chg_field_movie_studio.py | Python | bsd-3-clause | 4,380 |
"""
WSGI config for temperature project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# Must be set before any Django machinery is imported or configured.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "temperature.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
# Module-level WSGI entry point discovered by WSGI servers.
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| smithdtyler/hautomation | server/temperature/temperature/wsgi.py | Python | gpl-2.0 | 1,144 |
def func(self):
    """Sleep for six seconds, then return the "VERSION" marker string.

    NOTE(review): the 6-second delay is presumably deliberate (exercising
    client-side timeouts in httptest) -- confirm before shortening/removing.
    """
    import time
    time.sleep(6)
    return "VERSION"
| sahlinet/httptest | version.py | Python | mit | 71 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import unittest
from collections import namedtuple
from unittest import mock
from unittest.mock import MagicMock
import pytest
from azure.mgmt.containerinstance.models import ContainerState, Event
from airflow.exceptions import AirflowException
from airflow.providers.microsoft.azure.operators.azure_container_instances import (
AzureContainerInstancesOperator,
)
def make_mock_cg(container_state, events=None):
    """
    Build a stand-in Container Group, since the underlying Azure models have
    read-only attributes.
    See https://docs.microsoft.com/en-us/rest/api/container-instances/containergroups
    """
    InstanceView = namedtuple("InstanceView", ["current_state", "events"])
    Container = namedtuple("Container", ["instance_view"])
    ContainerGroup = namedtuple("ContainerGroup", ["containers"])

    view = InstanceView(current_state=container_state, events=events or [])
    return ContainerGroup(containers=[Container(instance_view=view)])
def make_mock_cg_with_missing_events(container_state):
    """
    Build a stand-in Container Group whose instance view carries no events,
    since the underlying Azure models have read-only attributes.
    See https://docs.microsoft.com/en-us/rest/api/container-instances/containergroups
    This can happen when the container group is provisioned, but not started.
    """
    InstanceView = namedtuple("InstanceView", ["current_state", "events"])
    Container = namedtuple("Container", ["instance_view"])
    ContainerGroup = namedtuple("ContainerGroup", ["containers"])

    view = InstanceView(current_state=container_state, events=None)
    return ContainerGroup(containers=[Container(instance_view=view)])
class TestACIOperator(unittest.TestCase):
    """Unit tests for AzureContainerInstancesOperator, with the ACI hook mocked."""
    @mock.patch(
        "airflow.providers.microsoft.azure.operators.azure_container_instances.AzureContainerInstanceHook"
    )
    def test_execute(self, aci_mock):
        """Happy path: group is created with defaults, polled, then deleted."""
        expected_c_state = ContainerState(state='Terminated', exit_code=0, detail_status='test')
        expected_cg = make_mock_cg(expected_c_state)
        aci_mock.return_value.get_state.return_value = expected_cg
        aci_mock.return_value.exists.return_value = False
        aci = AzureContainerInstancesOperator(
            ci_conn_id=None,
            registry_conn_id=None,
            resource_group='resource-group',
            name='container-name',
            image='container-image',
            region='region',
            task_id='task',
        )
        aci.execute(None)
        assert aci_mock.return_value.create_or_update.call_count == 1
        (called_rg, called_cn, called_cg), _ = aci_mock.return_value.create_or_update.call_args
        assert called_rg == 'resource-group'
        assert called_cn == 'container-name'
        assert called_cg.location == 'region'
        assert called_cg.image_registry_credentials is None
        assert called_cg.restart_policy == 'Never'
        assert called_cg.os_type == 'Linux'
        called_cg_container = called_cg.containers[0]
        assert called_cg_container.name == 'container-name'
        assert called_cg_container.image == 'container-image'
        assert aci_mock.return_value.delete.call_count == 1
    @mock.patch(
        "airflow.providers.microsoft.azure.operators.azure_container_instances.AzureContainerInstanceHook"
    )
    def test_execute_with_failures(self, aci_mock):
        """A non-zero container exit code raises, but the group is still deleted."""
        expected_c_state = ContainerState(state='Terminated', exit_code=1, detail_status='test')
        expected_cg = make_mock_cg(expected_c_state)
        aci_mock.return_value.get_state.return_value = expected_cg
        aci_mock.return_value.exists.return_value = False
        aci = AzureContainerInstancesOperator(
            ci_conn_id=None,
            registry_conn_id=None,
            resource_group='resource-group',
            name='container-name',
            image='container-image',
            region='region',
            task_id='task',
        )
        with pytest.raises(AirflowException):
            aci.execute(None)
        assert aci_mock.return_value.delete.call_count == 1
    @mock.patch(
        "airflow.providers.microsoft.azure.operators.azure_container_instances.AzureContainerInstanceHook"
    )
    def test_execute_with_tags(self, aci_mock):
        """Tags passed to the operator are forwarded to the container group."""
        expected_c_state = ContainerState(state='Terminated', exit_code=0, detail_status='test')
        expected_cg = make_mock_cg(expected_c_state)
        tags = {"testKey": "testValue"}
        aci_mock.return_value.get_state.return_value = expected_cg
        aci_mock.return_value.exists.return_value = False
        aci = AzureContainerInstancesOperator(
            ci_conn_id=None,
            registry_conn_id=None,
            resource_group='resource-group',
            name='container-name',
            image='container-image',
            region='region',
            task_id='task',
            tags=tags,
        )
        aci.execute(None)
        assert aci_mock.return_value.create_or_update.call_count == 1
        (called_rg, called_cn, called_cg), _ = aci_mock.return_value.create_or_update.call_args
        assert called_rg == 'resource-group'
        assert called_cn == 'container-name'
        assert called_cg.location == 'region'
        assert called_cg.image_registry_credentials is None
        assert called_cg.restart_policy == 'Never'
        assert called_cg.os_type == 'Linux'
        assert called_cg.tags == tags
        called_cg_container = called_cg.containers[0]
        assert called_cg_container.name == 'container-name'
        assert called_cg_container.image == 'container-image'
        assert aci_mock.return_value.delete.call_count == 1
    @mock.patch(
        "airflow.providers.microsoft.azure.operators.azure_container_instances.AzureContainerInstanceHook"
    )
    def test_execute_with_messages_logs(self, aci_mock):
        """State is polled until Terminated; logs are fetched on each poll."""
        events = [Event(message="test"), Event(message="messages")]
        expected_c_state1 = ContainerState(state='Succeeded', exit_code=0, detail_status='test')
        expected_cg1 = make_mock_cg(expected_c_state1, events)
        expected_c_state2 = ContainerState(state='Running', exit_code=0, detail_status='test')
        expected_cg2 = make_mock_cg(expected_c_state2, events)
        expected_c_state3 = ContainerState(state='Terminated', exit_code=0, detail_status='test')
        expected_cg3 = make_mock_cg(expected_c_state3, events)
        aci_mock.return_value.get_state.side_effect = [expected_cg1, expected_cg2, expected_cg3]
        aci_mock.return_value.get_logs.return_value = ["test", "logs"]
        aci_mock.return_value.exists.return_value = False
        aci = AzureContainerInstancesOperator(
            ci_conn_id=None,
            registry_conn_id=None,
            resource_group='resource-group',
            name='container-name',
            image='container-image',
            region='region',
            task_id='task',
        )
        aci.execute(None)
        assert aci_mock.return_value.create_or_update.call_count == 1
        assert aci_mock.return_value.get_state.call_count == 3
        assert aci_mock.return_value.get_logs.call_count == 3
        assert aci_mock.return_value.delete.call_count == 1
    def test_name_checker(self):
        """Container-group name validation: charset, length, dash placement."""
        valid_names = ['test-dash', 'name-with-length---63' * 3]
        invalid_names = [
            'test_underscore',
            'name-with-length---84' * 4,
            'name-ending-with-dash-',
            '-name-starting-with-dash',
        ]
        for name in invalid_names:
            with pytest.raises(AirflowException):
                AzureContainerInstancesOperator._check_name(name)
        for name in valid_names:
            checked_name = AzureContainerInstancesOperator._check_name(name)
            assert checked_name == name
    @mock.patch(
        "airflow.providers.microsoft.azure.operators.azure_container_instances.AzureContainerInstanceHook"
    )
    def test_execute_with_ipaddress(self, aci_mock):
        """An explicit ip_address is forwarded to the container group."""
        expected_c_state = ContainerState(state='Terminated', exit_code=0, detail_status='test')
        expected_cg = make_mock_cg(expected_c_state)
        ipaddress = MagicMock()
        aci_mock.return_value.get_state.return_value = expected_cg
        aci_mock.return_value.exists.return_value = False
        aci = AzureContainerInstancesOperator(
            ci_conn_id=None,
            registry_conn_id=None,
            resource_group='resource-group',
            name='container-name',
            image='container-image',
            region='region',
            task_id='task',
            ip_address=ipaddress,
        )
        aci.execute(None)
        assert aci_mock.return_value.create_or_update.call_count == 1
        (_, _, called_cg), _ = aci_mock.return_value.create_or_update.call_args
        assert called_cg.ip_address == ipaddress
    @mock.patch(
        "airflow.providers.microsoft.azure.operators.azure_container_instances.AzureContainerInstanceHook"
    )
    def test_execute_with_windows_os_and_diff_restart_policy(self, aci_mock):
        """Non-default os_type/restart_policy values are forwarded unchanged."""
        expected_c_state = ContainerState(state='Terminated', exit_code=0, detail_status='test')
        expected_cg = make_mock_cg(expected_c_state)
        aci_mock.return_value.get_state.return_value = expected_cg
        aci_mock.return_value.exists.return_value = False
        aci = AzureContainerInstancesOperator(
            ci_conn_id=None,
            registry_conn_id=None,
            resource_group='resource-group',
            name='container-name',
            image='container-image',
            region='region',
            task_id='task',
            restart_policy="Always",
            os_type='Windows',
        )
        aci.execute(None)
        assert aci_mock.return_value.create_or_update.call_count == 1
        (_, _, called_cg), _ = aci_mock.return_value.create_or_update.call_args
        assert called_cg.restart_policy == 'Always'
        assert called_cg.os_type == 'Windows'
    @mock.patch(
        "airflow.providers.microsoft.azure.operators.azure_container_instances.AzureContainerInstanceHook"
    )
    def test_execute_fails_with_incorrect_os_type(self, aci_mock):
        """An unsupported os_type is rejected at construction time."""
        expected_c_state = ContainerState(state='Terminated', exit_code=0, detail_status='test')
        expected_cg = make_mock_cg(expected_c_state)
        aci_mock.return_value.get_state.return_value = expected_cg
        aci_mock.return_value.exists.return_value = False
        with pytest.raises(AirflowException) as ctx:
            AzureContainerInstancesOperator(
                ci_conn_id=None,
                registry_conn_id=None,
                resource_group='resource-group',
                name='container-name',
                image='container-image',
                region='region',
                task_id='task',
                os_type='MacOs',
            )
        assert (
            str(ctx.value) == "Invalid value for the os_type argument. "
            "Please set 'Linux' or 'Windows' as the os_type. "
            "Found `MacOs`."
        )
    @mock.patch(
        "airflow.providers.microsoft.azure.operators.azure_container_instances.AzureContainerInstanceHook"
    )
    def test_execute_fails_with_incorrect_restart_policy(self, aci_mock):
        """An unsupported restart_policy is rejected at construction time."""
        expected_c_state = ContainerState(state='Terminated', exit_code=0, detail_status='test')
        expected_cg = make_mock_cg(expected_c_state)
        aci_mock.return_value.get_state.return_value = expected_cg
        aci_mock.return_value.exists.return_value = False
        with pytest.raises(AirflowException) as ctx:
            AzureContainerInstancesOperator(
                ci_conn_id=None,
                registry_conn_id=None,
                resource_group='resource-group',
                name='container-name',
                image='container-image',
                region='region',
                task_id='task',
                restart_policy='Everyday',
            )
        assert (
            str(ctx.value) == "Invalid value for the restart_policy argument. "
            "Please set one of 'Always', 'OnFailure','Never' as the restart_policy. "
            "Found `Everyday`"
        )
    @mock.patch(
        "airflow.providers.microsoft.azure.operators.azure_container_instances.AzureContainerInstanceHook"
    )
    @mock.patch('airflow.providers.microsoft.azure.operators.azure_container_instances.sleep')
    def test_execute_correct_sleep_cycle(self, sleep_mock, aci_mock):
        """The operator sleeps between polls but not after the terminal state."""
        expected_c_state1 = ContainerState(state='Running', exit_code=0, detail_status='test')
        expected_cg1 = make_mock_cg(expected_c_state1)
        expected_c_state2 = ContainerState(state='Terminated', exit_code=0, detail_status='test')
        expected_cg2 = make_mock_cg(expected_c_state2)
        aci_mock.return_value.get_state.side_effect = [expected_cg1, expected_cg1, expected_cg2]
        aci_mock.return_value.exists.return_value = False
        aci = AzureContainerInstancesOperator(
            ci_conn_id=None,
            registry_conn_id=None,
            resource_group='resource-group',
            name='container-name',
            image='container-image',
            region='region',
            task_id='task',
        )
        aci.execute(None)
        # sleep is called at the end of cycles. Thus, the Terminated call does not trigger sleep
        assert sleep_mock.call_count == 2
    @mock.patch(
        "airflow.providers.microsoft.azure.operators.azure_container_instances.AzureContainerInstanceHook"
    )
    @mock.patch("logging.Logger.exception")
    def test_execute_with_missing_events(self, log_mock, aci_mock):
        """A provisioned-but-not-started group (events=None) must not log errors."""
        expected_c_state1 = ContainerState(state='Running', exit_code=0, detail_status='test')
        expected_cg1 = make_mock_cg_with_missing_events(expected_c_state1)
        expected_c_state2 = ContainerState(state='Terminated', exit_code=0, detail_status='test')
        expected_cg2 = make_mock_cg(expected_c_state2)
        aci_mock.return_value.get_state.side_effect = [expected_cg1, expected_cg2]
        aci_mock.return_value.exists.return_value = False
        aci = AzureContainerInstancesOperator(
            ci_conn_id=None,
            registry_conn_id=None,
            resource_group='resource-group',
            name='container-name',
            image='container-image',
            region='region',
            task_id='task',
        )
        aci.execute(None)
        assert log_mock.call_count == 0
| apache/incubator-airflow | tests/providers/microsoft/azure/operators/test_azure_container_instances.py | Python | apache-2.0 | 15,580 |
#!/usr/bin/env python
# coding: utf-8
"""
Read Employee data to return turnover information.
This is a example Python program to read and process XML files.
"""
class Employees:
    """ Read Employee data to return turnover information. """

    __version__ = '0.3.0'

    def __init__(self, infile=None):
        # Parsed ElementTree of the employee XML (None until loaded).
        self.employees = None
        if infile is not None:
            self.loadFromFile(infile)

    def loadFromFile(self, infile):
        """Parse employee XML from *infile* (an open file object or a path)."""
        from xml.etree import ElementTree
        # Accept an open file object (parse via its .name, as before) or any
        # source ElementTree.parse understands (path, file-like object).
        # The previous isinstance(infile, file) check relied on the
        # Python-2-only ``file`` builtin; this works on Python 2 and 3.
        name = getattr(infile, 'name', None)
        if name is not None and hasattr(infile, 'read'):
            self.employees = ElementTree.parse(name)
        else:
            self.employees = ElementTree.parse(infile)

    def dump(self):
        """Return the loaded XML document serialized as a string."""
        import sys
        from xml.etree import ElementTree
        try:
            from StringIO import StringIO  # Python 2
        except ImportError:
            from io import StringIO  # Python 3
        stdold, stdnew = sys.stdout, StringIO()
        try:
            # ElementTree.dump only writes to stdout, so capture it.
            sys.stdout = stdnew
            ElementTree.dump(self.employees)
        finally:
            # Always restore stdout, even if dumping raises.
            sys.stdout = stdold
        return stdnew.getvalue()

    @staticmethod
    def _total_turnover(years):
        """Sum the integer text of <year> elements; None when there are none."""
        if not years:
            return None
        return sum(int(year.text) for year in years)

    def getById(self, id):
        """ Returns turnover for all years for an employee. """
        years = self.employees.findall(
            "employee[@id='" + str(id) + "']/turnover/year")
        return self._total_turnover(years)

    def getByName(self, name):
        """ Returns turnover for all years for an employee. """
        years = self.employees.findall(
            "employee[@name='" + name + "']/turnover/year")
        return self._total_turnover(years)

    def getByYear(self, name, year):
        """ Returns turnover for an employees by year. """
        years = self.employees.findall(
            "employee[@name='%s']/turnover/year[@id='%s']" % (name, year))
        return self._total_turnover(years)

    def getTotalByYear(self, year):
        """ Returns turnover for all employees by year. """
        years = self.employees.findall(
            "employee/turnover/year[@id='" + str(year) + "']")
        return self._total_turnover(years)
| frankhjung/python-xml | employees/employees.py | Python | gpl-3.0 | 2,544 |
# -*- coding: utf-8 -*-
#
# Copyright © 2017 Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
"""Tests for lineprofiler.py."""
# Standard library imports
import os
# Third party imports
from pytestqt import qtbot
from qtpy.QtCore import Qt
from spyder.utils.qthelpers import qapplication
MAIN_APP = qapplication()
# Local imports
from spyder_line_profiler.widgets.lineprofiler import LineProfilerWidget
try:
from unittest.mock import Mock
except ImportError:
from mock import Mock # Python 2
TEST_SCRIPT = \
"""import time
@profile
def foo():
time.sleep(1)
xs = []
for k in range(100):
xs = xs + ['x']
foo()"""
def test_profile_and_display_results(qtbot, tmpdir, monkeypatch):
    """Run profiler on simple script and check that results are okay."""
    os.chdir(tmpdir.strpath)
    testfilename = tmpdir.join('test_foo.py').strpath
    with open(testfilename, 'w') as f:
        f.write(TEST_SCRIPT)
    # Patch QMessageBox before the widget runs, so any error dialog the
    # profiler tries to show is captured instead of blocking the test.
    MockQMessageBox = Mock()
    monkeypatch.setattr('spyder_line_profiler.widgets.lineprofiler.QMessageBox',
                        MockQMessageBox)
    widget = LineProfilerWidget(None)
    qtbot.addWidget(widget)
    # Profiling runs asynchronously; wait for the finished signal.
    with qtbot.waitSignal(widget.sig_finished, timeout=10000, raising=True):
        widget.analyze(testfilename)
    # No error dialog should have been raised during profiling.
    MockQMessageBox.assert_not_called()
    dt = widget.datatree
    assert dt.topLevelItemCount() == 1 # number of functions profiled
    top = dt.topLevelItem(0)
    assert top.data(0, Qt.DisplayRole).startswith('foo ')
    assert top.childCount() == 6
    for i in range(6):
        assert top.child(i).data(0, Qt.DisplayRole) == i + 2 # line no
    assert top.child(2).data(1, Qt.DisplayRole) == '1' # hits
    assert top.child(3).data(1, Qt.DisplayRole) == '1'
    assert top.child(4).data(1, Qt.DisplayRole) == '101'
    assert top.child(5).data(1, Qt.DisplayRole) == '100'
    # Timing columns: the sleep(1) line dominates; loose bounds for CI jitter.
    assert float(top.child(2).data(2, Qt.DisplayRole)) >= 900 # time (ms)
    assert float(top.child(2).data(2, Qt.DisplayRole)) <= 1200
    assert float(top.child(3).data(2, Qt.DisplayRole)) <= 100
    assert float(top.child(4).data(2, Qt.DisplayRole)) <= 100
    assert float(top.child(5).data(2, Qt.DisplayRole)) <= 100
| spyder-ide/spyder.line_profiler | spyder_line_profiler/widgets/tests/test_lineprofiler.py | Python | mit | 2,287 |
#!/usr/bin/python
# Launcher script: starts the Shuttl Jetty server as a child process and
# forwards termination signals to it so Splunk can stop it cleanly.
import subprocess
import signal
import os
# SPLUNK_HOME must be set by the calling environment (Splunk sets it).
splunk_home = os.environ['SPLUNK_HOME']
os.chdir(splunk_home + "/etc/apps/shuttl/bin")
# 'exec' makes the JVM replace the intermediate shell, so process.pid /
# process.kill() target the JVM itself rather than a wrapper shell.
start_shuttl_server = "exec $JAVA_HOME/bin/java -Djetty.home=. -Dsplunk.home=../../../../ -cp .:../lib/*:./* com.splunk.shuttl.server.ShuttlJettyServer"
process = subprocess.Popen(start_shuttl_server, shell=True, stdout=subprocess.PIPE)
print("Started Shuttl pid: " + str(process.pid))
def handle_signals(a, b):
    # Signal handler (signum, frame): kill the child so we don't orphan it.
    print("Will kill Shuttl [" + str(process.pid) + "]")
    process.kill()
signal.signal(signal.SIGTERM, handle_signals)
signal.signal(signal.SIGQUIT, handle_signals)
signal.signal(signal.SIGINT, handle_signals)
print("Waiting for Shuttl [" + str(process.pid) + "]")
# Block until the server exits (or we are signalled and kill it).
process.wait()
| splunk/splunk-shuttl | package/bin/start.py | Python | apache-2.0 | 749 |
from __future__ import absolute_import
import operator
from jsonfield import JSONField
from django.db import models
from django.db.models import Q
from django.utils import timezone
from sentry.db.models import Model, sane_repr
from sentry.db.models.fields import FlexibleForeignKey
from sentry.ownership.grammar import load_schema
class ProjectOwnership(Model):
    """Per-project issue-ownership rules (one row per project).

    ``raw`` holds the user-entered rule text, ``schema`` its parsed form
    (consumed by ``load_schema``). ``fallthrough`` controls whether an
    event that matches no rule is owned by everyone or by nobody.
    """
    __core__ = True
    project = FlexibleForeignKey('sentry.Project', unique=True)
    raw = models.TextField(null=True)
    schema = JSONField(null=True)
    fallthrough = models.BooleanField(default=True)
    date_created = models.DateTimeField(default=timezone.now)
    last_updated = models.DateTimeField(default=timezone.now)
    is_active = models.BooleanField(default=True)
    # An object to indicate ownership is implicitly everyone
    Everyone = object()
    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_projectownership'
    __repr__ = sane_repr('project_id', 'is_active')
    @classmethod
    def get_owners(cls, project_id, data):
        """
        For a given project_id, and event data blob.
        If Everyone is returned, this means we implicitly are
        falling through our rules and everyone is responsible.
        If an empty list is returned, this means there are explicitly
        no owners.

        Returns a 2-tuple of (owners, matched_rules); matched_rules is
        None when no rule matched.
        """
        try:
            ownership = cls.objects.get(project_id=project_id)
        except cls.DoesNotExist:
            # No row yet: behave as a default (fallthrough=True) config.
            ownership = cls(
                project_id=project_id,
            )
        rules = []
        if ownership.schema is not None:
            for rule in load_schema(ownership.schema):
                if rule.test(data):
                    rules.append(rule)
        if not rules:
            # Parses as ((Everyone if fallthrough else []), None) — a tuple.
            return cls.Everyone if ownership.fallthrough else [], None
        owners = {o for rule in rules for o in rule.owners}
        # NOTE(review): on Python 3 ``filter`` returns an iterator, not a
        # list — verify callers do not index the first element.
        return filter(None, resolve_actors(owners, project_id).values()), rules
def resolve_actors(owners, project_id):
    """Convert a collection of Owner objects into a {Owner: Actor} dict.

    Owners that cannot be resolved to an active user or a team of the
    given project map to None.
    """
    # Bug fix: ``reduce`` was used as a builtin, which raises NameError on
    # Python 3. functools.reduce exists on both Python 2 and 3.
    from functools import reduce
    # Local imports (original style) to avoid circular imports.
    from sentry.api.fields.actor import Actor
    from sentry.models import User, Team
    if not owners:
        return {}
    users, teams = [], []
    owners_lookup = {}
    for owner in owners:
        # teams aren't technical case insensitive, but teams also
        # aren't allowed to have non-lowercase in slugs, so
        # this kinda works itself out correctly since they won't match
        owners_lookup[(owner.type, owner.identifier.lower())] = owner
        if owner.type == 'user':
            users.append(owner)
        elif owner.type == 'team':
            teams.append(owner)
    actors = {}
    if users:
        # Resolve user owners by (case-insensitive) email, restricted to
        # active members of the project's organization teams.
        actors.update({
            ('user', email.lower()): Actor(u_id, User)
            for u_id, email in User.objects.filter(
                reduce(
                    operator.or_,
                    [Q(emails__email__iexact=o.identifier) for o in users]
                ),
                # We don't require verified emails
                # emails__is_verified=True,
                is_active=True,
                sentry_orgmember_set__organizationmemberteam__team__projectteam__project_id=project_id,
            ).distinct().values_list('id', 'emails__email')
        })
    if teams:
        # Resolve team owners by slug within this project.
        actors.update({
            ('team', slug): Actor(t_id, Team)
            for t_id, slug in Team.objects.filter(
                slug__in=[o.identifier for o in teams],
                projectteam__project_id=project_id,
            ).values_list('id', 'slug')
        })
    return {
        o: actors.get((o.type, o.identifier.lower()))
        for o in owners
    }
| ifduyue/sentry | src/sentry/models/projectownership.py | Python | bsd-3-clause | 3,743 |
# cerbero - a multi-platform build system for Open Source software
# Copyright (C) 2012 Andoni Morales Alastruey <ylatuya@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import os
import subprocess
from cerbero.config import Platform
from cerbero.commands import Command, register_command
from cerbero.utils import N_
class Shell(Command):
    """Cerbero 'shell' command: open an interactive shell with the build
    environment set up."""
    doc = N_('Starts a shell with the build environment')
    name = 'shell'

    def __init__(self):
        Command.__init__(self, [])

    def run(self, config, args):
        if config.platform == Platform.WINDOWS:
            # Default MSYS console launcher lives two directories above the
            # user's home: $MINGW_PREFIX/home/username
            msyscon = os.environ.get('MSYSCON',
                os.path.join(os.path.expanduser('~'), '..', '..', 'msys.bat'))
            # Bug fix: the original tested for the misspelled 'mysys.bat'
            # (never matched) and then referenced the undefined name
            # 'msys', which would have raised NameError.
            if 'msys.bat' in msyscon:
                msyscon = '%s -noxvrt' % msyscon
            subprocess.check_call(msyscon)
        else:
            # POSIX: replace this process with the user's shell.
            shell = os.environ.get('SHELL', '/bin/bash')
            os.execlp(shell, shell)
| freedesktop-unofficial-mirror/gstreamer-sdk__cerbero | cerbero/commands/shell.py | Python | lgpl-2.1 | 1,684 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.utils import translation
import logging
class ForceLangMiddleware:
    """Activate the project's default language when the session lacks one."""

    def process_request(self, request):
        session = request.session
        lang_key = translation.LANGUAGE_SESSION_KEY
        # Reproduces the original operator precedence exactly:
        # (key missing) OR (stored value falsy AND a default is configured).
        needs_default = (lang_key not in session) or (
            not session[lang_key] and settings.LANGUAGE_CODE)
        if needs_default:
            translation.activate(settings.LANGUAGE_CODE)
            session[lang_key] = settings.LANGUAGE_CODE
            logging.info("Activated default lang '{}'".format(settings.LANGUAGE_CODE))
| efornal/sdump | app/middleware.py | Python | gpl-3.0 | 627 |
from __future__ import absolute_import
from . import ws2_32
from . import oleaut32
'''
A small module for keeping a database of ordinal to symbol
mappings for DLLs which frequently get linked without symbolic
infoz.
'''
ords = {
b'ws2_32.dll':ws2_32.ord_names,
b'wsock32.dll':ws2_32.ord_names,
b'oleaut32.dll':oleaut32.ord_names,
}
def ordLookup(libname, ord, make_name=False):
    '''
    Look up a symbol name for the given ordinal if the library is in our
    database.

    Returns the known symbol name, a synthesized 'ord<N>' name for a known
    library with an unknown ordinal, or (for an unknown library) 'ord<N>'
    when make_name is truthy and None otherwise.
    '''
    # Library names are stored lowercase (bytes keys, see `ords` above).
    names = ords.get(libname.lower())
    if names is None:
        # Unknown library: optionally synthesize a placeholder name.
        # (idiom fix: `is None` instead of `== None`)
        if make_name:
            return 'ord%d' % ord
        return None
    name = names.get(ord)
    if name is None:
        # Known library but unmapped ordinal: synthesize a placeholder.
        return 'ord%d' % ord
    return name
| pombredanne/pefile | ordlookup/__init__.py | Python | mit | 715 |
from django.db import models
import datetime
# Create your models here.
class AudioFile(models.Model):
    """Audio file model (fields not yet implemented).

    Bug fix: the class body previously contained only comments, which is a
    SyntaxError in Python (a suite needs at least one statement); this
    docstring makes the module importable while documenting the plan.

    Planned fields (translated from the original Spanish notes):
    - title: string, not null
    - permalink: string, not null
    - sharing mode (public/private): string
    - image URL: string
    - description: string
    - duration: int
    - genre: string
    - downloadable: bool
    - creation date: datetime, not null
    - update date: datetime, not null
    - owning user
    """
| mpvillafranca/hear-cloud | apps/audio/models.py | Python | gpl-3.0 | 464 |
print "myscript" | thepian/thepian-pages | test/web/mymodule/myscript.py | Python | agpl-3.0 | 17 |
def foo(x):
pass
x = 42
y = 42
z = 42
foo(x, y, <caret>) | siosio/intellij-community | python/testData/multipleArgumentsCompletion/noExceptionIfMoreArgumentsThanParameters.py | Python | apache-2.0 | 61 |
from django.shortcuts import render
from db_storage.models import Image
from django.http import HttpResponse
from django.views.generic import View
# Create your views here.
class ImageView(View):
    """Serve an image stored in the database, looked up by file name."""

    def get(self, request, file_name):
        # Fetch the stored blob and return it with its original MIME type.
        record = Image.objects.get(file_name=file_name)
        response = HttpResponse(record.data, content_type=record.mimetype)
        return response
"""
Test displayed value of a vector variable while doing watchpoint operations
"""
from __future__ import print_function
import os
import time
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestValueOfVectorVariableTestCase(TestBase):
    """Check that a watchpoint on a vector variable displays its value
    correctly (element-wise, e.g. '(1, 2, 3, 4)')."""
    mydir = TestBase.compute_mydir(__file__)
    @expectedFailureAll(
        oslist=["windows"],
        bugnumber="llvm.org/pr24446: WINDOWS XFAIL TRIAGE - Watchpoints not supported on Windows")
    def test_value_of_vector_variable_using_watchpoint_set(self):
        """Test verify displayed value of vector variable."""
        # Build main.c into a.out, register cleanup, then run the check.
        exe = self.getBuildArtifact("a.out")
        d = {'C_SOURCES': self.source, 'EXE': exe}
        self.build(dictionary=d)
        self.setTearDownCleanup(dictionary=d)
        self.value_of_vector_variable_with_watchpoint_set()
    def setUp(self):
        # Call super's setUp().
        TestBase.setUp(self)
        # Our simple source filename.
        self.source = 'main.c'
    def value_of_vector_variable_with_watchpoint_set(self):
        """Test verify displayed value of vector variable"""
        exe = self.getBuildArtifact("a.out")
        self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)
        # Set break to get a frame
        self.runCmd("b main")
        # Run the program.
        self.runCmd("run", RUN_SUCCEEDED)
        # Value of a vector variable should be displayed correctly
        # in the 'new value:' part of the watchpoint-created message.
        self.expect(
            "watchpoint set variable global_vector",
            WATCHPOINT_CREATED,
            substrs=['new value: (1, 2, 3, 4)'])
| youtube/cobalt | third_party/llvm-project/lldb/packages/Python/lldbsuite/test/functionalities/watchpoint/watchpoint_on_vectors/TestValueOfVectorVariable.py | Python | bsd-3-clause | 1,625 |
#import factorial
#import square
x = int(raw_input("What is 'x'?\n"))
y = int(raw_input("What is y?\n"))
# question0 = str(raw_input("Define a y value? (y/n)\n"))
# if (question0 == "y","Y","yes","Yes"):
# y = int(raw_input("What will 'y' be?\n"))
# elif (y == "n","N","no","No"):
# question2 = str(raw_input("Is y = 10 ok?\n"))
# if (question2 == "y","Y","yes","Yes"):
# y = 10
# elif (question2 == "n","N","no","No"):
# y = int(raw_input("What will 'y' be?\n"))
# else:
# print "Please insert and interger"
# else:
# print "Please insert an interger."
print "Using that information, we can do some mathematical equations."
if x > y: #is not None:
print "x, %d, is greater than y, %d." % (x, y)
elif x == y: #is not None:
print "x, %d, is equal to y, %d." % (x, y)
elif x < y: #is not None:
print "x, %d, is less than y, %d." % (x, y)
elif x is not int:
print "x should be a interger, you put it as %d" % (x)
elif x is None:
print "Please rerun the code."
else:
print "Something went wrong!"
add = (x + y)
sub = (x - y)
mult = (x * y)
div = (x / y)
rem = (x % y)
xeven = (x % 2 == 0)
xodd = (x % 2 != 0)
yeven = (y % 2 == 0)
yodd = (y % 2 != 0)
# xfact = (factorial(x))
# yfact = (factorial(y))
print "If you add x and y, you'll get %s." % add
print "If you subtract x and y, you'll get %s." % sub
print "If you multiply x and y, you'll get %s." % mult
print "If you divide x and y, you'll get %s, with a remainder of %s." % (div, rem)
if (x % 2 == 0):
print "x is even."
if (x % 2 != 0):
print "x is odd."
if (y % 2 == 0):
print "y is even."
if (y % 2 != 0):
print "y is odd."
print "If you square x, you get %s, and y squared is %s." % ((x^2),(y^2))
print "If you cube x, you get %s, and y cubed is %s." % ((x^3), (y^3))
#print "If you take x factorial, you get %s, and y factorial is %s." % ((xfact), (yfact))
#print "The square root of x is %s, and the square root of y is %s." % (square(x), square(y))
print ""
# from sys import argv
# import random
# value = (1,2,3,4,5,6)
# roll, string = argv
# def choice(roll):
# random.choice(dice)
# return choice
# choice(roll)
# dice = choice(value) | chrisortman/CIS-121 | k0459866/Lessons/ex12.py | Python | mit | 2,216 |
__author__ = 'LLCoolDave'
# ToDo: Replace by proper unit tests, currently broken as it stands
import logging
from MafiaBot.MafiaBot import *
from sopel.tools import Identifier
log = logging.getLogger('MafiaBot')
log.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(message)s')
ch.setFormatter(formatter)
# add the handlers to the logger
log.addHandler(ch)
mb = MafiaBot()
mainchannel = mb.mainchannel
deadchat = mb.deadchat
mafiachannel = mb.mafiachannels[0]
playerlist = [Identifier('PLAYERA'), Identifier('PLAYERB'), Identifier('PLAYERC'), Identifier('PLAYERD'), Identifier('PLAYERE'), Identifier('PLAYERF'), Identifier('PLAYERG')]
class botstub:
    """Minimal stand-in for a sopel bot: every action is only logged."""

    def msg(self, target, msg, max_messages=0):
        log.info('BOT MSG @%s: %s' % (str(target), msg))

    def join(self, param):
        log.info('BOT JOIN: %s' % param)

    def part(self, param):
        log.info('BOT PART: %s' % param)

    def write(self, param):
        log.info('BOT WRITE: %s%s' % (param[0], param[1]))

    def say(self, msg, max_messages=0):
        log.info('BOT SAY: %s' % msg)
def SendCommand(command, source, nick, param):
    """Dispatch a channel/user command to the bot and log the exchange."""
    response = mb.HandleCommand(command, source, nick, param, botstub())
    log.info("COMMAND %s by %s in %s with parameter '%s'"
             % (command, str(nick), str(source), str(param)))
    if response is not None:
        log.info('RESPONSE ' + response)
def SendPlayerCommand(command, source, nick, param):
    """Dispatch a player-role command to the bot and log the exchange."""
    response = mb.HandlePlayerCommand(command, source, nick, param, botstub())
    log.info("COMMAND %s by %s in %s with parameter '%s'"
             % (command, str(nick), str(source), str(param)))
    if response is not None:
        log.info('RESPONSE ' + response)
def GameLoop():
    # Advance the bot's internal game state machine one step.
    mb.GameLoop(botstub())
def LogOff():
    # Silence the logger (50 == logging.CRITICAL).
    log.setLevel(50)
def LogOn():
    # Re-enable verbose logging (10 == logging.DEBUG).
    log.setLevel(10)
def JoinAndStart():
    """Have every player join, load the 'test' setup and start the game."""
    for player in playerlist:
        SendCommand('join', mainchannel, player, '')
    SendCommand('setup', mainchannel, playerlist[0], 'load test')
    SendCommand('setup', mainchannel, playerlist[0], 'daystart')
    SendCommand('players', mainchannel, playerlist[0], '')
    #test votes command
    SendCommand('votes', mainchannel, playerlist[2], '')
    SendCommand('start', mainchannel, playerlist[0], '')
    # 'votes' issued in a private query (source == nick) after the start.
    SendCommand('votes', playerlist[3], playerlist[3], '')
def Vote(player, target='NoLynch'):
    """Cast *player*'s lynch vote for *target* in the main channel."""
    SendCommand('vote', mainchannel, player, str(target))
def PassDay(target='NoLynch'):
    """Have every player vote for *target*, ending the day phase."""
    for voter in playerlist:
        Vote(voter, target)
def BreakPoint():
    # Convenience no-op to hang a debugger breakpoint on.
    pass
def _players_with_role(role_name):
    """Return every player whose role is an instance of Roles[role_name]."""
    return [player for player in playerlist
            if isinstance(mb.players[player].role, Roles[role_name])]

def _first_with_role(role_name):
    """Return the first player holding *role_name*, or None if nobody does."""
    matches = _players_with_role(role_name)
    return matches[0] if matches else None

def Main():
    """Drive a full scripted game: join, discover roles, lynch one player
    per day while issuing no-op night actions, until the game ends."""
    # All players join and the test setup starts (logging silenced).
    LogOff()
    JoinAndStart()
    # Mafia-faction players ("scum").
    scums = [player for player in playerlist
             if mb.players[player].faction == MafiaPlayer.FACTION_MAFIA]
    # Locate each special role once; the repeated lookup pattern of the
    # original is factored into _players_with_role/_first_with_role.
    prostitutes = _players_with_role('prostitute')
    pros = prostitutes[0] if prostitutes else None
    medic = _first_with_role('medic')
    cop = _first_with_role('cop')
    paritycop = _first_with_role('paritycop')
    tracker = _first_with_role('tracker')
    watcher = _first_with_role('watcher')
    bulletproof = _first_with_role('bulletproof')
    gunsmith = _first_with_role('gunsmith')
    vigilante = _first_with_role('vigilante')
    alien = _first_with_role('alien')  # looked up for parity with the original
    # Pick a scum member that is not also the prostitute.
    if scums[0] in prostitutes:
        scum = scums[1]
    else:
        scum = scums[0]
    # Log the full role assignment for this game.
    setup = [(str(player), mb.players[player].GetFaction(),
              mb.players[player].role.GetRoleName()) for player in playerlist]
    LogOn()
    log.debug('This game\'s setup is: ' + str(setup))
    day = 0
    while mb.active:
        # Everybody lynches player `day`; then every power role passes on
        # its night action so each day resolves deterministically.
        PassDay(playerlist[day])
        LogOff()
        SendPlayerCommand('pass', gunsmith, gunsmith, bulletproof)
        SendPlayerCommand('pass', pros, pros, cop)
        SendPlayerCommand('pass', medic, medic, playerlist[0])
        SendPlayerCommand('pass', cop, cop, playerlist[0])
        SendPlayerCommand('pass', tracker, tracker, cop)
        SendPlayerCommand('pass', watcher, watcher, cop)
        SendPlayerCommand('pass', vigilante, vigilante, cop)
        SendPlayerCommand('check', paritycop, paritycop, playerlist[6 - day])
        SendCommand('nokill', mafiachannel, scum, playerlist[0])
        SendCommand('nokill', mafiachannel, pros, playerlist[0])
        LogOn()
        GameLoop()
        day += 1
if __name__ == "__main__":
Main()
| LLCoolDave/MafiaBot | MafiaBotTest.py | Python | mit | 5,747 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#/*
# * Copyright (с) 2011 XBMC-Russia, HD-lab Team, E-mail: dev@hd-lab.ru
# * Writer (C) 03/03/2011, Kostynoy S.A., E-mail: seppius2@gmail.com
# *
# * This Program is free software; you can redistribute it and/or modify
# * it under the terms of the GNU General Public License as published by
# * the Free Software Foundation; either version 2, or (at your option)
# * any later version.
# *
# * This Program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; see the file COPYING. If not, write to
# * the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
# * http://www.gnu.org/licenses/gpl.html
# */
import xbmc, xbmcgui, xbmcplugin, xbmcaddon, os, urllib, httplib, socket, sys, re
import socket
socket.setdefaulttimeout(12)
h = int(sys.argv[1])
icon = xbmc.translatePath(os.path.join(os.getcwd().replace(';', ''),'icon.png'))
vinil = xbmc.translatePath(os.path.join(os.getcwd().replace(';', ''),'vinil.png'))
def showMessage(heading, message, times = 3000):
    """Pop up an XBMC notification (duration in ms) with the add-on icon."""
    builtin = 'XBMC.Notification("%s", "%s", %s, "%s")' % (heading, message, times, icon)
    xbmc.executebuiltin(builtin)
def clean(name):
    # Strip tabs/markup leftovers and normalise entities in a scraped title.
    # NOTE(review): several (trash, crap) pairs below are no-ops as written
    # (e.g. ('&','&')); the HTML entity names (e.g. '&amp;', '&quot;')
    # appear to have been lost to entity decoding — verify against the
    # upstream repository before relying on these replacements.
    remove=[('\t',''),(' ',' '),('–',''),('<br>','\n'),('<br />','\n'),('&','&'),('"','"'),(''','\''),(' ',' '),('«','"'),('»', '"'),('—','-')]
    for trash, crap in remove:
        name=name.replace(trash, crap)
    return name
def GET(target):
    """Fetch *target* from mp3tales.ru over HTTP and return the body
    re-encoded from cp1251 to utf-8 (Python 2 httplib)."""
    conn = httplib.HTTPConnection(host='mp3tales.ru', port=80)
    # Minimal browser-like headers; the site serves cp1251-encoded HTML.
    html_headers = {
        'User-Agent': 'XBMC/10-series (Python addon; XBMC-Russia; HD-lab Team; 2011; http://www.xbmc.org)', \
        'Host': 'mp3tales.ru', \
        'Accept': 'text/html, application/xml, application/xhtml+xml, */*' } # 'Cookie': 'rules=yes'
    conn.request(method='GET', url=target, headers=html_headers)
    response = conn.getresponse()
    # Decode site charset leniently, then hand back utf-8 bytes.
    html = response.read().decode('cp1251', "ignore").encode('utf8', "ignore")
    conn.close()
    return html
def getsearch(params):
    """Prompt for a search string via the XBMC on-screen keyboard and open
    the menu filtered by it; returns False if the user cancels."""
    KB = xbmc.Keyboard()
    KB.setHeading('Что ищем?')
    KB.doModal()
    if (KB.isConfirmed()):
        # The site expects cp1251-encoded, url-quoted query text.
        getmenu({'search':urllib.quote_plus(KB.getText().decode('utf8', "replace").encode('cp1251', "replace"))})
    else:
        return False
def getmenu(params):
    """Build the plugin directory listing: scrape one page of tales from
    mp3tales.ru (optionally filtered by params['search']) and add one
    playable item per tale, plus search and next-page entries."""
    try:
        search = params['search']
        ss = '&s=%s&t=AND' % search
    except:
        search = ''
        ss = ''
    if search == '':
        # No active search: offer a "Search?" entry at the top.
        uri = '%s?mode=getsearch'%sys.argv[0]
        li = xbmcgui.ListItem('Поиск?', iconImage = icon, thumbnailImage = icon)
        xbmcplugin.addDirectoryItem(h, uri, li, True)
    # Missing/garbled page parameter falls back to page 1.
    try: page = int(urllib.unquote_plus(params['page']))
    except: page = 1
    http = GET('/tales/?p=%d%s'%(page,ss))
    # Each tale is an <li class='item'> link: capture (href, title).
    row = re.compile("<li class='item'><a href='(.*?)'>(.*?)</a>", re.DOTALL).findall(http)
    found_items = len(row)
    if found_items > 0:
        for new_target, new_name in row:
            # Titles may carry <b> highlighting from search results.
            new_name = new_name.replace('<b>','').replace('</b>','')
            uri = '%s?mode=playpage'%sys.argv[0]
            uri += '&href=%s' % urllib.quote_plus(new_target)
            uri += '&name=%s' % urllib.quote_plus(new_name)
            uri += '&search=%s' % search
            li = xbmcgui.ListItem(new_name, iconImage = vinil, thumbnailImage = vinil)
            xbmcplugin.addDirectoryItem(h, uri, li, False)
        # A full page (site shows 25 per page) implies there may be more.
        if (found_items >24):
            uri = '%s?mode=getmenu&page=%d' % (sys.argv[0], page + 1)
            li = xbmcgui.ListItem('Далее >', iconImage = icon, thumbnailImage = icon)
            xbmcplugin.addDirectoryItem(h, uri, li, True)
    else:
        showMessage('Ой', 'Показать нечего', 5000)
        return False
    xbmcplugin.endOfDirectory(h)
def playpage(params):
    """Scrape a tale's page for its mp3 URL and cover image, then start
    playback with title/genre metadata."""
    try: href = urllib.unquote_plus(params['href'])
    except: href = '/'
    http = GET(href)
    try: name = urllib.unquote_plus(params['name'])
    except: name = 'No name'
    # The mp3 URL sits in the page's player config: file: "<path>",
    furl = 'http://mp3tales.ru' + re.compile('file: "(.*?)",').findall(http)[0]
    try:
        fimg = 'http://mp3tales.ru' + re.compile('<img src="(.*?)" ').findall(http)[0]
    except:
        # No cover on the page: fall back to the bundled vinyl image.
        fimg = vinil
    li = xbmcgui.ListItem(name, iconImage=fimg, thumbnailImage=fimg)
    li.setInfo(type = 'music', infoLabels={'title':name, 'genre':'Мультфильм', 'album':'http://mp3tales.ru/'})
    xbmc.Player().play(furl, li)
def get_params(paramstring):
    """Parse an XBMC plugin query string like '?a=1&b=2' into a dict.

    Pairs without exactly one '=' are skipped. For strings shorter than
    2 characters an empty *list* is returned — the original (odd) contract,
    preserved because the dispatch code below relies on a subscript failure.

    Cleanups vs. the original: removed the dead trailing-slash branch
    (it modified a variable that was never read again), the list-then-dict
    rebinding of `param`, and the shadowed temporary names.
    """
    if len(paramstring) < 2:
        return []
    param = {}
    for pair in paramstring.replace('?', '').split('&'):
        parts = pair.split('=')
        if len(parts) == 2:
            param[parts[0]] = parts[1]
    return param
# Entry point: XBMC invokes this script with the plugin URL's query string
# in sys.argv[2]; the 'mode' parameter names the handler function to run.
params = get_params(sys.argv[2])
mode = None
func = None
# No 'mode' (or short/empty query) means "show the root menu".
try: mode = urllib.unquote_plus(params['mode'])
except: getmenu(params)
# Dispatch: the mode string must match a function defined in this module.
if mode != None:
    try: func = globals()[mode]
    except: pass
if func: func(params)
| sshnaidm/ru | plugin.audio.mp3tales.ru/default.py | Python | gpl-2.0 | 4,999 |
import unittest
import UniDomain.Classes as Classes
#---- unittest Test Classes below here
class TestConfig(unittest.TestCase):
    """Tests for UniDomain.Classes.Config defaults and file loading."""

    def test_Config(self):
        """Check that all required config defaults are set."""
        self.config = Classes.Config()
        # Idiom fix: assertIn reports the container's contents on failure,
        # unlike assertTrue(x in y) which just says "False is not true".
        self.assertIn('plugin_authen', self.config.config, 'no authen plugin in default config')
        self.assertIn('plugin_author', self.config.config, 'no author plugin in default config')
        self.assertIn('cachedir', self.config.config, 'no cache directory in default config')
        self.assertIn('policydir', self.config.config, 'no policy directory in default config')
        self.assertIn('dnszone', self.config.config, 'no dnszone in default config')
        self.assertIn('passwdfile', self.config.config, 'no passwdfile in default config')
        self.assertIn('groupfile', self.config.config, 'no groupfile in default config')

    def test_readconf(self):
        """Check that reading a config file and keyword overrides work."""
        self.config = Classes.Config(file='testconf.xml', passwdfile='xyz')
        self.assertEqual(len(self.config.ldapservers), 1,
                         'reading value from file does not work.')
        # Idiom fix: assertIsInstance instead of comparing type() objects.
        self.assertIsInstance(self.config.debug, bool, 'debug value is not bool!')
        self.assertEqual(self.config.passwdfile, 'xyz',
                         'passing config vars as args doesnt work')
| spahan/unixdmoain | lib/test/Classes.py | Python | bsd-3-clause | 1,575 |
#!/usr/bin/python
#----------------------------------------------------------------------
# For the shells csh, tcsh:
# ( setenv PYTHONPATH /Applications/Xcode.app/Contents/SharedFrameworks/LLDB.framework/Resources/Python ; ./globals.py <path> [<path> ...])
#
# For the shells sh, bash:
# PYTHONPATH=/Applications/Xcode.app/Contents/SharedFrameworks/LLDB.framework/Resources/Python ./globals.py <path> [<path> ...]
#----------------------------------------------------------------------
import lldb
import commands
import optparse
import os
import shlex
import sys
def get_globals(raw_path, options):
    """Create an lldb target for the binary at *raw_path* and print every
    global (DATA) variable found in its executable module (Python 2)."""
    error = lldb.SBError()
    # Resolve the path if needed
    path = os.path.expanduser(raw_path)
    # Create a target using path + options
    target = lldb.debugger.CreateTarget(path, options.arch, options.platform, False, error)
    if target:
        # Get the executable module
        module = target.module[target.executable.basename]
        if module:
            # Keep track of which variables we have already looked up
            global_names = list()
            # Iterate through all symbols in the symbol table and watch for any DATA symbols
            for symbol in module.symbols:
                if symbol.type == lldb.eSymbolTypeData:
                    # The symbol is a DATA symbol, lets try and find all global variables
                    # that match this name and print them
                    global_name = symbol.name
                    # Make sure we don't lookup the same variable twice
                    if global_name not in global_names:
                        global_names.append(global_name)
                        # Find all global variables by name
                        global_variable_list = module.FindGlobalVariables (target, global_name, lldb.UINT32_MAX)
                        if global_variable_list:
                            # Print results for anything that matched
                            for global_variable in global_variable_list:
                                print 'name = %s' % global_variable.name # returns the global variable name as a string
                                print 'value = %s' % global_variable.value # Returns the variable value as a string
                                print 'type = %s' % global_variable.type # Returns an lldb.SBType object
                                print 'addr = %s' % global_variable.addr # Returns an lldb.SBAddress (section offset address) for this global
                                print 'file_addr = 0x%x' % global_variable.addr.file_addr # Returns the file virtual address for this global
                                print 'location = %s' % global_variable.location # returns the global variable value as a string
                                print 'size = %s' % global_variable.size # Returns the size in bytes of this global variable
                                print
def globals(command_args):
    '''Extract all globals from any arguments which must be paths to object files.'''
    # NOTE: this intentionally shadows the `globals` builtin — it is the
    # command entry point invoked from __main__ below.
    usage = "usage: %prog [options] <PATH> [PATH ...]"
    description='''This command will find all globals in the specified object file and return an list() of lldb.SBValue objects (which might be empty).'''
    parser = optparse.OptionParser(description=description, prog='globals',usage=usage)
    parser.add_option('-v', '--verbose', action='store_true', dest='verbose', help='display verbose debug info', default=False)
    parser.add_option('-a', '--arch', type='string', metavar='arch', dest='arch', help='Specify an architecture (or triple) to use when extracting from a file.')
    parser.add_option('-p', '--platform', type='string', metavar='platform', dest='platform', help='Specify the platform to use when creating the debug target. Valid values include "localhost", "darwin-kernel", "ios-simulator", "remote-freebsd", "remote-macosx", "remote-ios", "remote-linux".')
    try:
        (options, args) = parser.parse_args(command_args)
    except:
        # optparse calls sys.exit on error; swallow and bail out quietly.
        return
    # Each remaining argument is a path to an object file to inspect.
    for path in args:
        get_globals (path, options)
if __name__ == '__main__':
lldb.debugger = lldb.SBDebugger.Create()
globals (sys.argv[1:])
| s20121035/rk3288_android5.1_repo | external/lldb/examples/python/globals.py | Python | gpl-3.0 | 4,206 |
from __future__ import print_function, division, absolute_import
import contextlib
import pickle
import warnings
import numpy as np
from sklearn.externals.joblib import load as jl_load
__all__ = ['printoptions', 'verbosedump', 'verboseload', 'dump', 'load']
warnings.warn("This module might be deprecated in favor of msmbuilder.io",
PendingDeprecationWarning)
@contextlib.contextmanager
def printoptions(*args, **kwargs):
    """Temporarily apply numpy print options inside a ``with`` block.

    Bug fix: the original restored the saved options after a bare
    ``yield``, so an exception raised in the with-body left the modified
    options permanently in effect. The restore now runs in ``finally``.
    """
    original = np.get_printoptions()
    np.set_printoptions(*args, **kwargs)
    try:
        yield
    finally:
        np.set_printoptions(**original)
def dump(value, filename, compress=None, cache_size=None):
    """Pickle *value* to *filename*.

    Parameters
    ----------
    value : any Python object
        Object to serialize with pickle.
    filename : string
        Destination file path.
    compress, cache_size : optional
        Historical joblib options; passing either only emits a warning.

    See Also
    --------
    load : corresponding loader
    """
    if not (compress is None and cache_size is None):
        # Both options became no-ops when joblib was dropped.
        warnings.warn("compress and cache_size are no longer valid options")
    with open(filename, 'wb') as handle:
        pickle.dump(value, handle)
def load(filename):
    """Load an object previously saved with ``dump``.

    Pickle is tried first; on failure the legacy joblib loader is used
    (joblib was the default format prior to msmbuilder v3.2). If both
    fail, an IOError describing both errors is raised.

    Parameters
    ----------
    filename : string
        Path of the file to load.
    """
    try:
        with open(filename, 'rb') as handle:
            return pickle.load(handle)
    except Exception as pickle_exc:
        try:
            return jl_load(filename)
        except Exception as joblib_exc:
            raise IOError(
                "Unable to load {} using the pickle or joblib protocol.\n"
                "Pickle: {}\n"
                "Joblib: {}".format(filename, pickle_exc, joblib_exc)
            )
def verbosedump(value, fn, compress=None):
    """Wrapper around ``dump`` that announces what is being saved."""
    print('Saving "{}"... ({})'.format(fn, type(value)))
    dump(value, fn, compress=compress)
def verboseload(fn):
    """Wrapper around ``load`` that announces what is being loaded.

    Tries pickle first, falling back to joblib (see ``load``).
    """
    print('loading "{}"...'.format(fn))
    return load(fn)
| Eigenstate/msmbuilder | msmbuilder/utils/io.py | Python | lgpl-2.1 | 2,305 |
from django.apps import AppConfig
class BlogConfig(AppConfig):
    """Django app configuration for the blog app."""
    name = 'rvpsite.blog'
    # Admin-facing label (Portuguese, "blog management"); this is a runtime
    # string displayed in the Django admin, intentionally left as-is.
    verbose_name = 'GERENCIAMENTO DO BLOG'
#!/usr/bin/python
# -*- coding: UTF-8 -*-
# Generate simple languages JSON module.
LANGUAGES = {
"en": "English",
"de": "Deutsch",
"fr": "Français",
"zh-cn": "中文(简体)",
"zh-tw": "繁體中文",
"ko": "한국어",
"ja": "日本語",
"ru": "Русский",
"es": "Español",
"it": "Italiano",
}
import json
def main():
    """Emit a RequireJS AMD module mapping language codes to native names
    (Python 2; output is meant to be redirected to a .js file)."""
    # The triple-quoted template is printed verbatim; %s receives the
    # JSON-serialized LANGUAGES dict. Do not re-indent the template.
    print """// This file is auto generated, do not modify.
"use strict";
define([], function() {
return %s;
});""" % json.dumps(LANGUAGES)
if __name__ == "__main__":
main()
| spreedbox-packaging/spreed-webrtc-debian | src/i18n/helpers/languages.py | Python | agpl-3.0 | 517 |
#!/usr/bin/env python
# Ad-hoc check for btrsync.hash_dir(); the commented lines below are the
# cProfile variant that was used to profile the same call.
import btrsync
import cProfile
import pstats
#cProfile.run("btrsync.hash_dir()","fooprof")
#p = pstats.Stats('fooprof')
#p.sort_stats("cum")
#p.print_stats()
# Python 2 print statement: show the directory hash of the CWD.
print btrsync.hash_dir()
| RobinMorisset/Btrsync | test_sha1.py | Python | gpl-3.0 | 208 |
# StackOverflow. Non-core, *rolls eyes*.
import sys
import tty
import termios
def getch():
    """Read one character from stdin without waiting for Enter (POSIX).

    The terminal is switched to raw mode for the read; the previous
    settings are always restored in the ``finally`` block, even if the
    read raises.
    """
    fd = sys.stdin.fileno()
    old_settings = termios.tcgetattr(fd)
    try:
        tty.setraw(fd)
        ch = sys.stdin.read(1)
    finally:
        # TCSADRAIN: wait for pending output before restoring cooked mode.
        termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
    return ch
| pbl64k/icfpc2015 | getch.py | Python | bsd-2-clause | 316 |
"""Library to parse Heroes of the Storm replays."""
| HoTSStuff/replaylib | replaylib/__init__.py | Python | apache-2.0 | 52 |
import time
import numpy as np
import scipy.ndimage
from desiutil.log import get_logger
from desispec.qproc.qframe import QFrame
def qproc_sky_subtraction(qframe,return_skymodel=False) :
    """
    Fast sky subtraction directly applied to the input qframe.
    Args:
        qframe : DESI QFrame object
    Optional:
        return_skymodel returns the skymodel as an array of same shape as qframe.flux
    """
    """
    The algorithm is an iterative subtraction of the mean spectrum
    of the sky fibers. It involves 2 resampling at each step,
    - from the fiber wavelength array to the model wavelength array
    - from the model wavelength array back to the fiber wavelength array
    Here all spectra have the same size. It is the number of CCD rows
    (maybe trimed), but the wavelength array of each fiber is different.
    Let's call R_ki the resampling matrix of the spectrum F_i of a fiber
    with wavelength array (i) on the wavelength array (k).
    The sky model defined on the wavelength array (k)
    after the first iteration is the mean of the resampled sky fiber spectra F_j :
    F^(1)_k = 1/n sum_j R_kj F_j
    The model resampled back to a fiber (i) is :
    F^(1)_i = R_ik F^(1)_k = (sum_j 1/n R_ik R_kj) F_j
    We call A^(1) this matrix that gives a model F^(1)_i = A^(1)_ij F_j
    from the measurements.
    Omitting the indices, the residuals are R^(1) = (1-A^(1)) F.
    Let's call A^(n) this matrix that gives a model F^(n) = A^(n) F
    after n iterations. The residuals are R^(n) = (1-A^(n)) F.
    If one applies at iteration (n+1) the algorithm on those residuals,
    one gets an increment to the model A^(1) R^(n) = A^(1) (1-A^(n)) F ,
    so that the total model F^(n+1) = ( A^(n) + A^(1) (1-A^(n)) ) F
    and so, A^(n+1) = A^(n) + A^(1) (1-A^(n))
    For our fast algorithm A^(n+1) != A^(n); there is a gain at each
    iteration.
    This is different from a standard chi2 fit of a linear model.
    In the standard chi2 fit, if H_pi is the derivative of the model
    for the data point 'i' with respect to a parameter 'p',
    ignoring the weights ,
    the best fit parameters are X = ( H H^t )^{-1} H D , so that
    the value of the model for the data points
    is H^t X = H^t ( H H^t )^{-1} H D .
    such that we have the matrix A = H^t ( H H^t )^{-1} H
    It's trivial to see that A is a projector, A^2 = A,
    so applying a second time the algorithm on the residuals
    give the model parameter increment dX = A (1-A) D = (A-A^2) D = 0,
    i.e. not improvement after the first fit.
    """
    log=get_logger()
    t0=time.time()
    log.info("Starting...")
    # Common wavelength grid covering all fibers, oversampled 2x relative to
    # the native per-fiber sampling.
    twave=np.linspace(np.min(qframe.wave),np.max(qframe.wave),qframe.wave.shape[1]*2) # oversampling
    tflux=np.zeros((qframe.flux.shape[0],twave.size))
    tivar=np.zeros((qframe.flux.shape[0],twave.size))
    if return_skymodel :
        sky=np.zeros(qframe.flux.shape)
    # Zero the inverse variance of masked pixels so they are ignored below.
    if qframe.mask is not None :
        qframe.ivar *= (qframe.mask==0)
    if qframe.fibermap is None :
        log.error("Empty fibermap in qframe, cannot know which are the sky fibers!")
        raise RuntimeError("Empty fibermap in qframe, cannot know which are the sky fibers!")
    skyfibers = np.where(qframe.fibermap["OBJTYPE"]=="SKY")[0]
    # Fallback: with no designated sky fibers, treat the faintest half
    # (by median flux) as sky. NOTE: this mutates qframe.fibermap.
    if skyfibers.size==0 :
        log.warning("No sky fibers! I am going to declare the faintest half of the fibers as sky fibers")
        mflux = np.median(qframe.flux,axis=1)
        ii = np.argsort(mflux)
        qframe.fibermap["OBJTYPE"][ii[:ii.size//2]] = "SKY"
        skyfibers = np.where(qframe.fibermap["OBJTYPE"]=="SKY")[0]
    log.info("Sky fibers: {}".format(skyfibers))
    for loop in range(5) : # I need several iterations to remove the effect of the wavelength solution noise
        # Resample each sky fiber spectrum (good pixels only) onto the
        # common grid; the model is the median over sky fibers.
        for i in skyfibers :
            jj=(qframe.ivar[i]>0)
            if np.sum(jj)>0 :
                tflux[i]=np.interp(twave,qframe.wave[i,jj],qframe.flux[i,jj])
        ttsky = np.median(tflux[skyfibers],axis=0)
        # Subtract the model (resampled back to each fiber's wavelengths)
        # in place from qframe.flux; pixels with exactly zero flux are
        # skipped. The accumulated model is kept only if requested.
        if return_skymodel :
            for i in range(qframe.flux.shape[0]) :
                tmp=np.interp(qframe.wave[i],twave,ttsky)
                jj=(qframe.flux[i]!=0)
                if np.sum(jj)>0 :
                    qframe.flux[i,jj] -= tmp[jj]
                sky[i] += tmp
        else :
            for i in range(qframe.flux.shape[0]) :
                jj=(qframe.flux[i]!=0)
                if np.sum(jj)>0 :
                    qframe.flux[i,jj] -= np.interp(qframe.wave[i,jj],twave,ttsky)
    t1=time.time()
    log.info(" done in {:3.1f} sec".format(t1-t0))
    if return_skymodel :
        return sky
| desihub/desispec | py/desispec/qproc/qsky.py | Python | bsd-3-clause | 4,559 |
# DRF-style throttle rate string (15 requests per minute) for records endpoints.
RECORDS_RATE_LIMIT = "15/m" # This rate was arbitrarily chosen due to a lack of data. It may need to be changed later.
class UserCreditPathwayStatus:
    """Allowed values for UserCreditPathway.status"""
    # Recorded once the credit-pathway email has been sent.
    SENT = "sent"
| edx/credentials | credentials/apps/records/constants.py | Python | agpl-3.0 | 227 |
"""
Bounce a ball on the screen, using gravity.
"""
import arcade
# --- Set up the constants
# Size of the screen
SCREEN_WIDTH = 600
SCREEN_HEIGHT = 600
# Size of the circle.
CIRCLE_RADIUS = 20
# How strong the gravity is.
GRAVITY_CONSTANT = 0.3
# Percent of velocity maintained on a bounce.
BOUNCINESS = 0.9
def draw(delta_time):
    """
    Use this function to draw everything to the screen.
    """
    # Start the render. This must happen before any drawing
    # commands. We do NOT need an stop render command.
    arcade.start_render()
    # Draw our ball (a filled circle).
    arcade.draw_circle_filled(draw.x, draw.y, CIRCLE_RADIUS,
                              arcade.color.BLACK)
    # Modify the circle's position based on the delta
    # vector. (Delta means change. You can also think
    # of this as our speed and direction.)
    draw.x += draw.delta_x
    draw.y += draw.delta_y
    # Apply gravity each frame.
    draw.delta_y -= GRAVITY_CONSTANT
    # Figure out if we hit the left or right edge and need to reverse.
    if draw.x < CIRCLE_RADIUS and draw.delta_x < 0:
        draw.delta_x *= -BOUNCINESS
    elif draw.x > SCREEN_WIDTH - CIRCLE_RADIUS and draw.delta_x > 0:
        draw.delta_x *= -BOUNCINESS
    # See if we hit the bottom
    if draw.y < CIRCLE_RADIUS and draw.delta_y < 0:
        # If we bounce with a decent velocity, do a normal bounce.
        # Otherwise we won't have enough time resolution to accurate represent
        # the bounce and it will bounce forever. So we'll divide the bounciness
        # by half to let it settle out.
        if draw.delta_y * -1 > GRAVITY_CONSTANT * 15:
            draw.delta_y *= -BOUNCINESS
        else:
            draw.delta_y *= -BOUNCINESS / 2
# Below are function-specific variables. Before we use them
# in our function, we need to give them initial values. Then
# the values will persist between function calls.
#
# In other languages, we'd declare the variables as 'static' inside the
# function to get that same functionality.
#
# Later on, we'll use 'classes' to track position and velocity for multiple
# objects.
draw.x = CIRCLE_RADIUS
draw.y = SCREEN_HEIGHT - CIRCLE_RADIUS
draw.delta_x = 2
draw.delta_y = 0
def main():
    """Open the window, schedule the draw callback, and run the arcade loop."""
    # Open up our window
    arcade.open_window("Bouncing Ball", SCREEN_WIDTH, SCREEN_HEIGHT)
    arcade.set_background_color(arcade.color.WHITE)
    # Tell the computer to call the draw command at the specified interval.
    arcade.schedule(draw, 1 / 80)
    # Run the program
    arcade.run()
    # When done running the program, close the window.
    arcade.close_window()
if __name__ == "__main__":
    main()
| mikemhenry/arcade | examples/bouncing_ball.py | Python | mit | 2,607 |
"""
This class is defined to override standard pickle functionality
The goals of it follow:
-Serialize lambdas and nested functions to compiled byte code
-Deal with main module correctly
-Deal with other non-serializable objects
It does not include an unpickler, as standard python unpickling suffices.
This module was extracted from the `cloud` package, developed by `PiCloud, Inc.
<http://www.picloud.com>`_.
Copyright (c) 2012, Regents of the University of California.
Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the University of California, Berkeley nor the
names of its contributors may be used to endorse or promote
products derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from __future__ import print_function
import operator
import os
import io
import pickle
import struct
import sys
import types
from functools import partial
import itertools
import dis
import traceback
if sys.version < '3':
from pickle import Pickler
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
PY3 = False
else:
types.ClassType = type
from pickle import _Pickler as Pickler
from io import BytesIO as StringIO
PY3 = True
#relevant opcodes
STORE_GLOBAL = dis.opname.index('STORE_GLOBAL')
DELETE_GLOBAL = dis.opname.index('DELETE_GLOBAL')
LOAD_GLOBAL = dis.opname.index('LOAD_GLOBAL')
GLOBAL_OPS = [STORE_GLOBAL, DELETE_GLOBAL, LOAD_GLOBAL]
HAVE_ARGUMENT = dis.HAVE_ARGUMENT
EXTENDED_ARG = dis.EXTENDED_ARG
def islambda(func):
    """Return True when *func* is a lambda (identified by its '<lambda>' name)."""
    return func.__name__ == '<lambda>'
# Map type objects from the `types` module back to their exported names, so
# builtin types can be pickled by name and rebuilt via _builtin_type().
_BUILTIN_TYPE_NAMES = {}
for k, v in types.__dict__.items():
    if type(v) is type:
        _BUILTIN_TYPE_NAMES[v] = k
def _builtin_type(name):
    """Return the type object that the `types` module exports under *name*."""
    return getattr(types, name)
class CloudPickler(Pickler):
    # Per-type serializer registry; starts as a copy of the stock Pickler table
    # and is extended below with handlers for otherwise-unpicklable types.
    dispatch = Pickler.dispatch.copy()
    def __init__(self, file, protocol=None):
        Pickler.__init__(self, file, protocol)
        # set of modules to unpickle
        self.modules = set()
        # map ids to dictionary. used to ensure that functions can share global env
        self.globals_ref = {}
def dump(self, obj):
self.inject_addons()
try:
return Pickler.dump(self, obj)
except RuntimeError as e:
if 'recursion' in e.args[0]:
msg = """Could not pickle object as excessively deep recursion required."""
raise pickle.PicklingError(msg)
    def save_memoryview(self, obj):
        """Fallback to save_string"""
        Pickler.save_string(self, str(obj))
    def save_buffer(self, obj):
        """Fallback to save_string"""
        Pickler.save_string(self,str(obj))
    # Register whichever raw-buffer type exists on this Python version.
    if PY3:
        dispatch[memoryview] = save_memoryview
    else:
        dispatch[buffer] = save_buffer
    def save_unsupported(self, obj):
        """Refuse to pickle types that cannot be meaningfully serialized."""
        raise pickle.PicklingError("Cannot pickle objects of type %s" % type(obj))
    dispatch[types.GeneratorType] = save_unsupported
    # itertools objects do not pickle!
    for v in itertools.__dict__.values():
        if type(v) is type:
            dispatch[v] = save_unsupported
    def save_module(self, obj):
        """
        Save a module as an import
        """
        # Remember the module so the unpickling side can be told to import it.
        self.modules.add(obj)
        self.save_reduce(subimport, (obj.__name__,), obj=obj)
    dispatch[types.ModuleType] = save_module
    def save_codeobject(self, obj):
        """
        Save a code object
        """
        # The CodeType constructor signature differs between Python 2 and 3
        # (Python 3 adds co_kwonlyargcount); build the matching tuple.
        if PY3:
            args = (
                obj.co_argcount, obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
                obj.co_flags, obj.co_code, obj.co_consts, obj.co_names, obj.co_varnames,
                obj.co_filename, obj.co_name, obj.co_firstlineno, obj.co_lnotab, obj.co_freevars,
                obj.co_cellvars
            )
        else:
            args = (
                obj.co_argcount, obj.co_nlocals, obj.co_stacksize, obj.co_flags, obj.co_code,
                obj.co_consts, obj.co_names, obj.co_varnames, obj.co_filename, obj.co_name,
                obj.co_firstlineno, obj.co_lnotab, obj.co_freevars, obj.co_cellvars
            )
        self.save_reduce(types.CodeType, args, obj=obj)
    dispatch[types.CodeType] = save_codeobject
    def save_function(self, obj, name=None):
        """ Registered with the dispatch to handle all function types.
        Determines what kind of function obj is (e.g. lambda, defined at
        interactive prompt, etc) and handles the pickling appropriately.
        """
        write = self.write
        if name is None:
            name = obj.__name__
        modname = pickle.whichmodule(obj, name)
        # print('which gives %s %s %s' % (modname, obj, name))
        try:
            themodule = sys.modules[modname]
        except KeyError:
            # eval'd items such as namedtuple give invalid items for their function __module__
            modname = '__main__'
        if modname == '__main__':
            themodule = None
        if themodule:
            self.modules.add(themodule)
            # Importable by reference: defer to the standard global save.
            if getattr(themodule, name, None) is obj:
                return self.save_global(obj, name)
        # if func is lambda, def'ed at prompt, is in main, or is nested, then
        # we'll pickle the actual function object rather than simply saving a
        # reference (as is done in default pickler), via save_function_tuple.
        if islambda(obj) or obj.__code__.co_filename == '<stdin>' or themodule is None:
            #print("save global", islambda(obj), obj.__code__.co_filename, modname, themodule)
            self.save_function_tuple(obj)
            return
        else:
            # func is nested
            klass = getattr(themodule, name, None)
            if klass is None or klass is not obj:
                self.save_function_tuple(obj)
                return
        if obj.__dict__:
            # essentially save_reduce, but workaround needed to avoid recursion
            self.save(_restore_attr)
            write(pickle.MARK + pickle.GLOBAL + modname + '\n' + name + '\n')
            self.memoize(obj)
            self.save(obj.__dict__)
            write(pickle.TUPLE + pickle.REDUCE)
        else:
            write(pickle.GLOBAL + modname + '\n' + name + '\n')
            self.memoize(obj)
    dispatch[types.FunctionType] = save_function
    def save_function_tuple(self, func):
        """ Pickles an actual func object.
        A func comprises: code, globals, defaults, closure, and dict. We
        extract and save these, injecting reducing functions at certain points
        to recreate the func object. Keep in mind that some of these pieces
        can contain a ref to the func itself. Thus, a naive save on these
        pieces could trigger an infinite loop of save's. To get around that,
        we first create a skeleton func object using just the code (this is
        safe, since this won't contain a ref to the func), and memoize it as
        soon as it's created. The other stuff can then be filled in later.
        """
        save = self.save
        write = self.write
        code, f_globals, defaults, closure, dct, base_globals = self.extract_func_data(func)
        save(_fill_function) # skeleton function updater
        write(pickle.MARK) # beginning of tuple that _fill_function expects
        # create a skeleton function object and memoize it
        save(_make_skel_func)
        save((code, len(closure), base_globals))
        write(pickle.REDUCE)
        self.memoize(func)
        # save the rest of the func data needed by _fill_function
        save(f_globals)
        save(defaults)
        save(closure) # maintains backcompat
        save(dct)
        write(pickle.TUPLE)
        write(pickle.REDUCE) # applies _fill_function on the tuple
    @staticmethod
    def extract_code_globals(co):
        """
        Find all globals names read or written to by codeblock co
        """
        code = co.co_code
        if not PY3:
            code = [ord(c) for c in code]
        names = co.co_names
        out_names = set()
        # Walk the raw bytecode, collecting the names used by any
        # LOAD/STORE/DELETE_GLOBAL opcode (honoring EXTENDED_ARG).
        n = len(code)
        i = 0
        extended_arg = 0
        while i < n:
            op = code[i]
            i += 1
            if op >= HAVE_ARGUMENT:
                oparg = code[i] + code[i+1] * 256 + extended_arg
                extended_arg = 0
                i += 2
                if op == EXTENDED_ARG:
                    extended_arg = oparg*65536
                if op in GLOBAL_OPS:
                    out_names.add(names[oparg])
        # see if nested function have any global refs
        if co.co_consts:
            for const in co.co_consts:
                if type(const) is types.CodeType:
                    out_names |= CloudPickler.extract_code_globals(const)
        return out_names
    def extract_func_data(self, func):
        """
        Turn the function into a tuple of data necessary to recreate it:
        code, globals, defaults, closure, dict
        """
        code = func.__code__
        # extract all global ref's
        func_global_refs = self.extract_code_globals(code)
        # process all variables referenced by global environment
        f_globals = {}
        for var in func_global_refs:
            if var in func.__globals__:
                f_globals[var] = func.__globals__[var]
        # defaults requires no processing
        defaults = func.__defaults__
        # process closure
        closure = [c.cell_contents for c in func.__closure__] if func.__closure__ else []
        # save the dict
        dct = func.__dict__
        # Reuse a single base_globals dict per original globals object so
        # rebuilt functions from the same module share one global env.
        base_globals = self.globals_ref.get(id(func.__globals__), {})
        self.globals_ref[id(func.__globals__)] = base_globals
        return (code, f_globals, defaults, closure, dct, base_globals)
def save_builtin_function(self, obj):
if obj.__module__ is "__builtin__":
return self.save_global(obj)
return self.save_function(obj)
dispatch[types.BuiltinFunctionType] = save_builtin_function
    def save_global(self, obj, name=None, pack=struct.pack):
        """Save a class or other module-level object, by reference when the
        object is importable, otherwise by serializing its definition."""
        # Builtin types are pickled by name and rebuilt via _builtin_type.
        if obj.__module__ == "__builtin__" or obj.__module__ == "builtins":
            if obj in _BUILTIN_TYPE_NAMES:
                return self.save_reduce(_builtin_type, (_BUILTIN_TYPE_NAMES[obj],), obj=obj)
        if name is None:
            name = obj.__name__
        modname = getattr(obj, "__module__", None)
        if modname is None:
            modname = pickle.whichmodule(obj, name)
        if modname == '__main__':
            themodule = None
        else:
            __import__(modname)
            themodule = sys.modules[modname]
            self.modules.add(themodule)
        # Importable by reference: standard pickle handling is sufficient.
        if hasattr(themodule, name) and getattr(themodule, name) is obj:
            return Pickler.save_global(self, obj, name)
        typ = type(obj)
        # Otherwise, only dynamically-created classes can be serialized:
        # save their name, bases and (filtered) attribute dict.
        if typ is not obj and isinstance(obj, (type, types.ClassType)):
            d = dict(obj.__dict__) # copy dict proxy to a dict
            if not isinstance(d.get('__dict__', None), property):
                # don't extract dict that are properties
                d.pop('__dict__', None)
            d.pop('__weakref__', None)
            # hack as __new__ is stored differently in the __dict__
            new_override = d.get('__new__', None)
            if new_override:
                d['__new__'] = obj.__new__
            # workaround for namedtuple (hijacked by PySpark)
            if getattr(obj, '_is_namedtuple_', False):
                self.save_reduce(_load_namedtuple, (obj.__name__, obj._fields))
                return
            self.save(_load_class)
            self.save_reduce(typ, (obj.__name__, obj.__bases__, {"__doc__": obj.__doc__}), obj=obj)
            d.pop('__doc__', None)
            # handle property and staticmethod
            dd = {}
            for k, v in d.items():
                if isinstance(v, property):
                    k = ('property', k)
                    v = (v.fget, v.fset, v.fdel, v.__doc__)
                elif isinstance(v, staticmethod) and hasattr(v, '__func__'):
                    k = ('staticmethod', k)
                    v = v.__func__
                elif isinstance(v, classmethod) and hasattr(v, '__func__'):
                    k = ('classmethod', k)
                    v = v.__func__
                dd[k] = v
            self.save(dd)
            self.write(pickle.TUPLE2)
            self.write(pickle.REDUCE)
        else:
            raise pickle.PicklingError("Can't pickle %r" % obj)
    dispatch[type] = save_global
    dispatch[types.ClassType] = save_global
    def save_instancemethod(self, obj):
        # Memoization rarely is ever useful due to python bounding
        # Py3 MethodType takes (func, self); Py2 also wants the owning class.
        if PY3:
            self.save_reduce(types.MethodType, (obj.__func__, obj.__self__), obj=obj)
        else:
            self.save_reduce(types.MethodType, (obj.__func__, obj.__self__, obj.__self__.__class__),
                             obj=obj)
    dispatch[types.MethodType] = save_instancemethod
    def save_inst(self, obj):
        """Inner logic to save instance. Based off pickle.save_inst
        Supports __transient__"""
        cls = obj.__class__
        memo = self.memo
        write = self.write
        save = self.save
        if hasattr(obj, '__getinitargs__'):
            args = obj.__getinitargs__()
            len(args) # XXX Assert it's a sequence
            pickle._keep_alive(args, memo)
        else:
            args = ()
        write(pickle.MARK)
        # Binary protocols use OBJ; text protocol uses INST with the
        # module/class names spelled out.
        if self.bin:
            save(cls)
            for arg in args:
                save(arg)
            write(pickle.OBJ)
        else:
            for arg in args:
                save(arg)
            write(pickle.INST + cls.__module__ + '\n' + cls.__name__ + '\n')
        self.memoize(obj)
        try:
            getstate = obj.__getstate__
        except AttributeError:
            stuff = obj.__dict__
            #remove items if transient
            if hasattr(obj, '__transient__'):
                transient = obj.__transient__
                stuff = stuff.copy()
                for k in list(stuff.keys()):
                    if k in transient:
                        del stuff[k]
        else:
            stuff = getstate()
            pickle._keep_alive(stuff, memo)
        save(stuff)
        write(pickle.BUILD)
    if not PY3:
        dispatch[types.InstanceType] = save_inst
    def save_property(self, obj):
        # properties not correctly saved in python
        # Rebuild from the four property() constructor arguments.
        self.save_reduce(property, (obj.fget, obj.fset, obj.fdel, obj.__doc__), obj=obj)
    dispatch[property] = save_property
    def save_itemgetter(self, obj):
        """itemgetter serializer (needed for namedtuple support)"""
        # Recover the requested keys by applying the getter to a probe
        # object whose __getitem__ simply echoes each key back.
        class Dummy:
            def __getitem__(self, item):
                return item
        items = obj(Dummy())
        if not isinstance(items, tuple):
            items = (items, )
        return self.save_reduce(operator.itemgetter, items)
    if type(operator.itemgetter) is type:
        dispatch[operator.itemgetter] = save_itemgetter
    def save_attrgetter(self, obj):
        """attrgetter serializer"""
        # Probe object that records every (possibly dotted) attribute path
        # the getter accesses, so the attrgetter can be rebuilt by name.
        class Dummy(object):
            def __init__(self, attrs, index=None):
                self.attrs = attrs
                self.index = index
            def __getattribute__(self, item):
                attrs = object.__getattribute__(self, "attrs")
                index = object.__getattribute__(self, "index")
                if index is None:
                    index = len(attrs)
                    attrs.append(item)
                else:
                    attrs[index] = ".".join([attrs[index], item])
                return type(self)(attrs, index)
        attrs = []
        obj(Dummy(attrs))
        return self.save_reduce(operator.attrgetter, tuple(attrs))
    if type(operator.attrgetter) is type:
        dispatch[operator.attrgetter] = save_attrgetter
    def save_reduce(self, func, args, state=None,
                    listitems=None, dictitems=None, obj=None):
        """Modified to support __transient__ on new objects
        Change only affects protocol level 2 (which is always used by PiCloud"""
        # Assert that args is a tuple or None
        if not isinstance(args, tuple):
            raise pickle.PicklingError("args from reduce() should be a tuple")
        # Assert that func is callable
        if not hasattr(func, '__call__'):
            raise pickle.PicklingError("func from reduce should be callable")
        save = self.save
        write = self.write
        # Protocol 2 special case: if func's name is __newobj__, use NEWOBJ
        if self.proto >= 2 and getattr(func, "__name__", "") == "__newobj__":
            #Added fix to allow transient
            cls = args[0]
            if not hasattr(cls, "__new__"):
                raise pickle.PicklingError(
                    "args[0] from __newobj__ args has no __new__")
            if obj is not None and cls is not obj.__class__:
                raise pickle.PicklingError(
                    "args[0] from __newobj__ args has the wrong class")
            args = args[1:]
            save(cls)
            #Don't pickle transient entries
            if hasattr(obj, '__transient__'):
                transient = obj.__transient__
                state = state.copy()
                for k in list(state.keys()):
                    if k in transient:
                        del state[k]
            save(args)
            write(pickle.NEWOBJ)
        else:
            save(func)
            save(args)
            write(pickle.REDUCE)
        if obj is not None:
            self.memoize(obj)
        # More new special cases (that work with older protocols as
        # well): when __reduce__ returns a tuple with 4 or 5 items,
        # the 4th and 5th item should be iterators that provide list
        # items and dict items (as (key, value) tuples), or None.
        if listitems is not None:
            self._batch_appends(listitems)
        if dictitems is not None:
            self._batch_setitems(dictitems)
        if state is not None:
            save(state)
            write(pickle.BUILD)
    def save_partial(self, obj):
        """Partial objects do not serialize correctly in python2.x -- this fixes the bugs"""
        # Rebuild via _genpartial from the partial's (func, args, keywords).
        self.save_reduce(_genpartial, (obj.func, obj.args, obj.keywords))
    if sys.version_info < (2,7): # 2.7 supports partial pickling
        dispatch[partial] = save_partial
    def save_file(self, obj):
        """Save a file"""
        # Files are serialized as an in-memory StringIO snapshot of their
        # contents plus the current seek position; only readable, real,
        # non-tty files are supported.
        try:
            import StringIO as pystringIO #we can't use cStringIO as it lacks the name attribute
        except ImportError:
            import io as pystringIO
        if not hasattr(obj, 'name') or not hasattr(obj, 'mode'):
            raise pickle.PicklingError("Cannot pickle files that do not map to an actual file")
        if obj is sys.stdout:
            return self.save_reduce(getattr, (sys,'stdout'), obj=obj)
        if obj is sys.stderr:
            return self.save_reduce(getattr, (sys,'stderr'), obj=obj)
        if obj is sys.stdin:
            raise pickle.PicklingError("Cannot pickle standard input")
        if hasattr(obj, 'isatty') and obj.isatty():
            raise pickle.PicklingError("Cannot pickle files that map to tty objects")
        if 'r' not in obj.mode:
            raise pickle.PicklingError("Cannot pickle files that are not opened for reading")
        name = obj.name
        try:
            fsize = os.stat(name).st_size
        except OSError:
            raise pickle.PicklingError("Cannot pickle file %s as it cannot be stat" % name)
        if obj.closed:
            #create an empty closed string io
            retval = pystringIO.StringIO("")
            retval.close()
        elif not fsize: #empty file
            retval = pystringIO.StringIO("")
            try:
                tmpfile = file(name)
                tst = tmpfile.read(1)
            except IOError:
                raise pickle.PicklingError("Cannot pickle file %s as it cannot be read" % name)
            tmpfile.close()
            if tst != '':
                raise pickle.PicklingError("Cannot pickle file %s as it does not appear to map to a physical, real file" % name)
        else:
            try:
                tmpfile = file(name)
                contents = tmpfile.read()
                tmpfile.close()
            except IOError:
                raise pickle.PicklingError("Cannot pickle file %s as it cannot be read" % name)
            retval = pystringIO.StringIO(contents)
            curloc = obj.tell()
            retval.seek(curloc)
        retval.name = name
        self.save(retval)
        self.memoize(obj)
    if PY3:
        dispatch[io.TextIOWrapper] = save_file
    else:
        dispatch[file] = save_file
"""Special functions for Add-on libraries"""
def inject_numpy(self):
numpy = sys.modules.get('numpy')
if not numpy or not hasattr(numpy, 'ufunc'):
return
self.dispatch[numpy.ufunc] = self.__class__.save_ufunc
    def save_ufunc(self, obj):
        """Hack function for saving numpy ufunc objects"""
        name = obj.__name__
        numpy_tst_mods = ['numpy', 'scipy.special']
        # ufuncs carry no module information; look them up by name in the
        # modules where they are commonly defined.
        for tst_mod_name in numpy_tst_mods:
            tst_mod = sys.modules.get(tst_mod_name, None)
            if tst_mod and name in tst_mod.__dict__:
                return self.save_reduce(_getobject, (tst_mod_name, name))
        raise pickle.PicklingError('cannot save %s. Cannot resolve what module it is defined in'
                                   % str(obj))
    def inject_unity_proxy(self):
        """Mark GraphLab/Unity objects and their cython proxies unpicklable."""
        # get the top level module
        gl = __import__(__name__.split('.')[0])
        if not gl:
            return
        ## Make sure the unity objects are not picklable ##
        self.dispatch[gl.SArray] = self.__class__.save_unsupported
        self.dispatch[gl.SFrame] = self.__class__.save_unsupported
        self.dispatch[gl.SGraph] = self.__class__.save_unsupported
        self.dispatch[gl.Graph] = self.__class__.save_unsupported
        self.dispatch[gl.Sketch] = self.__class__.save_unsupported
        self.dispatch[gl.Model] = self.__class__.save_unsupported
        ## Make sure the underlying cython objects are not picklable ##
        self.dispatch[gl.cython.cy_sarray.UnitySArrayProxy] = self.__class__.save_unsupported
        self.dispatch[gl.cython.cy_sframe.UnitySFrameProxy] = self.__class__.save_unsupported
        self.dispatch[gl.cython.cy_sketch.UnitySketchProxy] = self.__class__.save_unsupported
        self.dispatch[gl.cython.cy_graph.UnityGraphProxy] = self.__class__.save_unsupported
        self.dispatch[gl.cython.cy_model.UnityModel] = self.__class__.save_unsupported
        self.dispatch[gl.cython.cy_ipc.PyCommClient] = self.__class__.save_unsupported
    def inject_addons(self):
        """Plug in system. Register additional pickling functions if modules already loaded"""
        self.inject_numpy()
        self.inject_unity_proxy()
# Shorthands for legacy support
def dump(obj, file, protocol=2):
    """Serialize *obj* with CloudPickler, writing the pickle stream to *file*."""
    CloudPickler(file, protocol).dump(obj)
def dumps(obj, protocol=2):
    """Serialize *obj* with CloudPickler and return the resulting pickle data."""
    buffer = StringIO()
    CloudPickler(buffer, protocol).dump(obj)
    return buffer.getvalue()
#hack for __import__ not working as desired
def subimport(name):
    """Import *name* and return the module object (handles dotted names,
    which plain __import__ would return the top-level package for)."""
    __import__(name)
    return sys.modules[name]
# restores function attributes
def _restore_attr(obj, attr):
for key, val in attr.items():
setattr(obj, key, val)
return obj
def _get_module_builtins():
    """Return the __builtins__ object as seen by the pickle module."""
    return pickle.__builtins__
def print_exec(stream):
    """Write the traceback of the exception currently being handled to *stream*."""
    exc_type, exc_value, exc_tb = sys.exc_info()
    traceback.print_exception(exc_type, exc_value, exc_tb, None, stream)
def _modules_to_main(modList):
    """Force every module in modList to be placed into main"""
    if not modList:
        return
    main = sys.modules['__main__']
    for modname in modList:
        if type(modname) is str:
            try:
                mod = __import__(modname)
            except Exception as e:
                # Import failures are warnings, not fatal: the unpickled
                # function may still work without the module.
                sys.stderr.write('warning: could not import %s\n. '
                                 'Your function may unexpectedly error due to this import failing;'
                                 'A version mismatch is likely. Specific error was:\n' % modname)
                print_exec(sys.stderr)
            else:
                setattr(main, mod.__name__, mod)
#object generators:
def _genpartial(func, args, kwds):
if not args:
args = ()
if not kwds:
kwds = {}
return partial(func, *args, **kwds)
def _fill_function(func, globals, defaults, closures, dict):
    """ Fills in the rest of function data into the skeleton function object
    that were created via _make_skel_func().
    """
    closure = _reconstruct_closure(closures) if closures else None
    # Function attribute names differ between Python 2 (func_*) and 3 (__*__).
    if sys.version_info.major == 2:
        func = types.FunctionType(func.__code__, func.func_globals,
                                  None, None, closure)
        func.func_globals.update(globals)
        func.func_defaults = defaults
        func.func_dict = dict
    else:
        func = types.FunctionType(func.__code__, func.__globals__,
                                  None, None, closure)
        func.__globals__.update(globals)
        func.__defaults__ = defaults
        func.__dict__ = dict
    return func
def _make_cell(value):
return (lambda: value).__closure__[0]
def _reconstruct_closure(values):
    """Build a closure tuple containing one cell per value in *values*."""
    return tuple(_make_cell(value) for value in values)
def _make_skel_func(code, num_closures, base_globals = None):
    """ Creates a skeleton function object that contains just the provided
    code and the correct number of cells in func_closure. All other
    func attributes (e.g. func_globals) are empty.
    """
    # Placeholder cells; real closure contents arrive via _fill_function.
    dummy_closure = tuple(map(lambda i:_make_cell(None), range(num_closures)))
    if base_globals is None:
        base_globals = {}
    base_globals['__builtins__'] = __builtins__
    return types.FunctionType(code, base_globals,
                              None, None, dummy_closure)
def _load_class(cls, d):
"""
Loads additional properties into class `cls`.
"""
for k, v in d.items():
if isinstance(k, tuple):
typ, k = k
if typ == 'property':
v = property(*v)
elif typ == 'staticmethod':
v = staticmethod(v)
elif typ == 'classmethod':
v = classmethod(v)
setattr(cls, k, v)
return cls
def _load_namedtuple(name, fields):
"""
Loads a class generated by namedtuple
"""
from collections import namedtuple
return namedtuple(name, fields)
"""Constructors for 3rd party libraries
Note: These can never be renamed due to client compatibility issues"""
def _getobject(modname, attribute):
mod = __import__(modname, fromlist=[attribute])
return mod.__dict__[attribute]
| TobyRoseman/SFrame | oss_src/unity/python/sframe/util/cloudpickle.py | Python | bsd-3-clause | 28,239 |
import json
import urllib
from collections import OrderedDict
from datetime import datetime
from django.conf import settings
from django.core.cache import cache
from django.db.models import Q
from django.utils.translation import ugettext_lazy as _lazy
import commonware.log
from elasticsearch_dsl import Search
from elasticsearch_dsl import filter as es_filter
import waffle
import mkt
from lib.iarc_v2.client import (publish as iarc_publish,
unpublish as iarc_unpublish)
from mkt.abuse.models import AbuseReport
from mkt.access import acl
from mkt.comm.utils import create_comm_note
from mkt.constants import comm
from mkt.files.models import File
from mkt.ratings.models import Review
from mkt.reviewers.models import EscalationQueue, RereviewQueue, ReviewerScore
from mkt.site.helpers import product_as_dict
from mkt.site.models import manual_order
from mkt.site.utils import cached_property, JSONEncoder
from mkt.translations.query import order_by_translation
from mkt.versions.models import Version
from mkt.webapps.models import Webapp
from mkt.webapps.indexers import HomescreenIndexer, WebappIndexer
from mkt.webapps.tasks import set_storefront_data
from mkt.websites.models import Website
log = commonware.log.getLogger('z.mailer')
def get_review_type(request, addon, version):
    """Return the name of the review queue this app currently belongs to.

    Checked in priority order: escalated, then re-review, then homescreen,
    falling back to the regular pending queue. `request` and `version` are
    unused but kept for signature compatibility with callers.
    """
    if EscalationQueue.objects.filter(addon=addon).exists():
        return 'escalated'
    if RereviewQueue.objects.filter(addon=addon).exists():
        return 'rereview'
    if addon.is_homescreen():
        return 'homescreen'
    return 'pending'
class ReviewBase(object):
    """Shared state and helpers for reviewer actions on an app version."""
    def __init__(self, request, addon, version, attachment_formset=None,
                 testedon_formset=None):
        self.request = request
        self.user = self.request.user
        self.addon = addon
        self.version = version
        self.review_type = get_review_type(request, addon, version)
        # Populated later by subclasses (e.g. ReviewApp.set_data).
        self.files = None
        self.comm_thread = None
        self.attachment_formset = attachment_formset
        self.testedon_formset = testedon_formset
        self.in_rereview = RereviewQueue.objects.filter(
            addon=self.addon).exists()
        self.in_escalate = EscalationQueue.objects.filter(
            addon=self.addon).exists()
    def get_attachments(self):
        """
        Returns a list of triples suitable to be attached to an email.
        """
        try:
            num = int(self.attachment_formset.data['attachment-TOTAL_FORMS'])
        except (ValueError, TypeError):
            # No formset data (or malformed count): nothing to attach.
            return []
        else:
            files = []
            for i in xrange(num):
                attachment_name = 'attachment-%d-attachment' % i
                attachment = self.request.FILES.get(attachment_name)
                if attachment:
                    attachment.open()
                    files.append((attachment.name, attachment.read(),
                                  attachment.content_type))
            return files
    def set_addon(self, **kw):
        """Alters addon using provided kwargs."""
        self.addon.update(_signal=False, **kw)
    def set_reviewed(self):
        """Sets reviewed timestamp on version."""
        self.version.update(_signal=False, reviewed=datetime.now())
    def set_files(self, status, files, hide_disabled_file=False):
        """Change the files to be the new status and hide as appropriate."""
        for file in files:
            file.update(_signal=False, datestatuschanged=datetime.now(),
                        reviewed=datetime.now(), status=status)
            if hide_disabled_file:
                file.hide_disabled_file()
    def create_note(self, action):
        """
        Permissions default to developers + reviewers + Mozilla contacts.
        For escalation/comment, exclude the developer from the conversation.
        """
        details = {'comments': self.data['comments'],
                   'reviewtype': self.review_type}
        if self.files:
            details['files'] = [f.id for f in self.files]
        tested = self.get_tested() # You really should...
        if tested:
            self.data['comments'] += '\n\n%s' % tested
        # Commbadge (the future).
        note_type = comm.ACTION_MAP(action.id)
        self.comm_thread, self.comm_note = create_comm_note(
            self.addon, self.version, self.request.user,
            self.data['comments'], note_type=note_type,
            attachments=self.attachment_formset)
        # ActivityLog (ye olde).
        mkt.log(action, self.addon, self.version, user=self.user,
                created=datetime.now(), details=details)
    def get_tested(self):
        """
        Get string indicating devices/browsers used by reviewer to test.
        Will be automatically attached to the note body.
        """
        tested_on_text = []
        if not self.testedon_formset:
            return ''
        for form in self.testedon_formset.forms:
            if form.cleaned_data:
                dtype = form.cleaned_data.get('device_type', None)
                device = form.cleaned_data.get('device', None)
                version = form.cleaned_data.get('version', None)
                # Build a phrase from whichever of device/version are present.
                if device and version:
                    text = ('%s platform on %s with version %s' %
                            (dtype, device, version))
                elif device and not version:
                    text = '%s platform on %s' % (dtype, device)
                elif not device and version:
                    text = '%s with version %s' % (dtype, version)
                else:
                    text = dtype
                if text:
                    tested_on_text.append(text)
        if not len(tested_on_text):
            return ''
        else:
            return 'Tested on ' + '; '.join(tested_on_text)
class ReviewApp(ReviewBase):
    """Concrete review handler implementing each reviewer action for an app.

    Every ``process_*`` method corresponds to one entry built by
    ``ReviewHelper.get_actions()``; they mutate app/file statuses, manage
    the escalation/re-review queues, and record a communication note plus
    an activity-log entry via ``create_note()``.
    """

    def set_data(self, data):
        # Stash the validated form data and the files of the version under
        # review; both are consumed by the process_* methods below.
        self.data = data
        self.files = self.version.files.all()

    def process_approve(self):
        """
        Handle the approval of apps and/or files.
        """
        if self.addon.has_incomplete_status():
            # Failsafe.
            return

        # Hold onto the status before we change it.
        status = self.addon.status

        # publish_type decides whether approval makes the app publicly
        # listed, unlisted, or merely approved-but-private.
        if self.addon.publish_type == mkt.PUBLISH_IMMEDIATE:
            self._process_public(mkt.STATUS_PUBLIC)
        elif self.addon.publish_type == mkt.PUBLISH_HIDDEN:
            self._process_public(mkt.STATUS_UNLISTED)
        else:
            self._process_private()

        # Note: Post save signals shouldn't happen here. All the set_*()
        # methods pass _signal=False to prevent them from being sent. They are
        # manually triggered in the view after the transaction is committed to
        # avoid multiple indexing tasks getting fired with stale data.
        #
        # This does mean that we need to call update_version() manually to get
        # the addon in the correct state before updating names. We do that,
        # passing _signal=False again to prevent it from sending
        # 'version_changed'. The post_save() that happen in the view will
        # call it without that parameter, sending 'version_changed' normally.
        self.addon.update_version(_signal=False)
        if self.addon.is_packaged:
            self.addon.update_name_from_package_manifest()
            self.addon.update_supported_locales()
        self.addon.resend_version_changed_signal = True
        if self.in_escalate:
            EscalationQueue.objects.filter(addon=self.addon).delete()

        # Clear priority_review flag on approval - it's not persistent.
        if self.addon.priority_review:
            self.addon.update(priority_review=False)

        # Assign reviewer incentive scores.
        return ReviewerScore.award_points(self.request.user, self.addon,
                                          status)

    def _process_private(self):
        """Make an app private."""
        if self.addon.has_incomplete_status():
            # Failsafe.
            return
        self.addon.sign_if_packaged(self.version.pk)
        # If there are no prior PUBLIC versions we set the file status to
        # PUBLIC no matter what ``publish_type`` was chosen since at least one
        # version needs to be PUBLIC when an app is approved to set a
        # ``current_version``.
        if File.objects.filter(version__addon__pk=self.addon.pk,
                               status=mkt.STATUS_PUBLIC).count() == 0:
            self.set_files(mkt.STATUS_PUBLIC, self.version.files.all())
        else:
            self.set_files(mkt.STATUS_APPROVED, self.version.files.all())
        if self.addon.status not in (mkt.STATUS_PUBLIC, mkt.STATUS_UNLISTED):
            self.set_addon(status=mkt.STATUS_APPROVED,
                           highest_status=mkt.STATUS_APPROVED)
        self.set_reviewed()
        self.create_note(mkt.LOG.APPROVE_VERSION_PRIVATE)
        log.info(u'Making %s approved' % self.addon)

    def _process_public(self, status):
        """Changes status to a publicly viewable status."""
        if self.addon.has_incomplete_status():
            # Failsafe.
            return
        self.addon.sign_if_packaged(self.version.pk)
        # Save files first, because set_addon checks to make sure there
        # is at least one public file or it won't make the addon public.
        self.set_files(mkt.STATUS_PUBLIC, self.version.files.all())
        # If app is already an approved status, don't change it when approving
        # a version.
        if self.addon.status not in mkt.WEBAPPS_APPROVED_STATUSES:
            self.set_addon(status=status, highest_status=status)
        self.set_reviewed()
        # Kick off the async publish task (IARC v2 path behind a waffle
        # switch, legacy storefront path otherwise).
        if waffle.switch_is_active('iarc-upgrade-v2'):
            iarc_publish.delay(self.addon.pk)
        else:
            set_storefront_data.delay(self.addon.pk)
        self.create_note(mkt.LOG.APPROVE_VERSION)
        log.info(u'Making %s public' % self.addon)

    def process_reject(self):
        """
        Reject an app.
        Changes status to Rejected.
        Creates Rejection note.
        """
        # Hold onto the status before we change it.
        status = self.addon.status
        self.set_files(mkt.STATUS_REJECTED, self.version.files.all(),
                       hide_disabled_file=True)
        # If this app is not packaged (packaged apps can have multiple
        # versions) or if there aren't other versions with already reviewed
        # files, reject the app also.
        if (not self.addon.is_packaged or
                not self.addon.versions.exclude(id=self.version.id)
                .filter(files__status__in=mkt.REVIEWED_STATUSES).exists()):
            self.set_addon(status=mkt.STATUS_REJECTED)
        if self.in_escalate:
            EscalationQueue.objects.filter(addon=self.addon).delete()
        if self.in_rereview:
            RereviewQueue.objects.filter(addon=self.addon).delete()
        self.create_note(mkt.LOG.REJECT_VERSION)
        log.info(u'Making %s disabled' % self.addon)

        # Assign reviewer incentive scores.
        return ReviewerScore.award_points(self.request.user, self.addon,
                                          status, in_rereview=self.in_rereview)

    def process_request_information(self):
        """Send a message to the authors."""
        self.create_note(mkt.LOG.REQUEST_INFORMATION)
        # Flag the version so the queues can show an info request is pending.
        self.version.update(has_info_request=True)
        log.info(u'Sending reviewer message for %s to authors' % self.addon)

    def process_escalate(self):
        """
        Ask for escalation for an app (EscalationQueue).
        Doesn't change status.
        Creates Escalation note.
        """
        EscalationQueue.objects.get_or_create(addon=self.addon)
        self.create_note(mkt.LOG.ESCALATE_MANUAL)
        log.info(u'Escalated review requested for %s' % self.addon)

    def process_comment(self):
        """
        Editor comment (not visible to developer).
        Doesn't change status.
        Creates Reviewer Comment note.
        """
        self.version.update(has_editor_comment=True)
        self.create_note(mkt.LOG.COMMENT_VERSION)

    def process_manual_rereview(self):
        """
        Adds the app to the rereview queue.
        Doesn't change status.
        Creates Reviewer Comment note.
        """
        RereviewQueue.objects.get_or_create(addon=self.addon)
        self.create_note(mkt.LOG.REREVIEW_MANUAL)
        log.info(u'Re-review manually requested for %s' % self.addon)

    def process_clear_escalation(self):
        """
        Clear app from escalation queue.
        Doesn't change status.
        Creates Reviewer-only note.
        """
        EscalationQueue.objects.filter(addon=self.addon).delete()
        self.create_note(mkt.LOG.ESCALATION_CLEARED)
        log.info(u'Escalation cleared for app: %s' % self.addon)

    def process_clear_rereview(self):
        """
        Clear app from re-review queue.
        Doesn't change status.
        Creates Reviewer-only note.
        """
        RereviewQueue.objects.filter(addon=self.addon).delete()
        self.create_note(mkt.LOG.REREVIEW_CLEARED)
        log.info(u'Re-review cleared for app: %s' % self.addon)
        # Assign reviewer incentive scores.
        return ReviewerScore.award_points(self.request.user, self.addon,
                                          self.addon.status, in_rereview=True)

    def process_disable(self):
        """
        Bans app from Marketplace, clears app from all queues.
        Changes status to Disabled.
        Creates Banned/Disabled note.
        """
        # Banning requires the Apps:Edit permission.
        if not acl.action_allowed(self.request, 'Apps', 'Edit'):
            return

        # Disable disables all files, not just those in this version.
        self.set_files(mkt.STATUS_DISABLED,
                       File.objects.filter(version__addon=self.addon),
                       hide_disabled_file=True)
        self.addon.update(status=mkt.STATUS_DISABLED)
        if self.in_escalate:
            EscalationQueue.objects.filter(addon=self.addon).delete()
        if self.in_rereview:
            RereviewQueue.objects.filter(addon=self.addon).delete()
        # Withdraw the app from the ratings/storefront pipeline as well.
        if waffle.switch_is_active('iarc-upgrade-v2'):
            iarc_unpublish.delay(self.addon.pk)
        else:
            set_storefront_data.delay(self.addon.pk, disable=True)
        self.create_note(mkt.LOG.APP_DISABLED)
        log.info(u'App %s has been banned by a reviewer.' % self.addon)
class ReviewHelper(object):
    """
    A class that builds enough to render the form back to the user and
    process off to the correct handler.
    """

    def __init__(self, request=None, addon=None, version=None,
                 attachment_formset=None, testedon_formset=None):
        # The concrete handler (ReviewApp) performs the actual status
        # changes; this class only decides which actions are available
        # and dispatches to the handler in process().
        self.handler = None
        self.required = {}
        self.addon = addon
        self.version = version
        self.all_files = version and version.files.all()
        self.attachment_formset = attachment_formset
        self.testedon_formset = testedon_formset
        self.handler = ReviewApp(request, addon, version,
                                 attachment_formset=self.attachment_formset,
                                 testedon_formset=self.testedon_formset)
        self.review_type = self.handler.review_type
        self.actions = self.get_actions()

    def set_data(self, data):
        # Forward the validated review-form data to the handler.
        self.handler.set_data(data)

    def get_actions(self):
        """Build the ordered mapping of actions available for this app.

        Each value bundles the handler method with the label/description
        used to render the review form. Which actions appear depends on
        app/file status, packaging, privilege level and queue membership.
        """
        public = {
            'method': self.handler.process_approve,
            'minimal': False,
            'label': _lazy(u'Approve'),
            'details': _lazy(u'This will approve the app and allow the '
                             u'author(s) to publish it.')}
        reject = {
            'method': self.handler.process_reject,
            'label': _lazy(u'Reject'),
            'minimal': False,
            'details': _lazy(u'This will reject the app, remove it from '
                             u'the review queue and un-publish it if already '
                             u'published.')}
        info = {
            'method': self.handler.process_request_information,
            'label': _lazy(u'Message developer'),
            'minimal': True,
            'details': _lazy(u'This will send the author(s) - and other '
                             u'thread subscribers - a message. This will not '
                             u'change the app\'s status.')}
        escalate = {
            'method': self.handler.process_escalate,
            'label': _lazy(u'Escalate'),
            'minimal': True,
            'details': _lazy(u'Flag this app for an admin to review. The '
                             u'comments are sent to the admins, '
                             u'not the author(s).')}
        comment = {
            'method': self.handler.process_comment,
            'label': _lazy(u'Private comment'),
            'minimal': True,
            'details': _lazy(u'Make a private reviewer comment on this app. '
                             u'The message won\'t be visible to the '
                             u'author(s), and no notification will be sent '
                             u'them.')}
        manual_rereview = {
            'method': self.handler.process_manual_rereview,
            'label': _lazy(u'Request Re-review'),
            'minimal': True,
            'details': _lazy(u'Add this app to the re-review queue. Any '
                             u'comments here won\'t be visible to the '
                             u'author(s), and no notification will be sent to '
                             u'them.')}
        clear_escalation = {
            'method': self.handler.process_clear_escalation,
            'label': _lazy(u'Clear Escalation'),
            'minimal': True,
            'details': _lazy(u'Clear this app from the escalation queue. The '
                             u'author(s) will get no email or see comments '
                             u'here.')}
        clear_rereview = {
            'method': self.handler.process_clear_rereview,
            'label': _lazy(u'Clear Re-review'),
            'minimal': True,
            'details': _lazy(u'Clear this app from the re-review queue. The '
                             u'author(s) will get no email or see comments '
                             u'here.')}
        disable = {
            'method': self.handler.process_disable,
            'label': _lazy(u'Ban app'),
            'minimal': True,
            'details': _lazy(u'Ban the app from Marketplace. Similar to '
                             u'Reject but the author(s) can\'t resubmit. To '
                             u'only be used in extreme cases.')}

        actions = OrderedDict()

        if not self.version:
            # Return early if there is no version, this app is incomplete.
            return actions

        file_status = self.version.files.values_list('status', flat=True)
        multiple_versions = (File.objects.exclude(version=self.version)
                                         .filter(
                                             version__addon=self.addon,
                                             status__in=mkt.REVIEWED_STATUSES)
                                         .exists())
        # Privileged apps may only be reviewed by reviewers holding the
        # Apps:ReviewPrivileged permission.
        show_privileged = (not self.version.is_privileged or
                           acl.action_allowed(self.handler.request, 'Apps',
                                              'ReviewPrivileged'))

        # Public.
        if ((self.addon.is_packaged and
             mkt.STATUS_PUBLIC not in file_status and show_privileged) or
            (not self.addon.is_packaged and
             self.addon.status != mkt.STATUS_PUBLIC)):
            actions['public'] = public

        # Reject.
        if self.addon.is_packaged and show_privileged:
            # Packaged apps reject the file only, or the app itself if there's
            # only a single version.
            if (not multiple_versions and
                    self.addon.status not in [mkt.STATUS_REJECTED,
                                              mkt.STATUS_DISABLED]):
                actions['reject'] = reject
            elif multiple_versions and mkt.STATUS_DISABLED not in file_status:
                actions['reject'] = reject
        elif not self.addon.is_packaged:
            # Hosted apps reject the app itself.
            if self.addon.status not in [mkt.STATUS_REJECTED,
                                         mkt.STATUS_DISABLED]:
                actions['reject'] = reject

        # Ban/Disable.
        if (acl.action_allowed(self.handler.request, 'Apps', 'Edit') and (
                self.addon.status != mkt.STATUS_DISABLED or
                mkt.STATUS_DISABLED not in file_status)):
            actions['disable'] = disable

        # Clear re-review.
        if self.handler.in_rereview:
            actions['clear_rereview'] = clear_rereview
        else:
            # Manual re-review.
            actions['manual_rereview'] = manual_rereview

        # Clear escalation.
        if self.handler.in_escalate:
            actions['clear_escalation'] = clear_escalation
        else:
            # Escalate.
            actions['escalate'] = escalate

        # Request info and comment are always shown.
        actions['info'] = info
        actions['comment'] = comment

        return actions

    def process(self):
        """Invoke the handler method matching the submitted action."""
        action = self.handler.data.get('action', '')
        if not action:
            raise NotImplementedError
        return self.actions[action]['method']()
def clean_sort_param(request, date_sort='created'):
    """Sanitize the ``sort``/``order`` GET parameters for queue tables.

    Falls back to sorting by ``date_sort`` ascending whenever the request
    carries empty or unrecognized values.
    """
    allowed_sorts = ('name', 'created', 'nomination')
    sort = request.GET.get('sort', date_sort)
    if sort not in allowed_sorts:
        sort = date_sort
    order = request.GET.get('order', 'asc')
    if order not in ('asc', 'desc'):
        order = 'asc'
    return sort, order
def clean_sort_param_es(request, date_sort='created'):
    """Sanitize sort/order GET parameters and map them to ES field names.

    Unknown values fall back to ``date_sort`` ascending; known values are
    translated to the Elasticsearch document field they sort on.
    """
    es_fields = {
        'name': 'name_sort',
        'nomination': 'latest_version.nomination_date',
    }
    sort = request.GET.get('sort', date_sort)
    if sort not in ('name', 'created', 'nomination'):
        sort = date_sort
    sort = es_fields.get(sort, date_sort)
    order = request.GET.get('order', 'asc')
    if order not in ('asc', 'desc'):
        order = 'asc'
    return sort, order
def create_sort_link(pretty_name, sort_field, get_params, sort, order):
    """Generate an HTML sort link for a queue table header.

    pretty_name -- label displayed in the table header
    sort_field -- value of the ``sort`` GET parameter for this column
    get_params -- extra (key, value) pairs carried through in the link
    sort -- the sort type currently in effect
    order -- the sort order currently in effect
    """
    # Clicking the currently-ascending column flips it to descending;
    # any other click sorts ascending.
    if sort == sort_field and order == 'asc':
        next_order = 'desc'
    else:
        next_order = 'asc'
    get_params.append(('sort', sort_field))
    get_params.append(('order', next_order))

    # Highlight the active column with a direction sprite.
    url_class = ''
    if sort == sort_field:
        url_class = ' class="sort-icon ed-sprite-sort-%s"' % order
    return u'<a href="?%s"%s>%s</a>' % (urllib.urlencode(get_params, True),
                                        url_class, pretty_name)
class AppsReviewing(object):
    """
    Class to manage the list of apps a reviewer is currently reviewing.
    Data is stored in memcache.
    """

    def __init__(self, request):
        self.request = request
        self.user_id = request.user.id
        # Per-reviewer cache key holding a comma-separated list of app ids.
        self.key = '%s:myapps:%s' % (settings.CACHE_PREFIX, self.user_id)

    def get_apps(self):
        """Return the cached apps whose viewing lock we still hold."""
        active_ids = []
        cached = cache.get(self.key)
        if cached:
            for app_id in cached.split(','):
                holder = cache.get(
                    '%s:review_viewing:%s' % (settings.CACHE_PREFIX, app_id))
                # Drop entries whose per-app lock expired or was taken over.
                if holder and holder == self.user_id:
                    active_ids.append(app_id)
        return [{
            'app': app,
            'app_attrs': json.dumps(
                product_as_dict(self.request, app, False, 'reviewer'),
                cls=JSONEncoder),
        } for app in Webapp.objects.filter(id__in=active_ids)]

    def add(self, addon_id):
        """Add ``addon_id`` to this reviewer's cached list (deduplicated)."""
        cached = cache.get(self.key)
        current = cached.split(',') if cached else []
        current.append(addon_id)
        cache.set(self.key, ','.join(map(str, set(current))),
                  mkt.EDITOR_VIEWING_INTERVAL * 2)
def log_reviewer_action(addon, user, msg, action, **kwargs):
    """Record a reviewer action both as a comm note and an activity log."""
    version = addon.latest_version
    create_comm_note(addon, version, user, msg,
                     note_type=comm.ACTION_MAP(action.id))
    mkt.log(action, addon, version, details={'comments': msg}, **kwargs)
def search_webapps_and_homescreens():
    """Build an ES search spanning both the webapp and homescreen indexes,
    excluding the indexer's hidden fields from the returned source."""
    search = Search(
        using=WebappIndexer.get_es(),
        index=[settings.ES_INDEXES['homescreen'],
               settings.ES_INDEXES['webapp']],
        doc_type=['homescreen', 'webapp'])
    return search.extra(_source={'exclude': WebappIndexer.hidden_fields})
class ReviewersQueuesHelper(object):
    """Build the querysets / ES searches backing each reviewer queue.

    With ``use_es=True`` the ``get_*_queue`` methods return Elasticsearch
    searches; otherwise they return Django ORM querysets. ``request`` is
    only needed by the sort helpers, which read its GET parameters.
    """

    def __init__(self, request=None, use_es=False):
        self.request = request
        self.use_es = use_es

    @cached_property
    def excluded_ids(self):
        # We need to exclude Escalated Apps from almost all queries, so store
        # the result once.
        return self.get_escalated_queue().values_list('addon', flat=True)

    def get_escalated_queue(self):
        """Queue of apps/homescreens flagged for escalation."""
        # Apps and homescreens flagged for escalation go in this queue.
        if self.use_es:
            must = [
                es_filter.Term(is_disabled=False),
                es_filter.Term(is_escalated=True),
            ]
            return search_webapps_and_homescreens().filter('bool', must=must)
        return EscalationQueue.objects.filter(
            addon__disabled_by_user=False)

    def get_pending_queue(self):
        """Queue of unreviewed (pending) apps, excluding homescreens."""
        # Unreviewed apps go in this queue.
        if self.use_es:
            must = [
                es_filter.Term(status=mkt.STATUS_PENDING),
                es_filter.Term(**{'latest_version.status':
                                  mkt.STATUS_PENDING}),
                es_filter.Term(is_escalated=False),
                es_filter.Term(is_disabled=False),
            ]
            return WebappIndexer.search().filter('bool', must=must)
        return (Version.objects.filter(
            files__status=mkt.STATUS_PENDING,
            addon__disabled_by_user=False,
            addon__status=mkt.STATUS_PENDING)
            .exclude(addon__id__in=self.excluded_ids)
            .exclude(addon__tags__tag_text='homescreen')
            .order_by('nomination', 'created')
            .select_related('addon', 'files').no_transforms())

    def get_homescreen_queue(self):
        """Queue of homescreens with an unreviewed (pending) version."""
        # Both unreviewed homescreens and published homescreens with new
        # unreviewed versions go in this queue.
        if self.use_es:
            must = [
                es_filter.Term(**{'latest_version.status':
                                  mkt.STATUS_PENDING}),
                es_filter.Term(is_escalated=False),
                es_filter.Term(is_disabled=False),
            ]
            return HomescreenIndexer.search().filter('bool', must=must)
        return (Version.objects.filter(
            files__status=mkt.STATUS_PENDING,
            addon__disabled_by_user=False,
            addon__tags__tag_text='homescreen')
            .exclude(addon__id__in=self.excluded_ids)
            .order_by('nomination', 'created')
            .select_related('addon', 'files').no_transforms())

    def get_rereview_queue(self):
        """Queue of apps/homescreens flagged for re-review."""
        # Apps and homescreens flagged for re-review go in this queue.
        if self.use_es:
            must = [
                es_filter.Term(is_rereviewed=True),
                es_filter.Term(is_disabled=False),
                es_filter.Term(is_escalated=False),
            ]
            return search_webapps_and_homescreens().filter('bool', must=must)
        return (RereviewQueue.objects.
                filter(addon__disabled_by_user=False).
                exclude(addon__in=self.excluded_ids))

    def get_updates_queue(self):
        """Queue of approved packaged apps with a new pending version."""
        # Updated apps, i.e. apps that have been published but have new
        # unreviewed versions, go in this queue.
        if self.use_es:
            must = [
                es_filter.Terms(status=mkt.WEBAPPS_APPROVED_STATUSES),
                es_filter.Term(**{'latest_version.status':
                                  mkt.STATUS_PENDING}),
                es_filter.Terms(app_type=[mkt.ADDON_WEBAPP_PACKAGED,
                                          mkt.ADDON_WEBAPP_PRIVILEGED]),
                es_filter.Term(is_disabled=False),
                es_filter.Term(is_escalated=False),
            ]
            return WebappIndexer.search().filter('bool', must=must)
        return (Version.objects.filter(
            # Note: this will work as long as we disable files of existing
            # unreviewed versions when a new version is uploaded.
            files__status=mkt.STATUS_PENDING,
            addon__disabled_by_user=False,
            addon__is_packaged=True,
            addon__status__in=mkt.WEBAPPS_APPROVED_STATUSES)
            .exclude(addon__id__in=self.excluded_ids)
            .exclude(addon__tags__tag_text='homescreen')
            .order_by('nomination', 'created')
            .select_related('addon', 'files').no_transforms())

    def get_moderated_queue(self):
        """Queue of user reviews flagged for moderation."""
        return (Review.objects
                .exclude(Q(addon__isnull=True) | Q(reviewflag__isnull=True))
                .exclude(addon__status=mkt.STATUS_DELETED)
                .filter(editorreview=True)
                .order_by('reviewflag__created'))

    def get_abuse_queue(self):
        """Apps having unread abuse reports."""
        report_ids = (AbuseReport.objects
                      .exclude(addon__isnull=True)
                      .exclude(addon__status=mkt.STATUS_DELETED)
                      .filter(read=False)
                      .select_related('addon')
                      .values_list('addon', flat=True))
        return Webapp.objects.filter(id__in=report_ids).order_by('created')

    def get_abuse_queue_websites(self):
        """Websites having unread abuse reports."""
        report_ids = (AbuseReport.objects
                      .exclude(website__isnull=True)
                      .exclude(website__status=mkt.STATUS_DELETED)
                      .filter(read=False)
                      .select_related('website')
                      .values_list('website', flat=True))
        return Website.objects.filter(id__in=report_ids).order_by('created')

    def sort(self, qs, date_sort='created'):
        """Given a queue queryset, return the sorted version."""
        if self.use_es:
            return self._do_sort_es(qs, date_sort)

        if qs.model == Webapp:
            return self._do_sort_webapp(qs, date_sort)

        return self._do_sort_queue_obj(qs, date_sort)

    def _do_sort_webapp(self, qs, date_sort):
        """
        Column sorting logic based on request GET parameters.
        """
        sort_type, order = clean_sort_param(self.request, date_sort=date_sort)
        order_by = ('-' if order == 'desc' else '') + sort_type

        # Sort.
        if sort_type == 'name':
            # Sorting by name translation.
            return order_by_translation(qs, order_by)
        else:
            # priority_review bubbles prioritized apps to the top first.
            return qs.order_by('-priority_review', order_by)

    def _do_sort_queue_obj(self, qs, date_sort):
        """
        Column sorting logic based on request GET parameters.
        Deals with objects with joins on the Addon (e.g. RereviewQueue,
        Version). Returns qs of apps.
        """
        sort_type, order = clean_sort_param(self.request, date_sort=date_sort)
        sort_str = sort_type

        if sort_type not in [date_sort, 'name']:
            sort_str = 'addon__' + sort_type

        # sort_str includes possible joins when ordering.
        # sort_type is the name of the field to sort on without desc/asc
        # markers. order_by is the name of the field to sort on with desc/asc
        # markers.
        order_by = ('-' if order == 'desc' else '') + sort_str

        # Sort.
        if sort_type == 'name':
            # Sorting by name translation through an addon foreign key.
            return order_by_translation(
                Webapp.objects.filter(
                    id__in=qs.values_list('addon', flat=True)), order_by)

        # Convert sorted queue object queryset to sorted app queryset.
        sorted_app_ids = (qs.order_by('-addon__priority_review', order_by)
                            .values_list('addon', flat=True))
        qs = Webapp.objects.filter(id__in=sorted_app_ids)
        return manual_order(qs, sorted_app_ids, 'addons.id')

    def _do_sort_es(self, qs, date_sort):
        # ES sorting: the cleaned sort name is already mapped to the
        # document field, so the search just takes the prefixed order.
        sort_type, order = clean_sort_param_es(self.request,
                                               date_sort=date_sort)
        order_by = ('-' if order == 'desc' else '') + sort_type
        return qs.sort(order_by)
| ingenioustechie/zamboni | mkt/reviewers/utils.py | Python | bsd-3-clause | 33,546 |
from bottle import template, route, run, request
from imp import load_source
from argparse import ArgumentParser
from os.path import basename, splitext
from subprocess import check_output
import os
class ScriptRender(object):
    """Render a script as an HTML page."""

    def __init__(self, script):
        # Path of the CLI script being wrapped.
        self.script = script

    def render(self):
        """Render the page template with this object as context."""
        return template(self.get_template(), {'self': self})

    def get_template(self):
        return 'page'

    def get_title(self):
        """Script file name without directory or extension."""
        name, _ext = splitext(basename(self.script))
        return name

    def get_argparsers(self):
        """Import the script and return every ArgumentParser it defines."""
        module = load_source('', self.script)
        is_parser = lambda obj: isinstance(obj, ArgumentParser)
        return filter(is_parser, module.__dict__.values())

    def render_argparser(self, argparser):
        """Render one of the script's parsers as an HTML form."""
        return ArgparserRender(argparser).render()
class ArgparserRender(object):
    """Render an argparse object as an HTML form."""

    def __init__(self, argparser):
        self.argparser = argparser

    def get_template(self):
        return 'form'

    def render(self):
        """Render the form template with this object as context."""
        return template(self.get_template(), {'self': self})

    def get_description(self):
        """Parser description, shown above the form."""
        return self.argparser.description

    def get_epilog(self):
        """Parser epilog, shown below the form."""
        return self.argparser.epilog

    def get_groups(self):
        """The parser's action groups (positionals, optionals, custom)."""
        return self.argparser._action_groups

    def render_group(self, group):
        """Render one action group as an HTML formset."""
        return GroupRender(group).render()
class GroupRender(object):
    """Render an action group as an HTML formset."""

    def __init__(self, group):
        self.group = group

    def get_template(self):
        return 'formset'

    def render(self):
        """Render the formset template with this object as context."""
        return template(self.get_template(), {'self': self})

    def get_title(self):
        """The group's title, used as the formset legend."""
        return self.group.title

    def get_actions(self):
        """All actions in the group except the auto-generated help action."""
        not_help = lambda action: type(action).__name__ != '_HelpAction'
        return filter(not_help, self.group._group_actions)

    def render_action(self, action):
        """Render one action as an HTML field."""
        return ActionRender(action).render()
class ActionRender(object):
    """Render a single argparse action as an HTML form field."""

    def __init__(self, action):
        self.action = action

    def render(self):
        """Render the field template with this object as context."""
        return template(self.get_template(), {'self': self})

    def get_template(self):
        return 'field'

    def get_flag(self):
        """First option string (e.g. '--num'), or None for positionals."""
        opt = self.action.option_strings
        if len(opt) > 0:
            return opt[0]
        return None

    def get_name(self):
        """Form field name: the flag stripped of dashes, or the dest."""
        flag = self.get_flag()
        if flag:
            return flag.strip('-')
        return self.action.dest

    def get_required(self):
        """Value for the HTML 'required' attribute ('' when optional)."""
        return 'required' if self.action.required else ''

    def get_default(self):
        """Default value to prefill; callables are shown by name."""
        value = self.action.default
        if hasattr(value, '__call__'):
            return value.__name__
        return value

    def get_help(self):
        """Help text for the field label."""
        return self.action.help

    def get_type(self):
        """Map the argparse action to an HTML input type.

        store_true/store_false/store_const become checkboxes; typed store
        actions map int -> 'number' and file -> 'file'; everything else
        falls back to a plain text input ('').
        """
        kls = type(self.action).__name__
        fmt = '_Store%sAction'
        if kls in [fmt % x for x in ('Const', 'True', 'False')]:
            return 'checkbox'
        elif kls == '_StoreAction' and self.action.type is not None:
            # Bug fix: a plain add_argument('--x') has type None, which
            # used to raise AttributeError here; treat it as text input.
            typ = self.action.type.__name__
            mpg = {'int': 'number',
                   'file': 'file'}
            if typ in mpg:
                return mpg[typ]
        return ''
@route('/')
def send_form():
    # GET /: render the HTML form generated from the wrapped script's
    # ArgumentParser(s), using the global ScriptRender set up in __main__.
    return __R__.render()
@route('/', method='POST')
def process_form():
    """POST /: translate submitted form values back into CLI arguments,
    run the wrapped script with them, and return its stdout."""
    args = []
    for argparser in __R__.get_argparsers():
        for group in ArgparserRender(argparser).get_groups():
            for action in GroupRender(group).get_actions():
                renderer = ActionRender(action)
                value = request.forms.get(renderer.get_name())
                if not value:
                    continue
                # Flagged options get their flag re-attached; positionals
                # contribute their value only.
                flag = renderer.get_flag()
                if flag:
                    args.append(flag)
                args.append(value)
    command = ['python'] + [__R__.script] + args
    print(command)
    return check_output(command)
# Recline's own CLI: it takes the target script to wrap as a web app.
parser = ArgumentParser(description='Web Apps from CLI scripts.')
# NOTE(review): `file` is the Python 2 builtin; argparse opens the script
# eagerly, which also validates that it exists and is readable.
parser.add_argument('script', type=file)
if __name__ == '__main__':
    args = parser.parse_args()
    # `global` at module level is a no-op; kept as-is.
    global __R__
    __R__ = ScriptRender(args.script.name)
    run(host='localhost', port=8080)
run(host='localhost', port=8080) | alixedi/recline | recline.py | Python | mit | 3,765 |
# -*- coding: utf8 -*-
#
# Copyright (C) 2014 NDP Systèmes (<http://www.ndp-systemes.fr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
#
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import test_supplier_price_validity
| odoousers2014/odoo-addons-supplier_price | product_supplier_price_validity/tests/__init__.py | Python | agpl-3.0 | 820 |
# vim: et:sta:bs=2:sw=4:
from mirte.core import Module
from joyce.base import JoyceChannel
from joyce.comet import CometJoyceClient
class MirrorChannelClass(JoyceChannel):
    """Joyce channel that mirrors occupation data from a remote server."""

    def __init__(self, server, *args, **kwargs):
        super(MirrorChannelClass, self).__init__(*args, **kwargs)
        self.server = server
        # Dispatch table: message `type` -> bound handler.
        self.msg_map = {
            'occupation': self._msg_occupation,
            'welcome': self._msg_welcome,
            'occupation_update': self._msg_occupation_update}
        self.received_welcome = False
        self._send_initial_messages()

    def _send_initial_messages(self):
        """Subscribe to occupation messages and request a full dump."""
        self.send_message({'type': 'set_msgFilter',
                           'occupation': None})
        self.send_message({'type': 'get_occupation'})

    def handle_message(self, data):
        """Route an incoming message to its handler; ignore unknown types."""
        handler = self.msg_map.get(data.get('type'))
        if handler is not None:
            handler(data)

    def _msg_welcome(self, data):
        if not self.received_welcome:
            self.received_welcome = True
            return
        # A second `welcome` means the mirrored server restarted and the
        # channel has been reset: resubscribe from scratch.
        self.l.warn("Received another `welcome' message: the channel has "+
                    "been reset.")
        self._send_initial_messages()

    def _msg_occupation(self, data):
        """Full occupation dump: push everything into the local state."""
        occupation = data['occupation']
        self.l.info("Received occupation message: %s entries",
                    len(occupation))
        self.server.state.push_occupation_changes(occupation)

    def _msg_occupation_update(self, data):
        """Incremental update: push only the changed entries."""
        self.server.state.push_occupation_changes(data['update'])
class Mirror(CometJoyceClient):
    """Comet Joyce client that mirrors a remote server's occupation data.

    It wires MirrorChannelClass as the channel implementation and passes
    itself to each channel so the channel can push state updates back.
    """

    def __init__(self, *args, **kwargs):
        super(Mirror, self).__init__(*args, **kwargs)
        self.channel_class = self._channel_constructor

    def _channel_constructor(self, *args, **kwargs):
        # Inject this client as `server` so the channel can reach
        # self.state via it.
        return MirrorChannelClass(self, *args, **kwargs)

    def run(self):
        # NOTE we assume CometJoyceClient.run returns immediately
        super(Mirror, self).run()
        self.channel = self.create_channel()
| bwesterb/tkbd | src/mirror.py | Python | agpl-3.0 | 2,060 |
# -*- coding: utf-8 -*-
# Copyright (c) 2006 - 2014 Detlev Offenbach <detlev@die-offenbachs.de>
#
"""
Module implementing the Corba configuration page.
"""
from __future__ import unicode_literals
from PyQt5.QtCore import pyqtSlot
from E5Gui.E5Completers import E5FileCompleter
from E5Gui import E5FileDialog
from .ConfigurationPageBase import ConfigurationPageBase
from .Ui_CorbaPage import Ui_CorbaPage
import Preferences
import Utilities
import UI.PixmapCache
class CorbaPage(ConfigurationPageBase, Ui_CorbaPage):
    """
    Class implementing the Corba configuration page.
    """
    def __init__(self):
        """
        Constructor
        """
        super(CorbaPage, self).__init__()
        self.setupUi(self)
        self.setObjectName("CorbaPage")
        
        self.idlButton.setIcon(UI.PixmapCache.getIcon("open.png"))
        
        self.idlCompleter = E5FileCompleter(self.idlEdit)
        
        # set initial values from the stored preference
        self.idlEdit.setText(Preferences.getCorba("omniidl"))
        
    def save(self):
        """
        Public slot to save the Corba configuration.
        """
        Preferences.setCorba("omniidl", self.idlEdit.text())
    
    @pyqtSlot()
    def on_idlButton_clicked(self):
        """
        Private slot to handle the IDL compiler selection.
        """
        # Renamed local from `file` to avoid shadowing the builtin.
        path = E5FileDialog.getOpenFileName(
            self,
            self.tr("Select IDL compiler"),
            self.idlEdit.text(),
            "")
        
        if path:
            self.idlEdit.setText(Utilities.toNativeSeparators(path))
def create(dlg):
    """
    Module function to create the configuration page.
    
    @param dlg reference to the configuration dialog (unused here)
    @return reference to the instantiated page (ConfigurationPageBase)
    """
    return CorbaPage()
| davy39/eric | Preferences/ConfigurationPages/CorbaPage.py | Python | gpl-3.0 | 1,837 |
#!/usr/bin/env python
import IPython
import sys
import numpy as np
import scipy.sparse as sp
from collections import Counter
import itertools
import time
import struct
try:
import numexpr as ne
have_numexpr = True
except ImportError:
have_numexpr = False
# A bigram is two bytes, so there are 2**16 possible histogram bins.
MAX_BIGRAM = 2**16

# NOTE(review): the file holds binary records read via struct/np.fromfile;
# on Python 2/Unix text mode behaves like binary, but 'rb' would be safer.
f = open(sys.argv[2])
# The file starts with the trace's native word size (little-endian int),
# which determines the width of the caller/pc/cr3 fields that follow.
ulong_size = struct.unpack("<i", f.read(4))[0]
ulong_fmt = '<u%d' % ulong_size
# Hex output format sized to the word width (2 hex digits per byte).
FMT = "%%0%dx" % (ulong_size*2)
# Per-tap-point record header: identifying triple plus bin count.
rec_hdr = np.dtype( [ ('caller', ulong_fmt), ('pc', ulong_fmt), ('cr3', ulong_fmt), ('nbins', '<I4') ] )
# Each histogram entry: bigram value (16-bit key) and its count.
hist_entry = [ ('key', '<H'), ('value', '<u4') ]

# Accumulators for a COO sparse matrix: one row per tap point,
# one column per possible bigram.
meta = []
data = []
rows = []
cols = []

print >>sys.stderr, "Parsing file..."
i = 0
while True:
    # Empty array from fromfile => EOF (falsy), so stop.
    hdr = np.fromfile(f, dtype=rec_hdr, count=1)
    if not hdr: break
    entries = np.fromfile(f, dtype=hist_entry, count=hdr['nbins'])
    # Might happen if a tap only wrote one byte. In that case there's no bigram
    if entries.size == 0: continue
    #if len(entries) < 5: continue
    #print >>sys.stderr, "Parsed entry with %d bins, file offset=%d" % (hdr['nbins'],f.tell())
    cols.extend(entries['key'])
    rows.extend([i]*len(entries))
    data.extend(entries['value'])
    meta.append(hdr)
    i += 1
f.close()
print >>sys.stderr, "Parsed", i, "tap points"
print >>sys.stderr, "Converting to nparrays..."
data = np.array(data,dtype=np.float32)
rows = np.array(rows)
cols = np.array(cols)
print >>sys.stderr, "Creating sparse matrix..."
# Rows = tap points, columns = bigram values, entries = raw counts.
spdata = sp.coo_matrix((data,[rows,cols]), (i, MAX_BIGRAM), dtype=np.float32)
print >>sys.stderr, "Converting to CSR format..."
spdata = spdata.tocsr()
print >>sys.stderr, "Normalizing..."
# Divide each row by its sum so every row becomes a probability
# distribution over bigrams.
row_sums = np.array(spdata.sum(axis=1))[:,0]
row_indices, col_indices = spdata.nonzero()
spdata.data /= row_sums[row_indices]
print >>sys.stderr, "Loading training data..."
# Training histogram: raw bigram counts, normalized to a distribution.
# NOTE(review): assumes the training file is a dump of native ints with
# exactly MAX_BIGRAM entries -- confirm against the producer.
training = np.fromfile(open(sys.argv[1]),dtype=np.int).astype(np.float32)
training /= training.sum()
# Compute the Jensen-Shannon divergence between the training
# distribution T and every tap-point distribution P_i:
#   JSD(P_i, T) = H((P_i + T)/2) - (H(P_i) + H(T))/2
# Below, `rhs` is the averaged-entropy term and `lhs` is the entropy of
# the mixture; `dists = lhs - rhs` (in nats, since np.log is used).
st = time.time()
# \sum{H(P_i)}
print >>sys.stderr, "Computing sum(H(Pi))..."
st1 = time.time()
# H(T): only nonzero entries contribute (avoids log(0)).
htrain = -(training[training.nonzero()]*np.log(training[training.nonzero()])).sum()
# Work on a copy so spdata's normalized probabilities stay intact.
hcopy = spdata.copy()
if have_numexpr:
    x = hcopy.data
    hcopy.data = ne.evaluate("x*log(x)")
else:
    hcopy.data = hcopy.data*np.log(hcopy.data)
# Row sums of p*log(p), negated to get per-row entropies H(P_i).
hents = hcopy.sum(axis=1)
hents = -hents
# Delete the copy; not using it any more
del hcopy
# rhs = (H(P_i) + H(T)) / 2 for every row.
if have_numexpr:
    rhs = ne.evaluate("(hents + htrain) / 2")
else:
    rhs = (hents + htrain) / 2
del hents
ed1 = time.time()
print >>sys.stderr, "Computed in %f seconds" % (ed1-st1)
# H(\sum{P_i})
print >>sys.stderr, "Computing H(sum(Pi))..."
# Tile the training vector into an Nx65536 (sparse) matrix
print >>sys.stderr, "Creating training matrix..."
training = sp.csr_matrix(training.astype(np.float32))
# Create the CSR matrix directly
# Building indptr/indices/data by hand avoids N separate row stacks:
# every row of mtraining is an identical copy of the training vector.
stt = time.time()
tindptr = np.arange(0, len(training.indices)*spdata.shape[0]+1, len(training.indices), dtype=np.int32)
tindices = np.tile(training.indices, spdata.shape[0])
tdata = np.tile(training.data, spdata.shape[0])
mtraining = sp.csr_matrix((tdata, tindices, tindptr), shape=spdata.shape)
edt = time.time()
print >>sys.stderr, "Created in %f seconds" % (edt-stt)
st2 = time.time()
# Mixture M = (P_i + T)/2, row by row.
spi = spdata+mtraining
if have_numexpr:
    x = spi.data
    spi.data = ne.evaluate("x/2")
else:
    spi.data /= 2
# m*log(m) over the nonzero entries of the mixture.
if have_numexpr:
    x = spi.data
    spi.data = ne.evaluate("x*log(x)")
else:
    spi.data = spi.data*np.log(spi.data)
# lhs = H(M) per row.
lhs = spi.sum(axis=1)
lhs = -lhs
del spi
dists = lhs - rhs
# Flatten the Nx1 matrix result into a 1-D array.
dists = np.asarray(dists.T)[0]
ed2 = time.time()
print >>sys.stderr, "Computed in %f seconds" % (ed2-st2)
ed = time.time()
print >>sys.stderr, "Finished in %f seconds" % (ed-st)
# Report tap points in ascending divergence order, i.e. the ones most
# similar to the training distribution come first.
sorted_dists = np.argsort(dists)
for i in sorted_dists:
    row = meta[i]
    print (FMT + " " + FMT + " " + FMT + " %f") % (row['caller'], row['pc'], row['cr3'], dists[i])
| KernelAnalysisPlatform/kvalgrind | scripts/nearest_bigram_js.py | Python | gpl-3.0 | 3,814 |
# Download the Python helper library from twilio.com/docs/python/install
import os
from twilio.rest import Client
from datetime import date
# Your Account Sid and Auth Token from twilio.com/user/account
# To set up environmental variables, see http://twil.io/secure
# (raises KeyError if either variable is unset -- intentional fail-fast)
account_sid = os.environ['TWILIO_ACCOUNT_SID']
auth_token = os.environ['TWILIO_AUTH_TOKEN']
client = Client(account_sid, auth_token)
# A list of notification objects with the properties described above
# Filtered to notifications after 2009-07-06.
# NOTE(review): log="1" selects a log level (presumably warnings) --
# confirm the meaning against the Twilio Notifications API reference.
notifications = client.notifications \
    .list(message_date_after=date(2009, 7, 6), log="1")
for notification in notifications:
    print(notification.request_url)
| TwilioDevEd/api-snippets | rest/notification/list-get-example-3/list-get-example-3.7.x.py | Python | mit | 655 |
#!/usr/bin/env python
import urlparse
import urllib2
import BaseHTTPServer
import unittest
import hashlib
from test import test_support
mimetools = test_support.import_module('mimetools', deprecated=True)
threading = test_support.import_module('threading')
# Loopback http server infrastructure
class LoopbackHttpServer(BaseHTTPServer.HTTPServer):
    """HTTPServer variant tuned for loopback (127.0.0.1) test runs.

    The listening socket gets a very short timeout so the thread that
    drives handle_request() can notice a stop request quickly instead
    of blocking indefinitely in accept().
    """

    def __init__(self, server_address, RequestHandlerClass):
        BaseHTTPServer.HTTPServer.__init__(
            self, server_address, RequestHandlerClass)
        # A short accept() timeout makes the server easy to shut down.
        self.socket.settimeout(1.0)

    def get_request(self):
        """Accept a connection (overrides the BaseHTTPServer hook)."""
        conn, addr = self.socket.accept()
        # Loopback traffic is fast, so a per-connection timeout mostly
        # serves to make deadlocks less likely, not to bound latency.
        conn.settimeout(10.0)
        return (conn, addr)
class LoopbackHttpServerThread(threading.Thread):
    """Background thread that serves a LoopbackHttpServer until stopped."""

    def __init__(self, request_handler):
        threading.Thread.__init__(self)
        self._stop = False              # polled by run(), set by stop()
        self.ready = threading.Event()  # signalled once run() has started
        # Force non-persistent connections so each request stands alone.
        request_handler.protocol_version = "HTTP/1.0"
        self.httpd = LoopbackHttpServer(
            ('127.0.0.1', 0), request_handler)
        # Port 0 above means "pick any free port"; record what was chosen
        # so tests can build URLs against it.
        self.port = self.httpd.server_port

    def stop(self):
        """Request shutdown and block until the serving thread exits."""
        self._stop = True
        self.join()

    def run(self):
        self.ready.set()
        # handle_request() returns after each request (or on the socket
        # timeout), giving us a chance to observe the stop flag.
        while not self._stop:
            self.httpd.handle_request()
# Authentication infrastructure
class DigestAuthHandler:
"""Handler for performing digest authentication."""
def __init__(self):
self._request_num = 0
self._nonces = []
self._users = {}
self._realm_name = "Test Realm"
self._qop = "auth"
def set_qop(self, qop):
self._qop = qop
def set_users(self, users):
assert isinstance(users, dict)
self._users = users
def set_realm(self, realm):
self._realm_name = realm
def _generate_nonce(self):
self._request_num += 1
nonce = hashlib.md5(str(self._request_num)).hexdigest()
self._nonces.append(nonce)
return nonce
def _create_auth_dict(self, auth_str):
first_space_index = auth_str.find(" ")
auth_str = auth_str[first_space_index+1:]
parts = auth_str.split(",")
auth_dict = {}
for part in parts:
name, value = part.split("=")
name = name.strip()
if value[0] == '"' and value[-1] == '"':
value = value[1:-1]
else:
value = value.strip()
auth_dict[name] = value
return auth_dict
def _validate_auth(self, auth_dict, password, method, uri):
final_dict = {}
final_dict.update(auth_dict)
final_dict["password"] = password
final_dict["method"] = method
final_dict["uri"] = uri
HA1_str = "%(username)s:%(realm)s:%(password)s" % final_dict
HA1 = hashlib.md5(HA1_str).hexdigest()
HA2_str = "%(method)s:%(uri)s" % final_dict
HA2 = hashlib.md5(HA2_str).hexdigest()
final_dict["HA1"] = HA1
final_dict["HA2"] = HA2
response_str = "%(HA1)s:%(nonce)s:%(nc)s:" \
"%(cnonce)s:%(qop)s:%(HA2)s" % final_dict
response = hashlib.md5(response_str).hexdigest()
return response == auth_dict["response"]
def _return_auth_challenge(self, request_handler):
request_handler.send_response(407, "Proxy Authentication Required")
request_handler.send_header("Content-Type", "text/html")
request_handler.send_header(
'Proxy-Authenticate', 'Digest realm="%s", '
'qop="%s",'
'nonce="%s", ' % \
(self._realm_name, self._qop, self._generate_nonce()))
# XXX: Not sure if we're supposed to add this next header or
# not.
#request_handler.send_header('Connection', 'close')
request_handler.end_headers()
request_handler.wfile.write("Proxy Authentication Required.")
return False
def handle_request(self, request_handler):
"""Performs digest authentication on the given HTTP request
handler. Returns True if authentication was successful, False
otherwise.
If no users have been set, then digest auth is effectively
disabled and this method will always return True.
"""
if len(self._users) == 0:
return True
if 'Proxy-Authorization' not in request_handler.headers:
return self._return_auth_challenge(request_handler)
else:
auth_dict = self._create_auth_dict(
request_handler.headers['Proxy-Authorization']
)
if auth_dict["username"] in self._users:
password = self._users[ auth_dict["username"] ]
else:
return self._return_auth_challenge(request_handler)
if not auth_dict.get("nonce") in self._nonces:
return self._return_auth_challenge(request_handler)
else:
self._nonces.remove(auth_dict["nonce"])
auth_validated = False
# MSIE uses short_path in its validation, but Python's
# urllib2 uses the full path, so we're going to see if
# either of them works here.
for path in [request_handler.path, request_handler.short_path]:
if self._validate_auth(auth_dict,
password,
request_handler.command,
path):
auth_validated = True
if not auth_validated:
return self._return_auth_challenge(request_handler)
return True
# Proxy test infrastructure
class FakeProxyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    """This is a 'fake proxy' that makes it look like the entire
    internet has gone down due to a sudden zombie invasion.  Its main
    utility is in providing us with authentication support for
    testing.
    """
    def __init__(self, digest_auth_handler, *args, **kwargs):
        # This has to be set before calling our parent's __init__(), which will
        # try to call do_GET().
        self.digest_auth_handler = digest_auth_handler
        BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
    def log_message(self, format, *args):
        # Silence per-request logging so test output stays clean.
        # Uncomment the next line for debugging.
        #sys.stderr.write(format % args)
        pass
    def do_GET(self):
        # A proxy receives the absolute URL in self.path; keep just the
        # path component too, since some clients authenticate against it.
        (scm, netloc, path, params, query, fragment) = urlparse.urlparse(
            self.path, 'http')
        self.short_path = path
        # handle_request() returns False after emitting a 407 challenge,
        # in which case nothing more should be written.
        if self.digest_auth_handler.handle_request(self):
            self.send_response(200, "OK")
            self.send_header("Content-Type", "text/html")
            self.end_headers()
            self.wfile.write("You've reached %s!<BR>" % self.path)
            self.wfile.write("Our apologies, but our server is down due to "
                             "a sudden zombie invasion.")
# Test cases
class BaseTestCase(unittest.TestCase):
    """Common fixture: snapshot live threads in setUp so tearDown can
    verify none were leaked by the test."""
    def setUp(self):
        self._threads = test_support.threading_setup()
    def tearDown(self):
        test_support.threading_cleanup(*self._threads)
class ProxyAuthTests(BaseTestCase):
URL = "http://localhost"
USER = "tester"
PASSWD = "test123"
REALM = "TestRealm"
def setUp(self):
super(ProxyAuthTests, self).setUp()
self.digest_auth_handler = DigestAuthHandler()
self.digest_auth_handler.set_users({self.USER: self.PASSWD})
self.digest_auth_handler.set_realm(self.REALM)
def create_fake_proxy_handler(*args, **kwargs):
return FakeProxyHandler(self.digest_auth_handler, *args, **kwargs)
self.server = LoopbackHttpServerThread(create_fake_proxy_handler)
self.server.start()
self.server.ready.wait()
proxy_url = "http://127.0.0.1:%d" % self.server.port
handler = urllib2.ProxyHandler({"http" : proxy_url})
self.proxy_digest_handler = urllib2.ProxyDigestAuthHandler()
self.opener = urllib2.build_opener(handler, self.proxy_digest_handler)
def tearDown(self):
self.server.stop()
super(ProxyAuthTests, self).tearDown()
def test_proxy_with_bad_password_raises_httperror(self):
self.proxy_digest_handler.add_password(self.REALM, self.URL,
self.USER, self.PASSWD+"bad")
self.digest_auth_handler.set_qop("auth")
self.assertRaises(urllib2.HTTPError,
self.opener.open,
self.URL)
def test_proxy_with_no_password_raises_httperror(self):
self.digest_auth_handler.set_qop("auth")
self.assertRaises(urllib2.HTTPError,
self.opener.open,
self.URL)
def test_proxy_qop_auth_works(self):
self.proxy_digest_handler.add_password(self.REALM, self.URL,
self.USER, self.PASSWD)
self.digest_auth_handler.set_qop("auth")
result = self.opener.open(self.URL)
while result.read():
pass
result.close()
def test_proxy_qop_auth_int_works_or_throws_urlerror(self):
self.proxy_digest_handler.add_password(self.REALM, self.URL,
self.USER, self.PASSWD)
self.digest_auth_handler.set_qop("auth-int")
try:
result = self.opener.open(self.URL)
except urllib2.URLError:
# It's okay if we don't support auth-int, but we certainly
# shouldn't receive any kind of exception here other than
# a URLError.
result = None
if result:
while result.read():
pass
result.close()
def GetRequestHandler(responses):
    """Build a request handler class whose replies are scripted.

    *responses* is a list of (status_code, headers, body) tuples consumed
    one per request (in order).  Note that ``requests``,
    ``headers_received`` and ``port`` are class attributes, so they are
    shared by every connection handled by the returned class.
    """
    class FakeHTTPRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
        server_version = "TestHTTP/"
        requests = []           # paths (and POST bodies) seen so far
        headers_received = []   # headers of the most recent request
        port = 80               # patched by the test to the real port
        def do_GET(self):
            body = self.send_head()
            if body:
                self.wfile.write(body)
        def do_POST(self):
            # Record the POST payload, then reply exactly like a GET.
            content_length = self.headers['Content-Length']
            post_data = self.rfile.read(int(content_length))
            self.do_GET()
            self.requests.append(post_data)
        def send_head(self):
            FakeHTTPRequestHandler.headers_received = self.headers
            self.requests.append(self.path)
            # Pop the next scripted response; header values may embed
            # %s, which is filled with the server port (e.g. Location).
            response_code, headers, body = responses.pop(0)
            self.send_response(response_code)
            for (header, value) in headers:
                self.send_header(header, value % self.port)
            if body:
                self.send_header('Content-type', 'text/plain')
                self.end_headers()
                return body
            self.end_headers()
        def log_message(self, *args):
            pass
    return FakeHTTPRequestHandler
class TestUrlopen(BaseTestCase):
"""Tests urllib2.urlopen using the network.
These tests are not exhaustive. Assuming that testing using files does a
good job overall of some of the basic interface features. There are no
tests exercising the optional 'data' and 'proxies' arguments. No tests
for transparent redirection have been written.
"""
def setUp(self):
proxy_handler = urllib2.ProxyHandler({})
opener = urllib2.build_opener(proxy_handler)
urllib2.install_opener(opener)
super(TestUrlopen, self).setUp()
def start_server(self, responses):
handler = GetRequestHandler(responses)
self.server = LoopbackHttpServerThread(handler)
self.server.start()
self.server.ready.wait()
port = self.server.port
handler.port = port
return handler
def test_redirection(self):
expected_response = 'We got here...'
responses = [
(302, [('Location', 'http://localhost:%s/somewhere_else')], ''),
(200, [], expected_response)
]
handler = self.start_server(responses)
try:
f = urllib2.urlopen('http://localhost:%s/' % handler.port)
data = f.read()
f.close()
self.assertEqual(data, expected_response)
self.assertEqual(handler.requests, ['/', '/somewhere_else'])
finally:
self.server.stop()
def test_404(self):
expected_response = 'Bad bad bad...'
handler = self.start_server([(404, [], expected_response)])
try:
try:
urllib2.urlopen('http://localhost:%s/weeble' % handler.port)
except urllib2.URLError, f:
pass
else:
self.fail('404 should raise URLError')
data = f.read()
f.close()
self.assertEqual(data, expected_response)
self.assertEqual(handler.requests, ['/weeble'])
finally:
self.server.stop()
def test_200(self):
expected_response = 'pycon 2008...'
handler = self.start_server([(200, [], expected_response)])
try:
f = urllib2.urlopen('http://localhost:%s/bizarre' % handler.port)
data = f.read()
f.close()
self.assertEqual(data, expected_response)
self.assertEqual(handler.requests, ['/bizarre'])
finally:
self.server.stop()
def test_200_with_parameters(self):
expected_response = 'pycon 2008...'
handler = self.start_server([(200, [], expected_response)])
try:
f = urllib2.urlopen('http://localhost:%s/bizarre' % handler.port, 'get=with_feeling')
data = f.read()
f.close()
self.assertEqual(data, expected_response)
self.assertEqual(handler.requests, ['/bizarre', 'get=with_feeling'])
finally:
self.server.stop()
def test_sending_headers(self):
handler = self.start_server([(200, [], "we don't care")])
try:
req = urllib2.Request("http://localhost:%s/" % handler.port,
headers={'Range': 'bytes=20-39'})
urllib2.urlopen(req)
self.assertEqual(handler.headers_received['Range'], 'bytes=20-39')
finally:
self.server.stop()
def test_basic(self):
handler = self.start_server([(200, [], "we don't care")])
try:
open_url = urllib2.urlopen("http://localhost:%s" % handler.port)
for attr in ("read", "close", "info", "geturl"):
self.assertTrue(hasattr(open_url, attr), "object returned from "
"urlopen lacks the %s attribute" % attr)
try:
self.assertTrue(open_url.read(), "calling 'read' failed")
finally:
open_url.close()
finally:
self.server.stop()
def test_info(self):
handler = self.start_server([(200, [], "we don't care")])
try:
open_url = urllib2.urlopen("http://localhost:%s" % handler.port)
info_obj = open_url.info()
self.assertIsInstance(info_obj, mimetools.Message,
"object returned by 'info' is not an "
"instance of mimetools.Message")
self.assertEqual(info_obj.getsubtype(), "plain")
finally:
self.server.stop()
def test_geturl(self):
# Make sure same URL as opened is returned by geturl.
handler = self.start_server([(200, [], "we don't care")])
try:
open_url = urllib2.urlopen("http://localhost:%s" % handler.port)
url = open_url.geturl()
self.assertEqual(url, "http://localhost:%s" % handler.port)
finally:
self.server.stop()
def test_bad_address(self):
# Make sure proper exception is raised when connecting to a bogus
# address.
# as indicated by the comment below, this might fail with some ISP,
# so we run the test only when -unetwork/-uall is specified to
# mitigate the problem a bit (see #17564)
test_support.requires('network')
self.assertRaises(IOError,
# Given that both VeriSign and various ISPs have in
# the past or are presently hijacking various invalid
# domain name requests in an attempt to boost traffic
# to their own sites, finding a domain name to use
# for this test is difficult. RFC2606 leads one to
# believe that '.invalid' should work, but experience
# seemed to indicate otherwise. Single character
# TLDs are likely to remain invalid, so this seems to
# be the best choice. The trailing '.' prevents a
# related problem: The normal DNS resolver appends
# the domain names from the search path if there is
# no '.' the end and, and if one of those domains
# implements a '*' rule a result is returned.
# However, none of this will prevent the test from
# failing if the ISP hijacks all invalid domain
# requests. The real solution would be to be able to
# parameterize the framework with a mock resolver.
urllib2.urlopen, "http://sadflkjsasf.i.nvali.d./")
def test_iteration(self):
expected_response = "pycon 2008..."
handler = self.start_server([(200, [], expected_response)])
try:
data = urllib2.urlopen("http://localhost:%s" % handler.port)
for line in data:
self.assertEqual(line, expected_response)
finally:
self.server.stop()
def ztest_line_iteration(self):
lines = ["We\n", "got\n", "here\n", "verylong " * 8192 + "\n"]
expected_response = "".join(lines)
handler = self.start_server([(200, [], expected_response)])
try:
data = urllib2.urlopen("http://localhost:%s" % handler.port)
for index, line in enumerate(data):
self.assertEqual(line, lines[index],
"Fetched line number %s doesn't match expected:\n"
" Expected length was %s, got %s" %
(index, len(lines[index]), len(line)))
finally:
self.server.stop()
self.assertEqual(index + 1, len(lines))
def test_main():
    """Entry point for regrtest: run both localhost-only test cases."""
    # We will NOT depend on the network resource flag
    # (Lib/test/regrtest.py -u network) since all tests here are only
    # localhost.  However, if this is a bad rationale, then uncomment
    # the next line.
    #test_support.requires("network")
    test_support.run_unittest(ProxyAuthTests, TestUrlopen)
if __name__ == "__main__":
test_main()
| ianyh/heroku-buildpack-python-opencv | vendor/.heroku/lib/python2.7/test/test_urllib2_localnet.py | Python | mit | 20,012 |
from model.group import Group
from model.contact import Contact
def test_group_list(app, db):
    """The group list shown in the UI must match the database contents.

    Names coming from the DB may carry padding whitespace, so they are
    stripped before comparison; both sides are sorted by id.
    """
    from_ui = app.group.get_group_list()
    from_db = [Group(id=g.id, name=g.name.strip())
               for g in db.get_group_list()]
    assert sorted(from_ui, key=Group.id_or_max) == sorted(from_db, key=Group.id_or_max)
def test_contact_list(app, db):
    """The contact list shown in the UI must match the database contents.

    Only id, lastname and firstname are compared (the UI exposes just
    these); DB values are stripped of padding whitespace first.
    """
    from_ui = app.contacts.get_contact_list()
    from_db = [Contact(id=c.id,
                       lastname=c.lastname.strip(),
                       firstname=c.firstname.strip())
               for c in db.get_contact_list()]
    assert sorted(from_ui, key=Contact.id_or_max) == sorted(from_db, key=Contact.id_or_max)
| goeliv/python_training | test/test_db_matches_ui.py | Python | apache-2.0 | 707 |
#!/usr/bin/env python
import sys
import os
import parser
import subprocess
if "VIRTUAL_ENV" not in os.environ:
sys.stderr.write("$VIRTUAL_ENV not found.\n\n")
parser.print_usage()
sys.exit(-1)
virtualenv = os.environ["VIRTUAL_ENV"]
file_path = os.path.dirname(__file__)
subprocess.call(["pip", "install", "-E", virtualenv, "--requirement",
os.path.join(file_path, "REQUIREMENTS")]) | frog32/morgainemoviedb | bootstrap.py | Python | agpl-3.0 | 411 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-07-26 00:50
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: make the ingredient/ingredient-group
    foreign keys nullable, allow a blank group title, and enforce that
    (title, recipe) is unique per ingredient group.

    Do not edit the operation list by hand unless you also keep the
    migration history consistent.
    """
    dependencies = [
        ('recipe', '0007_auto_20170723_2046'),
        ('ingredient', '0004_auto_20170723_2101'),
    ]
    operations = [
        migrations.AlterField(
            model_name='ingredient',
            name='ingredient_group',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='ingredients', to='ingredient.IngredientGroup', verbose_name='IngredientGroup'),
        ),
        migrations.AlterField(
            model_name='ingredientgroup',
            name='recipe',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='ingredient_groups', to='recipe.Recipe', verbose_name='recipe'),
        ),
        migrations.AlterField(
            model_name='ingredientgroup',
            name='title',
            field=models.CharField(blank=True, max_length=150, null=True, verbose_name='title'),
        ),
        migrations.AlterUniqueTogether(
            name='ingredientgroup',
            unique_together=set([('title', 'recipe')]),
        ),
    ]
| RyanNoelk/OpenEats | api/v1/ingredient/migrations/0005_auto_20170725_1950.py | Python | mit | 1,312 |
"""
:mod:`pyffi.formats.tri` --- TRI (.tri)
=======================================
A .tri file contains facial expression data, that is, morphs for dynamic
expressions such as smile, frown, and so on.
Implementation
--------------
.. autoclass:: TriFormat
:show-inheritance:
:members:
Regression tests
----------------
Read a TRI file
^^^^^^^^^^^^^^^
>>> # check and read tri file
>>> stream = open('tests/tri/mmouthxivilai.tri', 'rb')
>>> data = TriFormat.Data()
>>> data.inspect(stream)
>>> # do some stuff with header?
>>> data.num_vertices
89
>>> data.num_tri_faces
215
>>> data.num_quad_faces
0
>>> data.num_uvs
89
>>> data.num_morphs
18
>>> data.read(stream) # doctest: +ELLIPSIS
>>> print([str(morph.name.decode("ascii")) for morph in data.morphs])
['Fear', 'Surprise', 'Aah', 'BigAah', 'BMP', 'ChJSh', 'DST', 'Eee', 'Eh', \
'FV', 'I', 'K', 'N', 'Oh', 'OohQ', 'R', 'Th', 'W']
Parse all TRI files in a directory tree
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
>>> for stream, data in TriFormat.walkData('tests/tri'):
... print(stream.name)
tests/tri/mmouthxivilai.tri
Create an TRI file from scratch and write to file
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
>>> data = TriFormat.Data()
>>> from tempfile import TemporaryFile
>>> stream = TemporaryFile()
>>> data.write(stream)
"""
# ***** BEGIN LICENSE BLOCK *****
#
# Copyright (c) 2007-2012, Python File Format Interface
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# * Neither the name of the Python File Format Interface
# project nor the names of its contributors may be used to endorse
# or promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# ***** END LICENSE BLOCK *****
from itertools import chain
import struct
import os
import re
import pyffi.object_models.xml
import pyffi.object_models.common
from pyffi.object_models.xml.basic import BasicBase
import pyffi.object_models
from pyffi.utils.graph import EdgeFilter
class TriFormat(pyffi.object_models.xml.FileFormat):
"""This class implements the TRI format."""
xml_file_name = 'tri.xml'
# where to look for tri.xml and in what order:
# TRIXMLPATH env var, or TriFormat module directory
xml_file_path = [os.getenv('TRIXMLPATH'), os.path.dirname(__file__)]
# file name regular expression match
RE_FILENAME = re.compile(r'^.*\.tri$', re.IGNORECASE)
# basic types
int = pyffi.object_models.common.Int
uint = pyffi.object_models.common.UInt
byte = pyffi.object_models.common.Byte
ubyte = pyffi.object_models.common.UByte
char = pyffi.object_models.common.Char
short = pyffi.object_models.common.Short
ushort = pyffi.object_models.common.UShort
float = pyffi.object_models.common.Float
# implementation of tri-specific basic types
class SizedStringZ(pyffi.object_models.common.SizedString):
def get_size(self, data=None):
"""Return number of bytes this type occupies in a file.
:return: Number of bytes.
"""
return (
1 +
pyffi.object_models.common.SizedString.get_size(self, data)
)
def read(self, stream, data):
"""Read string from stream.
:param stream: The stream to read from.
:type stream: file
"""
pyffi.object_models.common.SizedString.read(self, stream, data)
self._value = self._value.rstrip(pyffi.object_models.common._b00)
def write(self, stream, data):
"""Write string to stream.
:param stream: The stream to write to.
:type stream: file
"""
self._value += pyffi.object_models.common._b00
pyffi.object_models.common.SizedString.write(self, stream, data)
self._value = self._value.rstrip(pyffi.object_models.common._b00)
class FileSignature(BasicBase):
"""Basic type which implements the header of a TRI file."""
def __init__(self, **kwargs):
BasicBase.__init__(self, **kwargs)
def __str__(self):
return 'FRTRI'
def get_detail_display(self):
return self.__str__()
def get_hash(self, data=None):
"""Return a hash value for this value.
:return: An immutable object that can be used as a hash.
"""
return None
def read(self, stream, data):
"""Read header string from stream and check it.
:param stream: The stream to read from.
:type stream: file
"""
hdrstr = stream.read(5)
# check if the string is correct
if hdrstr != "FRTRI".encode("ascii"):
raise ValueError(
"invalid TRI header: expected 'FRTRI' but got '%s'"
% hdrstr)
def write(self, stream, data):
"""Write the header string to stream.
:param stream: The stream to write to.
:type stream: file
"""
stream.write("FRTRI".encode("ascii"))
def get_size(self, data=None):
"""Return number of bytes the header string occupies in a file.
:return: Number of bytes.
"""
return 5
class FileVersion(BasicBase):
_value = 3
def get_value(self):
return self._value
def set_value(self, value):
self._value = int(value)
def __str__(self):
return '%03i' % self._value
def get_size(self, data=None):
return 3
def get_hash(self, data=None):
return self._value
def read(self, stream, data):
self._value = TriFormat.version_number(
stream.read(3).decode("ascii"))
def write(self, stream, data):
stream.write(('%03i' % self._value).encode("ascii"))
def get_detail_display(self):
return self.__str__()
@staticmethod
def version_number(version_str):
"""Converts version string into an integer.
:param version_str: The version string.
:type version_str: str
:return: A version integer.
>>> TriFormat.version_number('003')
3
>>> TriFormat.version_number('XXX')
-1
"""
try:
# note: always '003' in all files seen so far
return int(version_str)
except ValueError:
# not supported
return -1
class Header(pyffi.object_models.FileFormat.Data):
"""A class to contain the actual tri data."""
def inspect_quick(self, stream):
"""Quickly checks if stream contains TRI data, by looking at
the first 8 bytes. Reads the signature and the version.
:param stream: The stream to inspect.
:type stream: file
"""
pos = stream.tell()
try:
self._signature_value_.read(stream, self)
self._version_value_.read(stream, self)
finally:
stream.seek(pos)
# overriding pyffi.object_models.FileFormat.Data methods
def inspect(self, stream):
"""Quickly checks if stream contains TRI data, and reads
everything up to the arrays.
:param stream: The stream to inspect.
:type stream: file
"""
pos = stream.tell()
try:
self.inspect_quick(stream)
self._signature_value_.read(stream, self)
self._version_value_.read(stream, self)
self._num_vertices_value_.read(stream, self)
self._num_tri_faces_value_.read(stream, self)
self._num_quad_faces_value_.read(stream, self)
self._unknown_1_value_.read(stream, self)
self._unknown_2_value_.read(stream, self)
self._num_uvs_value_.read(stream, self)
self._has_uv_value_.read(stream, self)
self._num_morphs_value_.read(stream, self)
self._num_modifiers_value_.read(stream, self)
self._num_modifier_vertices_value_.read(stream, self)
finally:
stream.seek(pos)
def read(self, stream):
"""Read a tri file.
:param stream: The stream from which to read.
:type stream: ``file``
"""
self.inspect_quick(stream)
pyffi.object_models.xml.struct_.StructBase.read(
self, stream, self)
# check if we are at the end of the file
if stream.read(1):
raise ValueError(
'end of file not reached: corrupt tri file?')
# copy modifier vertices into modifier records
start_index = 0
for modifier in self.modifiers:
modifier.modifier_vertices.update_size()
for src_vert, dst_vert in zip(
self.modifier_vertices[
start_index:start_index
+ modifier.num_vertices_to_modify],
modifier.modifier_vertices):
dst_vert.x = src_vert.x
dst_vert.y = src_vert.y
dst_vert.z = src_vert.z
start_index += modifier.num_vertices_to_modify
def write(self, stream):
"""Write a tri file.
:param stream: The stream to which to write.
:type stream: ``file``
"""
# copy modifier vertices from modifier records to header
if self.modifiers:
self.num_modifier_vertices = sum(
modifier.num_vertices_to_modify
for modifier in self.modifiers)
self.modifier_vertices.update_size()
for self_vert, vert in zip(
self.modifier_vertices,
chain(*(modifier.modifier_vertices
for modifier in self.modifiers))):
self_vert.x = vert.x
self_vert.y = vert.y
self_vert.z = vert.z
else:
self.num_modifier_vertices = 0
self.modifier_vertices.update_size()
# write the data
pyffi.object_models.xml.struct_.StructBase.write(
self, stream, self)
def add_morph(self, name=None, relative_vertices=None):
"""Add a morph."""
self.num_morphs += 1
self.morphs.update_size()
return self.morphs[-1]
def add_modifier(self, name=None, relative_vertices=None):
"""Add a modifier."""
self.num_modifiers += 1
self.modifiers.update_size()
return self.modifiers[-1]
# GlobalNode
def get_global_child_nodes(self, edge_filter=EdgeFilter()):
return ([morph for morph in self.morphs]
+ [morph for morph in self.modifiers])
# XXX copied from pyffi.formats.egm.EgmFormat.MorphRecord
class MorphRecord:
"""
>>> # create morph with 3 vertices.
>>> morph = TriFormat.MorphRecord(argument=3)
>>> morph.set_relative_vertices(
... [(3, 5, 2), (1, 3, 2), (-9, 3, -1)])
>>> # scale should be 9/32768.0 = 0.0002746...
>>> morph.scale # doctest: +ELLIPSIS
0.0002746...
>>> for vert in morph.get_relative_vertices():
... print([int(1000 * x + 0.5) for x in vert])
[3000, 5000, 2000]
[1000, 3000, 2000]
[-8999, 3000, -999]
"""
def get_relative_vertices(self):
for vert in self.vertices:
yield (vert.x * self.scale,
vert.y * self.scale,
vert.z * self.scale)
def set_relative_vertices(self, vertices):
# copy to list
vertices = list(vertices)
# check length
if len(vertices) != self.arg:
raise ValueError("expected %i vertices, but got %i"
% (self.arg, len(vertices)))
# get extreme values of morph
max_value = max(max(abs(value) for value in vert)
for vert in vertices)
# calculate scale
self.scale = max_value / 32767.0
inv_scale = 1 / self.scale
# set vertices
for vert, self_vert in zip(vertices, self.vertices):
self_vert.x = int(vert[0] * inv_scale)
self_vert.y = int(vert[1] * inv_scale)
self_vert.z = int(vert[2] * inv_scale)
def apply_scale(self, scale):
"""Apply scale factor to data.
>>> # create morph with 3 vertices.
>>> morph = TriFormat.MorphRecord(argument=3)
>>> morph.set_relative_vertices(
... [(3, 5, 2), (1, 3, 2), (-9, 3, -1)])
>>> morph.apply_scale(2)
>>> for vert in morph.get_relative_vertices():
... print([int(1000 * x + 0.5) for x in vert])
[6000, 10000, 4000]
[2000, 6000, 4000]
[-17999, 6000, -1999]
"""
self.scale *= scale
# Run the module's embedded doctests when executed as a script.
if __name__=='__main__':
    import doctest
    doctest.testmod()
| griest024/PokyrimTools | pyffi-develop/pyffi/formats/tri/__init__.py | Python | mit | 14,754 |
__author__ = "root"
__prog__ = ""
import sys, math
def main():
    """Read the number of test cases from stdin, then one line per case.

    NOTE(review): this is a contest-template stub; the per-case line is
    read into ``n`` but never used.
    """
    t = int(sys.stdin.readline())
    for i in xrange(t):
        n = sys.stdin.readline().strip()

if __name__ == "__main__":
    main()
| d3vas3m/TemplateGenerator | templates/template.py | Python | gpl-2.0 | 204 |
## This file is part of CDS Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008 CERN.
##
## CDS Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## CDS Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with CDS Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
__revision__ = "$Id$"
__lastupdated__ = "$Date$"
import os
from urllib import unquote
from invenio import webinterface_handler_wsgi_utils as apache
from invenio.config import \
CFG_TMPDIR, \
CFG_SITE_URL, \
CFG_SITE_NAME, \
CFG_SITE_LANG
from invenio.webinterface_handler import wash_urlargd, WebInterfaceDirectory
from invenio.webpage import page
from invenio import template
from invenio.access_control_engine import acc_authorize_action
from invenio.webuser import collect_user_info, page_not_authorized
from invenio.urlutils import redirect_to_url, make_canonical_urlargd
from invenio.webstat import perform_request_index
from invenio.webstat import perform_display_keyevent
from invenio.webstat import perform_display_customevent
from invenio.webstat import perform_display_customevent_help
from invenio.webstat import register_customevent
def detect_suitable_graph_format():
    """
    Return suitable graph format default argument: gnuplot if it is
    present, otherwise asciiart.
    """
    try:
        import Gnuplot
    except ImportError:
        return "asciiart"
    return "gnuplot"

SUITABLE_GRAPH_FORMAT = detect_suitable_graph_format()
class WebInterfaceStatsPages(WebInterfaceDirectory):
    """Defines the set of stats pages.

    The four key event pages (collection population, search frequency,
    search type distribution and download frequency) share one rendering
    pipeline, factored into :meth:`_display_keyevent_page`.
    """

    _exports = ['',
                'collection_population', 'search_frequency',
                'search_type_distribution', 'download_frequency',
                'customevent', 'customevent_help', 'customevent_register',
                'export']

    navtrail = """<a class="navtrail" href="%s/stats/%%(ln_link)s">Statistics</a>""" % CFG_SITE_URL

    def _auth_error_page(self, req, ln, navmenuid):
        """Return an error page if the user may not run WebStat, else None.

        :param navmenuid: menu id shown on the "not authorized" page.
        """
        user_info = collect_user_info(req)
        (auth_code, auth_msg) = acc_authorize_action(user_info,
                                                     'runwebstatadmin')
        if auth_code:
            return page_not_authorized(
                req,
                navtrail=self.navtrail % {
                    'ln_link': (ln != CFG_SITE_LANG and '?ln=' + ln) or ''},
                text=auth_msg,
                navmenuid=navmenuid,
                ln=ln)
        return None

    def _display_keyevent_page(self, req, form, event_id, extra_args=None):
        """Render one key event statistics page.

        All key event pages follow the same workflow: wash the request
        arguments, check authorization, then delegate the body to
        perform_display_keyevent().  The page title is the event id with
        its first letter capitalized; the event id doubles as navmenuid.

        :param event_id: key event identifier, e.g. 'search frequency'.
        :param extra_args: optional extra wash_urlargd() argument spec.
        """
        arg_format = {'timespan': (str, "today"),
                      'format': (str, SUITABLE_GRAPH_FORMAT),
                      'ln': (str, CFG_SITE_LANG)}
        if extra_args:
            arg_format.update(extra_args)
        argd = wash_urlargd(form, arg_format)
        ln = argd['ln']
        error_page = self._auth_error_page(req, ln, event_id)
        if error_page is not None:
            return error_page
        title = event_id[0].upper() + event_id[1:]
        return page(title=title,
                    body=perform_display_keyevent(event_id, argd, req, ln=ln),
                    navtrail="""<a class="navtrail" href="%s/stats/%s">Statistics</a>""" % \
                    (CFG_SITE_URL, (ln != CFG_SITE_LANG and '?ln=' + ln) or ''),
                    description="CDS, Statistics, %s" % title,
                    keywords="CDS, statistics, %s" % event_id,
                    req=req,
                    lastupdated=__lastupdated__,
                    navmenuid=event_id,
                    language=ln)

    def __call__(self, req, form):
        """Index page."""
        argd = wash_urlargd(form, {'ln': (str, CFG_SITE_LANG)})
        ln = argd['ln']
        error_page = self._auth_error_page(req, ln, 'index')
        if error_page is not None:
            return error_page
        return page(title="Statistics",
                    body=perform_request_index(ln=ln),
                    description="CDS, Statistics",
                    keywords="CDS, statistics",
                    req=req,
                    lastupdated=__lastupdated__,
                    navmenuid='stats',
                    language=ln)

    # KEY EVENT SECTION

    def collection_population(self, req, form):
        """Collection population statistics page."""
        return self._display_keyevent_page(
            req, form, 'collection population',
            extra_args={'collection': (str, CFG_SITE_NAME)})

    def search_frequency(self, req, form):
        """Search frequency statistics page."""
        return self._display_keyevent_page(req, form, 'search frequency')

    def search_type_distribution(self, req, form):
        """Search type distribution statistics page."""
        return self._display_keyevent_page(req, form,
                                           'search type distribution')

    def download_frequency(self, req, form):
        """Download frequency statistics page."""
        return self._display_keyevent_page(req, form, 'download frequency')

    # CUSTOM EVENT SECTION

    def customevent(self, req, form):
        """Custom event statistics page"""
        arg_format = {'ids': (list, []),
                      'timespan': (str, "today"),
                      'format': (str, SUITABLE_GRAPH_FORMAT),
                      'ln': (str, CFG_SITE_LANG)}
        # Custom event filters arrive as numbered triples (colsN,
        # col_valueN, boolN); accept whatever indexes the form provides.
        for key in form.keys():
            if key[:4] == 'cols':
                i = key[4:]
                arg_format['cols' + i] = (list, [])
                arg_format['col_value' + i] = (list, [])
                arg_format['bool' + i] = (list, [])
        argd = wash_urlargd(form, arg_format)
        ln = argd['ln']
        error_page = self._auth_error_page(req, ln, 'custom event')
        if error_page is not None:
            return error_page
        body = perform_display_customevent(argd['ids'], argd, req=req, ln=ln)
        return page(title="Custom event",
                    body=body,
                    navtrail="""<a class="navtrail" href="%s/stats/%s">Statistics</a>""" % \
                    (CFG_SITE_URL, (ln != CFG_SITE_LANG and '?ln=' + ln) or ''),
                    description="CDS Personalize, Statistics, Custom event",
                    keywords="CDS, statistics, custom event",
                    req=req,
                    lastupdated=__lastupdated__,
                    navmenuid='custom event',
                    language=ln)

    def customevent_help(self, req, form):
        """Custom event help page"""
        argd = wash_urlargd(form, {'ln': (str, CFG_SITE_LANG)})
        ln = argd['ln']
        error_page = self._auth_error_page(req, ln, 'custom event help')
        if error_page is not None:
            return error_page
        return page(title="Custom event help",
                    body=perform_display_customevent_help(ln=ln),
                    navtrail="""<a class="navtrail" href="%s/stats/%s">Statistics</a>""" % \
                    (CFG_SITE_URL, (ln != CFG_SITE_LANG and '?ln=' + ln) or ''),
                    description="CDS Personalize, Statistics, Custom event help",
                    keywords="CDS, statistics, custom event help",
                    req=req,
                    lastupdated=__lastupdated__,
                    navmenuid='custom event help',
                    language=ln)

    def customevent_register(self, req, form):
        """Register a customevent and reload to it defined url"""
        argd = wash_urlargd(form, {'id': (str, ""),
                                   'arg': (str, ""),
                                   'url': (str, ""),
                                   'ln': (str, CFG_SITE_LANG)})
        params = argd['arg'].split(',')
        # Substitute the caller's IP address for the WEBSTAT_IP marker.
        if "WEBSTAT_IP" in params:
            index = params.index("WEBSTAT_IP")
            params[index] = str(req.remote_ip)
        register_customevent(argd['id'], params)
        return redirect_to_url(req, unquote(argd['url']),
                               apache.HTTP_MOVED_PERMANENTLY)

    # EXPORT SECTION

    def export(self, req, form):
        """Exports data"""
        argd = wash_urlargd(form, {'ln': (str, CFG_SITE_LANG)})
        ln = argd['ln']
        error_page = self._auth_error_page(req, ln, 'export')
        if error_page is not None:
            return error_page
        argd = wash_urlargd(form, {"filename": (str, ""),
                                   "mime": (str, "")})
        # Check that the particular file exists and that it's OK to
        # export: only files WebStat itself produced in CFG_TMPDIR are
        # eligible, which also blocks path traversal via 'filename'.
        webstat_files = [x for x in os.listdir(CFG_TMPDIR)
                         if x.startswith("webstat")]
        if argd["filename"] not in webstat_files:
            return "Bad file."
        # Set correct header type
        req.content_type = argd["mime"]
        req.send_http_header()
        # Rebuild path, send it to the user, and clean up.
        filename = CFG_TMPDIR + '/' + argd["filename"]
        req.sendfile(filename)
        os.remove(filename)

    index = __call__
| pombredanne/invenio-old | modules/webstat/lib/webstat_webinterface.py | Python | gpl-2.0 | 13,467 |
# Copyright (c) 2011, Yeiniel Suarez Sosa.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Yeiniel Suarez Sosa. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import html
import mimetypes
from aurora import views
from aurora.webapp import foundation
__all__ = ['Views']
class Views(views.Views):
    """ Provide generic template based view rendering support.

    This component is based on the :class:`aurora.views.Views` class from
    the ``Views`` framework. It provides the same services plus a Web
    specific one (:meth:`render2response`) and a convenience wrapper that
    turns a template into a request handler (:meth:`handler4template`).

    An ``escape`` default content item is registered so templates can
    escape any content that would otherwise be an XSS attack vector.
    """

    def __init__(self):
        self.add_default('escape', html.escape)

    def render2response(self, request: foundation.Request, template_name: str,
                        **context) -> foundation.Response:
        """ Render a template into a :class:`~aurora.webapp.foundation.Response` object with context.

        Template file names must have two extensions: the first identifies
        the content type of the output, the second identifies the engine
        that handles the template syntax.

        :param request: The request object used to build the response.
        :param template_name: The relative template name string without the
            last extension.
        :param context: The context mapping.
        :return: The rendered :class:`~aurora.webapp.foundation.Response`
            object.
        """
        body = self.render(template_name, request=request, **context)
        response = request.response_factory(text=body)
        guessed_type, _ = mimetypes.guess_type(template_name)
        response.content_type = guessed_type or self.DEFAULT_MIME_TYPE
        return response

    def handler4template(self, template_name: str, **context) -> foundation.Handler:
        """ Produce a Web request handler that simply renders a template.

        This service uses the :meth:`render2response` service; the same
        rules about template names apply.

        :param template_name: The relative template name string without the
            last extension.
        :param context: The context mapping.
        :return: A Web request
            :class:`handler <aurora.webapp.foundation.Handler>`.
        """
        def render_handler(request):
            return self.render2response(request, template_name, **context)

        return render_handler
# Make sure the shared mimetypes table is initialized before
# mimetypes.guess_type() is used above.
if not mimetypes.inited:
    mimetypes.init()
| yeiniel/aurora | aurora/webcomponents/views.py | Python | bsd-3-clause | 4,110 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2013-2015 Therp BV <http://therp.nl>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# OpenERP/Odoo addon manifest for the CAMT bank statement importer.
{
    'name': 'CAMT Format Bank Statements Import',
    'version': '8.0.0.3.0',
    'license': 'AGPL-3',
    'author': 'Odoo Community Association (OCA), Therp BV',
    'website': 'https://github.com/OCA/bank-statement-import',
    'category': 'Banking addons',
    # Generic statement-import framework this addon plugs into.
    'depends': [
        'account_bank_statement_import',
    ],
    'demo': [
        'demo/demo_data.xml',
    ],
    'installable': True,
}
| Endika/bank-statement-import | account_bank_statement_import_camt/__openerp__.py | Python | agpl-3.0 | 1,334 |
#! /usr/bin/python
# -*- python -*-
import avro.io
import avro.datafile
import sys
import glob
try:
import json
except ImportError:
import simplejson as json
for f in sys.argv[1:]:
for d in avro.datafile.DataFileReader(file(f), avro.io.DatumReader()):
print json.dumps(d)
| tomslabs/avro-utils | src/main/scripts/dumpAvroFile.py | Python | apache-2.0 | 294 |
#!/usr/bin/python2
# -*- coding: utf-8 -*-
#
# Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module exposes one function Login(), that authenticates user into the
Google services, returning an authentication token and a cookie."""
from datetime import datetime, timedelta
import time
import urllib
import urllib2
class AuthenticationError(urllib2.HTTPError):
  """Exception class to indicate an error when authenticating with Google's
  ClientLogin.
  """

  def __init__(self, url, code, message, headers, args):
    """Initialize the error with the specified arguments.

    ``args`` is the parsed ClientLogin error body; its 'Error' entry
    becomes the human readable ``reason`` of this exception.
    """
    super(AuthenticationError, self).__init__(url, code, message,
                                              headers, None)
    self.args = args
    self.reason = args["Error"]
def _GetHTTPOpener():
  """Create an http opener used to interact with Google's ClientLogin.

  Returns:
    An http opener capable of handling anything needed to interact with
    Google's ClientLogin: proxies, http, https and error processing.
  """
  # Handlers are registered in the same order the original code used.
  handler_classes = (urllib2.ProxyHandler,
                     urllib2.UnknownHandler,
                     urllib2.HTTPHandler,
                     urllib2.HTTPDefaultErrorHandler,
                     urllib2.HTTPErrorProcessor,
                     urllib2.HTTPSHandler)
  opener = urllib2.OpenerDirector()
  for handler_class in handler_classes:
    opener.add_handler(handler_class())
  return opener
def _ParseBodyAsDict(body):
""" Parse the specified body as a dictionary with each element in a line, and
key value pairs separated by '='.
Args:
body: The string with the HTTP body to parse.
Returns:
A dictionary with the body contents.
"""
return dict(line.split('=') for line in body.split('\n') if line)
def _GetGoogleAuthtoken(account_type, user, password, service, source):
  """This function authenticates the user in the specified service using
  the provided authentication data.

  Args:
    account_type: Type of the account to login, could be GOOGLE or any other
      string if the account is external.
    user: Name of the user to be logged in.
    password: Password of the user to be logged in.
    service: Service where the user wants to log in, for example, 'ah'.
    source: Name of the application requesting the user authentication.

  Returns:
    The authentication token for the user if the supplied data is correct.

  Raises:
    lib.AuthenticationError: This exception is raised if the HTTP response is
      403 - Forbidden, in this case the error is parsed and returned to the
      user in the exception.

    urllib2.HTTPError: This exception is raised for any other HTTP error.
  """
  # Create a request for Google's Client login, with the specified data.
  auth_request_data_map = {
      'accountType': account_type,
      'Email': user,
      'Passwd': password,
      'service': service,
      'source': source
  }
  auth_request_data = urllib.urlencode(auth_request_data_map)
  auth_url = 'https://www.google.com/accounts/ClientLogin'
  auth_request = urllib2.Request(auth_url, auth_request_data)

  try:
    # Create a custom opener, make the request and extract the body.
    http_opener = _GetHTTPOpener()
    auth_response = http_opener.open(auth_request)
    auth_response_body = auth_response.read()

    # Parse the response data as a dictionary and return the 'Auth' key.
    auth_response_data = _ParseBodyAsDict(auth_response_body)
    return auth_response_data['Auth']
  except urllib2.HTTPError as e:
    # Check if the error was a 403 - Forbidden. In that case, forward the
    # exception as an authentication error. Otherwise, just forward the
    # exception.
    if e.code == 403:
      # Parse the error body as a dictionary and forward the exception as an
      # authentication error (the 'Error' entry carries Google's reason).
      response_dict = _ParseBodyAsDict(e.read())
      raise AuthenticationError(auth_request.get_full_url(), e.code, e.msg,
                                e.headers, response_dict)
    else:
      raise
def _GetGaeCookie(host, service, auth_token, secure):
  """This function creates a login cookie using the authentication token
  obtained after logging in successfully in the Google account.

  Args:
    host: Host where the user wants to login.
    service: Service code where the user wants to login.
    auth_token: Authentication token obtained from ClientLogin.
    secure: True if we want a secure cookie, false if not.

  Returns:
    A cookie for the specified service.

  Raises:
    urllib2.HTTPError: This exception is raised when the cookie cannot be
      obtained and the user is redirected to another place.
  """
  # Create a request for Google's service with the authentication token.
  continue_location = 'http://localhost/'
  cookie_request_data_map = {
      'continue' : continue_location,
      'auth' : auth_token,
  }
  cookie_request_data = urllib.urlencode(cookie_request_data_map)
  cookie_url = '{protocol}://{host}/_{service}/login?{data}'.format(
      protocol=('https' if secure else 'http'), host=host, service=service,
      data=cookie_request_data)
  cookie_request = urllib2.Request(cookie_url)

  try:
    # Create a custom opener, make the request and extract the body.
    http_opener = _GetHTTPOpener()
    cookie_response = http_opener.open(cookie_request)
  except urllib2.HTTPError as e:
    # Keep the error as the cookie response (a redirect surfaces as an
    # HTTPError with the opener configured above).
    cookie_response = e

  # Check that a redirection was made to the required continue location --
  # that is how the login endpoint signals the token was accepted.
  # Otherwise, return an HTTP error.
  response_code = cookie_response.code
  if (response_code != 302 or
      cookie_response.info()['location'] != continue_location):
    raise urllib2.HTTPError(cookie_request.get_full_url(), response_code,
                            cookie_response.msg, cookie_response.headers,
                            cookie_response.fp)

  # Extract the cookie from the headers and remove 'HttpOnly' from it.
  cookie = cookie_response.headers.get('Set-Cookie')
  return cookie.replace('; HttpOnly', '')
def Login(host, account_type, user, password, service, source, secure):
  """Retrieve the authentication token and cookie from the specified service,
  using the given user and password to authenticate.

  Args:
    host: Host where the user wants to login.
    account_type: Type of the account to login, could be GOOGLE or any other
      string if the account is external.
    user: Name of the user to be logged in.
    password: Password of the user to be logged in.
    service: Service where the user wants to log in, for example, 'ah'.
    source: Name of the application requesting the user authentication.
    secure: True if we want a secure cookie, false if not.

  Returns:
    A tuple with the authentication token and a cookie for the specified
    service.
  """
  auth_token = _GetGoogleAuthtoken(account_type, user, password, service,
                                   source)
  return auth_token, _GetGaeCookie(host, service, auth_token, secure)
def _ParseCookieFields(cookie):
# Fields inside the cookie are separated by a semicolon, so split the cookie
# and process each token as a field.
cookie_fields = {}
for token in cookie.split(';'):
# Keys and values are separated by a single equal in the field, or they
# might be keys without values. In this case, use True as the field value.
equal_index = token.find('=')
if equal_index == -1:
field_name = token.strip()
field_value = True
else:
field_name = token[:equal_index].strip()
field_value = token[equal_index + 1:].strip()
cookie_fields[field_name] = field_value
return cookie_fields
def GetCookieExpirationTime(cookie):
  """Extract and return the expiration time in the cookie.

  Args:
    cookie: String with the cookie whose expiration time must be retrieved.

  Returns:
    A string with the cookie expiration time (in UTC), or None if the
    cookie carries no 'expires' field.
  """
  return _ParseCookieFields(cookie).get('expires')
def CookieHasExpired(cookie):
  """Checks whether the specified cookie expired or not.

  Args:
    cookie: String with the cookie information.

  Returns:
    True if the cookie has expired, false otherwise.
  """
  # Get the cookie expiration time, if it is not found just assume the cookie
  # has not expired yet.
  expiration_time_string = GetCookieExpirationTime(cookie)
  if expiration_time_string is None:
    return False
  # Parse the cookie expiration time and check if there are at least 5 minutes
  # before expiration, otherwise the cookie might expire after this function
  # exits but before the user action is complete.
  # NOTE(review): strptime's '%Z' only matches a limited set of zone names
  # (e.g. 'GMT'/'UTC'); other zone strings would raise ValueError -- confirm
  # the server always emits GMT expirations.
  expiration_time = datetime.strptime(expiration_time_string,
                                      '%a, %d-%b-%Y %H:%M:%S %Z')
  # Approximate local->UTC conversion: altzone is used whenever the locale
  # defines DST (time.daylight non-zero), timezone otherwise.
  offset = time.altzone if time.daylight else time.timezone
  today_gmt_time = datetime.today() + timedelta(seconds=offset)
  time_left = expiration_time - today_gmt_time
  return time_left < timedelta(minutes=5)
| mahadi123/hello-world | lib/google_login2.py | Python | apache-2.0 | 9,643 |
from flask import Flask, request, jsonify, abort
import os
import requests
app = Flask(__name__)
# Debug mode is enabled only when the DEBUG env var is exactly 'True'.
app.debug = os.getenv('DEBUG', '') == 'True'
def access_token():
    """Return the Instagram API access token (empty string when unset)."""
    return os.environ.get('ACCESS_TOKEN', '')
def check_user_id(user_id):
    """Abort the request with 403 unless *user_id* is whitelisted.

    USER_IDS is treated as a comma-separated list of allowed ids
    (assumed format -- confirm against deployment config).  The previous
    substring test (``user_id not in os.getenv(...)``) accepted partial
    matches (e.g. '1' when '123' was listed) and the empty string.
    """
    allowed_ids = os.getenv('USER_IDS', '').split(',')
    if user_id not in allowed_ids:
        return abort(403)
def check_user_name(user_name):
    """Abort the request with 403 unless *user_name* is whitelisted.

    USER_NAMES is treated as a comma-separated list of allowed names
    (assumed format -- confirm against deployment config).  The previous
    substring test accepted partial matches and the empty string.
    """
    allowed_names = os.getenv('USER_NAMES', '').split(',')
    if user_name not in allowed_names:
        return abort(403)
def perform_request(path):
    """Fetch *path* from the Instagram API and wrap it in a JSON response.

    Returns the upstream JSON payload with status 202 and CORS enabled
    for any origin.  BUGFIX: the previous ``jsonify(r.json(), status=202)``
    mixed positional and keyword arguments, which makes Flask's jsonify
    raise TypeError (and 'status' is not a jsonify parameter anyway); the
    status code is now set on the Response object.
    """
    upstream = requests.get(path)
    response = jsonify(upstream.json())
    response.status_code = 202
    response.headers.add('Access-Control-Allow-Origin', '*')
    return response
def build_recent_images_url(user_id):
    """URL of the user's recent media feed, authenticated with the token."""
    return ('https://api.instagram.com/v1/users/%s/media/recent/'
            '?access_token=%s' % (user_id, access_token()))
def build_user_profile_url(user_id):
    """URL of the user's profile, authenticated with the app token."""
    return ('https://api.instagram.com/v1/users/%s'
            '?access_token=%s' % (user_id, access_token()))
def build_media_url(user_name):
    """Public (unauthenticated) media listing URL for *user_name*."""
    return 'https://www.instagram.com/%s/media/' % user_name
@app.route("/recent_images/<path:user_id>")
def recent_images(user_id):
    """Proxy the recent-media feed for a whitelisted Instagram user id."""
    check_user_id(user_id)
    return perform_request(build_recent_images_url(user_id))
@app.route("/user_profile/<path:user_id>")
def user_profile(user_id):
    """Proxy the profile data for a whitelisted Instagram user id."""
    check_user_id(user_id)
    return perform_request(build_user_profile_url(user_id))
@app.route("/media/<path:user_name>")
def media(user_name):
    """Proxy the public media listing for a whitelisted Instagram user."""
    check_user_name(user_name)
    return perform_request(build_media_url(user_name))
@app.route('/healthcheck')
def healthcheck():
    """Liveness probe endpoint used by the hosting platform."""
    return 'WORKING'
# Local development entry point; in production the app is served via WSGI.
if __name__ == "__main__":
    app.run()
| cyrilkyburz/bhwi_proxy | bhwi_proxy.py | Python | mit | 1,516 |
"""Known metrics decoder"""
import logging
logger = logging.getLogger(__name__)
class MetricsDecoder(object):
    """Translate telegraf metric names into common Monitoring metric names.

    Names without a known translation are exposed as ``custom:<name>``.
    """

    def __init__(self):
        # Mapping of telegraf metric name -> canonical Monitoring name.
        self.known_metrics = {
            'mem_used': 'Memory_used',
            'mem_free': 'Memory_free',
            'mem_buffered': 'Memory_buff',
            'mem_cached': 'Memory_cached',
            'kernel_context_switches': 'System_csw',
            'kernel_interrupts': 'System_int',
            'kernel_processes_forked': 'System_forks',
            'processes_total': 'System_numproc',
            'processes_total_threads': 'System_numthreads',
            'system_load1': 'System_la1',
            'system_load5': 'System_la5',
            'system_load15': 'System_la15',
            'nstat_TcpRetransSegs': 'Net_retransmit',
            # Per-interface network and per-disk I/O translations
            # (Net_rx/Net_tx/Net_recv/Net_send, Disk_read/Disk_write) are
            # intentionally inactive: interface names and disk ids would
            # yield metrics with unknown ids.
        }
        # Per metric group: fields reported as differences between
        # consecutive samples rather than absolute values.
        self.diff_metrics = {
            'cpu': [],
            'mem': [],
            'net': ['packets_recv', 'packets_sent', 'bytes_recv',
                    'bytes_sent'],
            'nstat': ['TcpRetransSegs'],
            'net_response': [],
            'kernel': ['context_switches', 'interrupts', 'processes_forked',
                       'vmstat_pgfault', 'vmstat_pgmajfault'],
            'diskio': ['read_bytes', 'write_bytes', 'io_time', 'read_time',
                       'reads', 'write_time', 'writes'],
            'custom': []
        }

    def find_common_names(self, key):
        """Return the Monitoring name for *key*, or ``custom:<key>``."""
        return self.known_metrics.get(key, 'custom:{}'.format(key))
# Shared module-level decoder instance used by the Telegraf plugin.
decoder = MetricsDecoder()
| nnugumanov/yandex-tank | yandextank/plugins/Telegraf/decoder.py | Python | lgpl-2.1 | 2,675 |
# [h] copy glyphs to mask
import hTools2.dialogs.glyphs.mask

# Reload so edits to the dialog module are picked up inside RoboFont
# without restarting the application.
reload(hTools2.dialogs.glyphs.mask)

# Open the "copy glyphs to mask layer" dialog.
hTools2.dialogs.glyphs.mask.maskDialog()
| gferreira/hTools2_extension | hTools2.roboFontExt/lib/Scripts/selected glyphs/layers/mask.py | Python | bsd-3-clause | 140 |
# UrbanFootprint v1.5
# Copyright (C) 2017 Calthorpe Analytics
#
# This file is part of UrbanFootprint version 1.5
#
# UrbanFootprint is distributed under the terms of the GNU General
# Public License version 3, as published by the Free Software Foundation. This
# code is distributed WITHOUT ANY WARRANTY, without implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License v3 for more details; see <http://www.gnu.org/licenses/>.
from django.db import models
__author__ = 'calthorpe_analytics'
class Cloneable(models.Model):
    """Abstract mixin that records which instance a model was cloned from."""

    # Link back to the instance this one was cloned from (null for
    # original, non-cloned instances).
    origin_instance = models.ForeignKey('self', null=True)

    class Meta(object):
        abstract = True
| CalthorpeAnalytics/urbanfootprint | footprint/main/mixins/cloneable.py | Python | gpl-3.0 | 692 |
from fabric.context_managers import settings, hide
from fabric.operations import run
import paramiko
# machine abstraction
class Machine(object):
    """Thin SSH abstraction over a cluster node description.

    Wraps connection details taken from ``node`` and offers helpers to
    probe SSH reachability (paramiko) and run remote commands (fabric).
    """

    def __init__(self, node):
        self.node = node
        self.public_ip = node.ip
        self.ssh_port = node.ssh_port
        self.splunk_username = node.splunk_username
        self.splunk_password = node.splunk_password
        self.ssh_username = node.ssh_username
        self.ssh_password = node.ssh_password

    @property
    def ssh_host_string(self):
        """The ``host:port`` string fabric expects.

        BUGFIX: was ``self.self.public_ip``, which raised AttributeError.
        """
        return "%s:%s" % (self.public_ip, self.ssh_port)

    @property
    def is_ssh_accessible(self, timeout=10):
        """True if an SSH connection can be established.

        NOTE: because this is a property, callers can never pass
        ``timeout``; the 10 second default is always used.
        """
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        try:
            # BUGFIX: was "self.ssH_username" (typo) -> AttributeError.
            ssh.connect(self.public_ip, int(self.ssh_port),
                        self.ssh_username, self.ssh_password,
                        timeout=timeout)
            return True
        except Exception:
            return False

    def execute_command(self, cmd):
        """Run ``cmd`` on the machine; return ``(result, exit_code)``."""
        with settings(hide('warnings', 'running', 'stdout', 'stderr'),
                      host_string=self.ssh_host_string,
                      user=self.ssh_username,
                      password=self.ssh_password, warn_only=True):
            result = run(cmd, shell=True, pty=True)
            return result, result.return_code
| markshao/paladin | orchestration/machine.py | Python | mit | 1,297 |
"""Inference/predict code for simple_sequence dataset
model must be trained before inference,
train_simple_sequence.py must be executed beforehand.
"""
from __future__ import print_function
import argparse
import os
import sys
import matplotlib
import numpy as np
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import chainer
import chainer.functions as F
import chainer.links as L
from chainer import training, iterators, serializers, optimizers, Variable, cuda
from chainer.training import extensions
sys.path.append(os.pardir)
from RNN import RNN
from RNN2 import RNN2
from RNN3 import RNN3
from RNNForLM import RNNForLM
def main():
    """Generate a word sequence from a trained PTB language model.

    Loads the model produced by the companion training script, then
    repeatedly feeds the arg-max prediction back into the network to
    produce ``--length`` words starting from ``--primeindex``.
    """
    arch_table = {
        'rnn': RNN,
        'rnn2': RNN2,
        'rnn3': RNN3,
        'lstm': RNNForLM
    }

    parser = argparse.ArgumentParser(description='simple_sequence RNN predict code')
    parser.add_argument('--arch', '-a', choices=arch_table.keys(),
                        default='rnn', help='Net architecture')
    #parser.add_argument('--batchsize', '-b', type=int, default=64,
    #                    help='Number of images in each mini-batch')
    parser.add_argument('--unit', '-u', type=int, default=100,
                        help='Number of LSTM units in each layer')
    parser.add_argument('--gpu', '-g', type=int, default=-1,
                        help='GPU ID (negative value indicates CPU)')
    parser.add_argument('--primeindex', '-p', type=int, default=1,
                        help='base index data, used for sequence generation')
    parser.add_argument('--length', '-l', type=int, default=100,
                        help='length of the generated sequence')
    parser.add_argument('--modelpath', '-m', default='',
                        help='Model path to be loaded')
    args = parser.parse_args()

    print('GPU: {}'.format(args.gpu))
    #print('# Minibatch-size: {}'.format(args.batchsize))
    print('')

    # Penn Treebank corpus: flat arrays of word ids.
    train, val, test = chainer.datasets.get_ptb_words()
    n_vocab = max(train) + 1  # train is just an array of integers
    print('#vocab =', n_vocab)
    print('')

    # Build an id -> word table for pretty-printing the generated sequence.
    ptb_word_id_dict = chainer.datasets.get_ptb_words_vocabulary()
    ptb_id_word_dict = dict((v, k) for k, v in ptb_word_id_dict.items())

    # Model setup; move to GPU when one is requested.
    model = arch_table[args.arch](n_vocab=n_vocab, n_units=args.unit)
    classifier_model = L.Classifier(model)
    if args.gpu >= 0:
        chainer.cuda.get_device(args.gpu).use()  # make the GPU current
        classifier_model.to_gpu()
    xp = np if args.gpu < 0 else cuda.cupy

    # Restore the trained weights.
    if args.modelpath:
        serializers.load_npz(args.modelpath, model)
    else:
        serializers.load_npz('result/{}_ptb.model'.format(args.arch), model)

    # Greedy generation: feed each prediction back as the next input.
    index = args.primeindex
    predicted_sequence = [index]
    for _ in range(args.length):
        inp = chainer.Variable(xp.array([index], dtype=xp.int32))
        out = model(inp)
        index = np.argmax(cuda.to_cpu(out.data))
        predicted_sequence.append(index)

    predicted_text_list = [ptb_id_word_dict[i] for i in predicted_sequence]
    print('Predicted sequence: ', predicted_sequence)
    print('Predicted text: ', ' '.join(predicted_text_list))


if __name__ == '__main__':
    main()
| corochann/deep-learning-tutorial-with-chainer | src/05_ptb_rnn/ptb/predict_ptb.py | Python | mit | 3,356 |
from matplotlib import pyplot as plt
import numpy as np
def stock_loss(true_return, yhat, alpha=100.):
    """Asymmetric loss that heavily penalizes wrong-sign predictions.

    When `yhat` and `true_return` disagree in sign, a quadratic penalty
    scaled by `alpha` is added on top of the absolute error; otherwise the
    loss is the plain absolute deviation.
    """
    wrong_sign = true_return * yhat < 0
    if not wrong_sign:
        return abs(true_return - yhat)
    return alpha * yhat ** 2 - np.sign(true_return) * yhat + abs(true_return)
def main():
    """Plot the asymmetric stock-return loss for two example true values."""
    predictions = np.linspace(-.04, .12, 75)

    actual = .05
    plt.plot(predictions, [stock_loss(actual, p) for p in predictions],
             label="Loss associated with\n prediction if true value = 0.05",
             lw=3)
    plt.vlines(0, 0, .25, linestyles="--")
    plt.xlabel("prediction")
    plt.ylabel("loss")
    plt.xlim(-0.04, .12)
    plt.ylim(0, 0.25)

    actual = -.02
    plt.plot(predictions, [stock_loss(actual, p) for p in predictions], alpha=0.6,
             label="Loss associated with\n prediction if true value = -0.02",
             lw=3)
    plt.legend()
    plt.title("Stock returns loss if true value = 0.05, -0.02")
    plt.show()


if __name__ == '__main__':
    main()
| noelevans/sandpit | bayesian_methods_for_hackers/stock_loss_function_example_ch05.py | Python | mit | 1,024 |
def extractCandleinthetombWordpressCom(item):
    '''
    Parser for 'candleinthetomb.wordpress.com'
    '''
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip items with no chapter/volume info, and previews.
    if not (chp or vol) or "preview" in item['title'].lower():
        return None

    known_tags = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, series, tl_type in known_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp,
                                               frag=frag, postfix=postfix,
                                               tl_type=tl_type)
    return False
| fake-name/ReadableWebProxy | WebMirror/management/rss_parser_funcs/feed_parse_extractCandleinthetombWordpressCom.py | Python | bsd-3-clause | 570 |
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible import constants as C
from ansible.compat.six import string_types
from ansible.errors import AnsibleParserError, AnsibleUndefinedVariable, AnsibleFileNotFound
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleSequence
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
def load_list_of_blocks(ds, play, parent_block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
    '''
    Given a list of mixed task/block data (parsed from YAML),
    return a list of Block() objects, where implicit blocks
    are created for each bare Task.
    '''
    # deferred import to avoid a circular dependency
    from ansible.playbook.block import Block

    assert isinstance(ds, (list, type(None)))

    block_list = []
    if not ds:
        return block_list

    for block_ds in ds:
        loaded_block = Block.load(
            block_ds,
            play=play,
            parent_block=parent_block,
            role=role,
            task_include=task_include,
            use_handlers=use_handlers,
            variable_manager=variable_manager,
            loader=loader
        )

        # Bare tasks listed in a play produce implicit blocks; squash a run
        # of consecutive implicit blocks into one to save processing later.
        previous = block_list[-1] if block_list else None
        if loaded_block._implicit and previous is not None and previous._implicit:
            for task in loaded_block.block:
                task._block = previous
            previous.block.extend(loaded_block.block)
        else:
            block_list.append(loaded_block)

    return block_list
def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
    '''
    Given a list of task datastructures (parsed from YAML),
    return a list of Task() or TaskInclude() objects.

    Bare dicts become Task (or Handler) objects, 'block' entries become
    Block objects, and 'include' entries are either expanded in place
    (static includes) or kept as TaskInclude/HandlerTaskInclude objects
    to be resolved at run time (dynamic includes).
    '''
    # we import here to prevent a circular dependency with imports
    from ansible.playbook.block import Block
    from ansible.playbook.handler import Handler
    from ansible.playbook.task import Task
    from ansible.playbook.task_include import TaskInclude
    from ansible.playbook.handler_task_include import HandlerTaskInclude
    from ansible.template import Templar

    assert isinstance(ds, list)

    task_list = []
    for task_ds in ds:
        assert isinstance(task_ds, dict)

        if 'block' in task_ds:
            # explicit block entry: recurse via Block.load
            t = Block.load(
                task_ds,
                play=play,
                parent_block=block,
                role=role,
                task_include=None,
                use_handlers=use_handlers,
                variable_manager=variable_manager,
                loader=loader,
            )
            task_list.append(t)
        else:
            if 'include' in task_ds:
                if use_handlers:
                    t = HandlerTaskInclude.load(task_ds, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)
                else:
                    t = TaskInclude.load(task_ds, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)

                all_vars = variable_manager.get_vars(loader=loader, play=play, task=t)
                templar = Templar(loader=loader, variables=all_vars)

                # check to see if this include is dynamic or static:
                # 1. the user has set the 'static' option to false or true
                # 2. one of the appropriate config options was set
                if t.static is not None:
                    is_static = t.static
                else:
                    # static unless variables appear in the include name or a
                    # loop is present (both require run-time evaluation)
                    is_static = C.DEFAULT_TASK_INCLUDES_STATIC or \
                                (use_handlers and C.DEFAULT_HANDLER_INCLUDES_STATIC) or \
                                (not templar._contains_vars(t.args['_raw_params']) and not t.loop)

                if is_static:
                    if t.loop is not None:
                        raise AnsibleParserError("You cannot use 'static' on an include with a loop", obj=task_ds)

                    # FIXME: all of this code is very similar (if not identical) to that in
                    #        plugins/strategy/__init__.py, and should be unified to avoid
                    #        patches only being applied to one or the other location
                    if task_include:
                        # handle relative includes by walking up the list of parent include
                        # tasks and checking the relative result to see if it exists
                        parent_include = task_include
                        cumulative_path = None
                        while parent_include is not None:
                            parent_include_dir = templar.template(os.path.dirname(parent_include.args.get('_raw_params')))
                            if cumulative_path is None:
                                cumulative_path = parent_include_dir
                            elif not os.path.isabs(cumulative_path):
                                cumulative_path = os.path.join(parent_include_dir, cumulative_path)
                            include_target = templar.template(t.args['_raw_params'])
                            if t._role:
                                new_basedir = os.path.join(t._role._role_path, 'tasks', cumulative_path)
                                include_file = loader.path_dwim_relative(new_basedir, 'tasks', include_target)
                            else:
                                include_file = loader.path_dwim_relative(loader.get_basedir(), cumulative_path, include_target)
                            if os.path.exists(include_file):
                                break
                            else:
                                # not found at this level; try the next parent up
                                parent_include = parent_include._task_include
                    else:
                        try:
                            include_target = templar.template(t.args['_raw_params'])
                        except AnsibleUndefinedVariable as e:
                            raise AnsibleParserError(
                                "Error when evaluating variable in include name: %s.\n\n" \
                                "When using static includes, ensure that any variables used in their names are defined in vars/vars_files\n" \
                                "or extra-vars passed in from the command line. Static includes cannot use variables from inventory\n" \
                                "sources like group or host vars." % t.args['_raw_params'],
                                obj=task_ds,
                                suppress_extended_error=True,
                            )
                        if t._role:
                            if use_handlers:
                                include_file = loader.path_dwim_relative(t._role._role_path, 'handlers', include_target)
                            else:
                                include_file = loader.path_dwim_relative(t._role._role_path, 'tasks', include_target)
                        else:
                            include_file = loader.path_dwim(include_target)

                    try:
                        data = loader.load_from_file(include_file)
                        if data is None:
                            # NOTE(review): this returns from the whole function,
                            # discarding tasks already collected -- confirm that an
                            # empty include file is really meant to do that.
                            return []
                        elif not isinstance(data, list):
                            # NOTE(review): AnsibleError is not imported in this
                            # module's header -- this line would raise NameError.
                            raise AnsibleError("included task files must contain a list of tasks", obj=data)
                    except AnsibleFileNotFound as e:
                        if t.static or \
                           C.DEFAULT_TASK_INCLUDES_STATIC or \
                           C.DEFAULT_HANDLER_INCLUDES_STATIC and use_handlers:
                            raise
                        # missing file: fall back to resolving the include at run time
                        display.deprecated(
                            "Included file '%s' not found, however since this include is not " \
                            "explicitly marked as 'static: yes', we will try and include it dynamically " \
                            "later. In the future, this will be an error unless 'static: no' is used " \
                            "on the include task. If you do not want missing includes to be considered " \
                            "dynamic, use 'static: yes' on the include or set the global ansible.cfg " \
                            "options to make all inclues static for tasks and/or handlers" % include_file,
                        )
                        task_list.append(t)
                        continue

                    included_blocks = load_list_of_blocks(
                        data,
                        play=play,
                        parent_block=block,
                        task_include=t,
                        role=role,
                        use_handlers=use_handlers,
                        loader=loader,
                        variable_manager=variable_manager,
                    )

                    # pop tags out of the include args, if they were specified there, and assign
                    # them to the include. If the include already had tags specified, we raise an
                    # error so that users know not to specify them both ways
                    tags = t.vars.pop('tags', [])
                    if isinstance(tags, string_types):
                        tags = tags.split(',')

                    if len(tags) > 0:
                        if len(t.tags) > 0:
                            raise AnsibleParserError(
                                "Include tasks should not specify tags in more than one way (both via args and directly on the task)." \
                                " Mixing tag specify styles is prohibited for whole import hierarchy, not only for single import statement",
                                obj=task_ds,
                                suppress_extended_error=True,
                            )
                        display.deprecated("You should not specify tags in the include parameters. All tags should be specified using the task-level option")
                    else:
                        tags = t.tags[:]

                    # now we extend the tags on each of the included blocks
                    for b in included_blocks:
                        b.tags = list(set(b.tags).union(tags))
                    # END FIXME

                    # FIXME: send callback here somehow...
                    # FIXME: handlers shouldn't need this special handling, but do
                    #        right now because they don't iterate blocks correctly
                    if use_handlers:
                        for b in included_blocks:
                            task_list.extend(b.block)
                    else:
                        task_list.extend(included_blocks)
                else:
                    # dynamic include: keep the TaskInclude object for run time
                    task_list.append(t)
            else:
                # plain task (or handler) entry
                if use_handlers:
                    t = Handler.load(task_ds, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)
                else:
                    t = Task.load(task_ds, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)
                task_list.append(t)

    return task_list
def load_list_of_roles(ds, play, current_role_path=None, variable_manager=None, loader=None):
    '''
    Build and return a list of RoleInclude objects from a parsed
    list of role definitions.
    '''
    # deferred import to avoid a circular dependency
    from ansible.playbook.role.include import RoleInclude

    assert isinstance(ds, list)

    return [
        RoleInclude.load(role_def, play=play, current_role_path=current_role_path,
                         variable_manager=variable_manager, loader=loader)
        for role_def in ds
    ]
| levenlabs/ansible | lib/ansible/playbook/helpers.py | Python | gpl-3.0 | 12,947 |
#!/usr/bin/python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
# The unittest framwork doesn't play nice with pylint:
# pylint: disable-msg=C0103
from __future__ import absolute_import
import unittest
from svtplay_dl.service.oppetarkiv import OppetArkiv
from svtplay_dl.service.tests import HandlesURLsTestMixin
class handlesTest(unittest.TestCase, HandlesURLsTestMixin):
    """URL-dispatch tests for the OppetArkiv service.

    HandlesURLsTestMixin generates the actual test methods from the two
    class attributes below.
    """

    # Service class whose URL matching is under test.
    service = OppetArkiv
    # "ok": URLs this service must claim; "bad": URLs it must reject.
    urls = {"ok": ["http://www.oppetarkiv.se/video/1129844/jacobs-stege-avsnitt-1-av-1"], "bad": ["http://www.svtplay.se/video/1090393/del-9"]}
| olof/svtplay-dl | lib/svtplay_dl/service/tests/oppetarkiv.py | Python | mit | 591 |
from lxml.builder import E
class ErrorValueCalculator(object):
    """Base class for the error-value calculators; owns the XML
    (de)serialization dispatch between the concrete subclasses."""

    element_name = 'ErrorValueCalculator'

    def to_xml(self):
        """Return a (type-element, parameters-element) pair for this calculator."""
        # Dispatch on the concrete subclass (the checks do not overlap).
        if isinstance(self, MPIErrorValueCalculator):
            kind = 'MPI'
        elif isinstance(self, RMSErrorValueCalculator):
            kind = 'RMS'
        elif isinstance(self, MatrixErrorValueCalculator):
            kind = 'Matrix'
        else:
            assert False, ("The class '{}' is not listed in ErrorValueCalculator.to_xml()"
                           .format(type(self)))
        return (E(self.element_name + 'Type', kind), self._to_xml())

    @classmethod
    def from_xml(cls, type_element, parameters_element):
        """Instantiate the concrete calculator named by `type_element`."""
        kind = type_element.text.strip()
        if kind == 'MPI':
            return MPIErrorValueCalculator.from_xml(parameters_element)
        if kind == 'RMS':
            return RMSErrorValueCalculator.from_xml(parameters_element)
        if kind == 'Matrix':
            return MatrixErrorValueCalculator.from_xml(parameters_element)
        raise Exception("Unrecognised type '{}' of traces reader"
                        .format(kind))
class MatrixErrorValueCalculator(ErrorValueCalculator):
    """Calculator whose error value is based on comparing V/dVdt matrices."""

    def __init__(self, vdVdt_matrix, enable_file_export, export_file):
        """
        `vdVdt_matrix` -- the VdVdtMatrix used for the comparison
        `enable_file_export` -- set to 0 to disable export of every evaluated
                                parameter set to a file
        `export_file` -- name of the export file
        """
        self.vdVdt_matrix = vdVdt_matrix
        self.enable_file_export = enable_file_export
        self.export_file = export_file

    def _to_xml(self):
        # The matrix serializes to a (type, parameters) element pair;
        # the export settings come after it.
        children = list(self.vdVdt_matrix.to_xml())
        children.append(E('enableFileExport', self.enable_file_export))
        children.append(E('exportFile', self.export_file))
        return E(self.element_name + 'Parameters', *children)

    @classmethod
    def from_xml(cls, element):
        matrix = VdVdtMatrix.from_xml(
            element.find(VdVdtMatrix.element_name + 'Type'),
            element.find(VdVdtMatrix.element_name + 'Parameters'))
        return cls(matrix,
                   element.find('enableFileExport').text.strip(),
                   element.find('exportFile').text.strip())
class RMSErrorValueCalculator(ErrorValueCalculator):
    """Root-mean-square error-value calculator."""

    def __init__(self, enable_file_export, export_file, enable_traces_export):
        """
        `enable_file_export` -- set to 0 to disable export of every evaluated
                                parameter set to a file
        `export_file` -- name of the export file
        `enable_traces_export` -- whether the traces themselves are exported
        """
        self.export_file = export_file
        self.enable_file_export = enable_file_export
        self.enable_traces_export = enable_traces_export

    def _to_xml(self):
        return E(self.element_name + 'Parameters',
                 E('enableFileExport', self.enable_file_export),
                 E('enableTracesExport', self.enable_traces_export),
                 E('exportFile', self.export_file))

    @classmethod
    def from_xml(cls, element):
        def text_of(tag):
            return element.find(tag).text.strip()
        return cls(text_of('enableFileExport'), text_of('exportFile'),
                   text_of('enableTracesExport'))
class MPIErrorValueCalculator(ErrorValueCalculator):
    """Wrapper that distributes another calculator over MPI."""

    def __init__(self, calculator, enable_file_export, export_file):
        """
        `calculator` -- the actual calculator to be distributed over MPI
        `enable_file_export` -- set to 0 to disable export of every evaluated
                                parameter set to a file
        `export_file` -- name of the export file
        """
        self.calculator = calculator
        self.export_file = export_file
        self.enable_file_export = enable_file_export

    def _to_xml(self):
        # The wrapped calculator serializes to a (type, parameters) pair;
        # the export settings come after it.
        children = list(self.calculator.to_xml())
        children.append(E('enableFileExport', self.enable_file_export))
        children.append(E('exportFile', self.export_file))
        return E(self.element_name + 'Parameters', *children)

    @classmethod
    def from_xml(cls, element):
        inner = ErrorValueCalculator.from_xml(
            element.find(ErrorValueCalculator.element_name + 'Type'),
            element.find(ErrorValueCalculator.element_name + 'Parameters'))
        return cls(inner,
                   element.find('enableFileExport').text.strip(),
                   element.find('exportFile').text.strip())
class VdVdtMatrix(object):
    """Base class for V/dVdt matrices; owns the XML dispatch."""

    element_name = 'VdVdtMatrix'

    def to_xml(self):
        """Return a (type-element, parameters-element) pair for this matrix."""
        if isinstance(self, DirectVdVdtMatrix):
            kind = 'Direct'
        else:
            assert False, ("The class '{}' is not listed in VdVdtMatrix.to_xml()"
                           .format(type(self)))
        return (E(self.element_name + 'Type', kind), self._to_xml())

    @classmethod
    def from_xml(cls, type_element, parameters_element):
        """Instantiate the concrete matrix named by `type_element`."""
        kind = type_element.text.strip()
        if kind != 'Direct':
            raise Exception("Unrecognised type '{}' of traces reader".format(kind))
        return DirectVdVdtMatrix.from_xml(parameters_element)
class DirectVdVdtMatrix(VdVdtMatrix):
    """Discretized V/dVdt matrix with direct binning."""

    def __init__(self, v_length, dvdt_length, minimal_v, maximal_v, compare_precision,
                 numeric_output_format, sum_of_square_roots):
        """
        `v_length` -- number of bins in the V direction of the VdVdtMatrix
        `dvdt_length` -- number of bins in the dVdt direction of the VdVdtMatrix
        `minimal_v` / `maximal_v` -- V range (same units as the output data traces)
        `compare_precision` -- tolerance used to decide two values are
                               identical (should be set very low)
        `numeric_output_format` -- show numeric matrix output at a very high
                                   verbose level
        `sum_of_square_roots` -- use the sum of square roots instead of the
                                 sum of squares for the error value (setting
                                 this to 1 is supposed to be better)
        """
        # Numeric fields are normalized to int/float up front.
        self.v_length = int(v_length)
        self.dvdt_length = int(dvdt_length)
        self.minimal_v = float(minimal_v)
        self.maximal_v = float(maximal_v)
        self.compare_precision = float(compare_precision)
        self.numeric_output_format = numeric_output_format
        self.sum_of_square_roots = sum_of_square_roots

    def _to_xml(self):
        return E(self.element_name + 'Parameters',
                 E('vLength', str(self.v_length)),
                 E('dVdtLength', str(self.dvdt_length)),
                 E('minimalV', str(self.minimal_v)),
                 E('maximalV', str(self.maximal_v)),
                 E('comparePrecision', str(self.compare_precision)),
                 E('numericOutputFormat', self.numeric_output_format),
                 E('SumOfSquareRoots', self.sum_of_square_roots))

    @classmethod
    def from_xml(cls, element):
        def text_of(tag):
            return element.find(tag).text.strip()
        return cls(text_of('vLength'), text_of('dVdtLength'), text_of('minimalV'),
                   text_of('maximalV'), text_of('comparePrecision'),
                   text_of('numericOutputFormat'), text_of('SumOfSquareRoots'))
| wvangeit/NeuroFitter | python/libneurofitterml/error_value_calculator.py | Python | gpl-2.0 | 8,632 |
from .intensity_family import IntensityFamily
from . import viboud_chowell
from .viboud_chowell import ViboudChowellFamily
from . import gaussian
from .gaussian import GaussianFamily
from . import soft_laplace
from .soft_laplace import SoftLaplaceFamily
from .high_level import *
from . import constants
from . import data_model
from . import sir_sim
from . import sparse
| HopkinsIDD/EpiForecastStatMech | epi_forecast_stat_mech/__init__.py | Python | apache-2.0 | 375 |
"""
Return list of messages given a datetime (empty is now) : [ (title, message), ]
Load, unload and reload messages give their name
"""
import yaml
import glob
import os.path
from datetime import datetime
from collections import OrderedDict
from messageApp.messages import Messages
class MessageApp():
    """Loads time-keyed YAML message files and returns the messages active
    at a given datetime.

    Message files live in `messagesDir` and are named
    `<filePattern><name>.yaml`; each file is wrapped in a Messages object
    once loaded.
    """

    def __init__(self, messagesDir='messages', filePattern="messages_"):
        self._filePattern = os.path.join(messagesDir, filePattern)
        # name -> Messages; insertion-ordered so output order is stable
        self._loadedMessages = OrderedDict()

    # return list of estcequecestbientot [ (title, message), ] at now
    def getMessages(self):
        return self.getMessagesAtTime(datetime.now())

    # given a datetime
    # return list of estcequecestbientot [ (title, message), ]
    def getMessagesAtTime(self, time):
        estcequecestbientot = []
        for _, messages in self._loadedMessages.items():
            message = messages.getMessage(time)
            if message:
                estcequecestbientot.append(message)
        return estcequecestbientot

    # given a (string) messageNames (a , separate list)
    # [re]-instanciate some Messages in _loadedMessages[] with it
    def loadMessage(self, messageNames):
        if isinstance(messageNames, str):
            messageNames = (messageNames,)
        for name in messageNames:
            messageObject = self._getMessageObject(name)
            if messageObject:
                self._loadedMessages[name] = Messages(messageObject)

    # given a (string) messageNames (a , separate list)
    # unload some Messages in _loadedMessages[]
    def unloadMessage(self, messageNames):
        if isinstance(messageNames, str):
            messageNames = (messageNames,)
        for name in messageNames:
            # pop with a default avoids KeyError for unknown/unloaded names
            self._loadedMessages.pop(name, None)

    # reload all _loadedMessages[]
    def reload(self):
        # iterate over a snapshot: loadMessage mutates the dict
        for name in list(self._loadedMessages):
            self.loadMessage(name)

    # return (notloadedList, loadedList)
    def listMessages(self):
        loadedList = list(self._loadedMessages)
        notloadedList = list(set(self._getNameList()) - set(loadedList))
        return notloadedList, loadedList

    # given a name
    # return a messageObject (parsed YAML) or None if the name is unknown
    # using _filePattern and the file system
    def _getMessageObject(self, name):
        if name not in self._getNameList():
            return None
        filename = self._filePattern + name + '.yaml'
        # BUG FIX: the file handle was previously never closed.
        with open(filename, 'r') as f:
            # SECURITY NOTE(review): yaml.load can execute arbitrary code on
            # crafted input; switch to yaml.safe_load if these files can come
            # from an untrusted source.
            return yaml.load(f)

    # return an updated list of name of existing [messageObject]
    # using _filePattern and the file system
    def _getNameList(self):
        fileList = glob.glob(self._filePattern + "*.yaml")
        nameStartAt = len(self._filePattern)
        # strip the pattern prefix and the trailing ".yaml"
        return [name[nameStartAt:-5] for name in fileList]
| arnaudcordier/estcequecestbientot | messageApp/messageApp.py | Python | mit | 2,610 |
#coding=utf-8
from uliweb import expose, functions
@expose('/admin/models')
class AdminModelsView(object):
    """Generic admin CRUD views for the models declared in
    settings.ADMIN_MODELS.models (uliweb framework).

    Per-model/per-action behaviour is configured through settings sections
    named ADMIN_MODEL_<MODEL>_<ACTION> (see _get_section).
    """

    def __begin__(self):
        # uliweb hook: runs before every action on this view
        functions.require_login()

    def __init__(self):
        from uliweb import settings

        # Names of the models exposed through this admin interface.
        self.models = []
        for k in settings.ADMIN_MODELS.models:
            self.models.append(k)

    @expose('', defaults={'model':''})
    @expose('<model>')
    def index(self, model):
        """List view: table of instances of `model` (or just the model menu
        when no/unknown model is given)."""
        if model not in self.models:
            model = ''
        template_data = {'table':''}
        if model:
            fields = self._get_fields(model, 'list')
            view = functions.ListView(model, fields=fields)
            if 'data' in request.values:
                # ajax data request: return rows as JSON
                return json(view.json())
            else:
                # NOTE(review): `result` is never used -- run() is presumably
                # called for its side effects on `view`; confirm.
                result = view.run(json_result=True)
                template_data.update({'table':view})
        template_data.update({'models':self.models, 'model':model})
        return template_data

    def _get_section(self, modelname, type):
        """
        Get section info from settings
        model should be model's name
        type should be 'list', 'add', 'edit'
        """
        from uliweb import settings

        return settings.get_var('ADMIN_MODEL_%s_%s' % (modelname.upper(), type.upper()), {})

    def _get_fields(self, modelname, type):
        """
        Get fields according Model and type
        type should be 'list', 'add', 'edit'
        It'll find settings just like 'ADMIN_MODEL_<modelname>_<type>',
        if not found, it'll use Model._fields_list
        """
        section = self._get_section(modelname, type)
        fields = section.get('fields')
        if not fields:
            # default: every field except auto-timestamp ones
            fields = [k for k, prop in functions.get_model(modelname)._fields_list
                      if not ((hasattr(prop, 'auto_now') and prop.auto_now) or
                              (hasattr(prop, 'auto_now_add') and prop.auto_now_add))
                      ]
        return fields

    def _get_parameters(self, modelname, type, params):
        """Resolve extra view parameters (e.g. pre_save callbacks) from the
        settings section, falling back to the defaults given in `params`.

        `params` maps parameter name -> {'type': ..., 'default': ...};
        'function' entries given as dotted strings are imported.
        """
        from uliweb.utils.common import import_attr

        d = {}
        section = self._get_section(modelname, type)
        for k, _t in params.items():
            _type = _t.get('type')
            _default = _t.get('default')
            v = section.get(k)
            if not v:
                v = _default
            if v:
                if _type == 'function':
                    if isinstance(v, (str, unicode)):
                        # dotted-path string: import the callable
                        v = import_attr(v)
                    else:
                        if callable(v):
                            v = v()
                d[k] = v
        return d

    def _post_created_form(self, fcls):
        """Tweak a generated form class: bootstrap layout, Save/Cancel
        buttons, and no empty choice on select fields."""
        from uliweb.form import SelectField
        from uliweb.form.widgets import Button
        from uliweb.core.html import Tag

        fcls.layout_class = 'bs3t'
        fcls.form_buttons = [
            str(Button(value=_('Save'), _class="btn btn-primary btn-sm",
                       name="submit", type="submit")),
            str(Tag('a', _('Cancel'), _class="btn btn-default btn-sm",
                    href="javascript:history.go(-1);")),
        ]
        for k, v in fcls.fields.items():
            if isinstance(v, SelectField):
                v.empty = None

    @expose('<model>/add')
    def add(self, model):
        """Create view for `model`; supports an ajax variant."""
        if '__ajax__' in request.GET:
            response.template = 'AdminModelsView/ajax_add.html'
            json_result = True
        else:
            json_result = False

        def post_created_form(fcls, model):
            self._post_created_form(fcls)

        template_data = {'model':model}
        mapping = {
            'pre_save':{'type':'function'},
        }
        params = self._get_parameters(model, 'add', mapping)
        view = functions.AddView(model,
                                 ok_url=url_for(self.__class__.index, model=model),
                                 fields=self._get_fields(model, 'add'),
                                 post_created_form=post_created_form,
                                 template_data=template_data,
                                 **params)
        return view.run(json_result=json_result)

    @expose('<model>/edit')
    def edit(self, model):
        """Edit view for the `model` instance selected by ?id=..."""
        id = request.GET.get('id')
        if not id:
            error("There is no id parameter")
        obj = functions.get_object(model, int(id))
        if not obj:
            error("Can't found object %s of Model %s" % (id, model))

        def post_created_form(fcls, model, obj):
            self._post_created_form(fcls)

        template_data = {'model':model}
        mapping = {
            'pre_save':{'type':'function'},
        }
        params = self._get_parameters(model, 'edit', mapping)
        # 'title' is shown read-only on the edit form
        static_fields = ['title']
        view = functions.EditView(model, obj=obj,
                                  ok_url=url_for(self.__class__.index, model=model),
                                  fields=self._get_fields(model, 'edit'),
                                  post_created_form=post_created_form,
                                  template_data=template_data,
                                  static_fields=static_fields,
                                  **params)
        return view.run()

    @expose('<model>/delete')
    def delete(self, model):
        """Bulk-delete the instances whose ids are POSTed as 'ids'."""
        ids = request.POST.getlist('ids')
        Model = functions.get_model(model)
        Model.filter(Model.c.id.in_(ids)).remove()
        return json({'success':True, 'message':'删除成功'})
@expose('/test')
def test():
    """Debug endpoint listing all blog categories as id/text pairs."""
    Category = functions.get_model('blogcategory')
    return json([{'id': row.id, 'text': unicode(row)} for row in Category.all()])
#!/usr/bin/python3
# -*- coding: utf8 -*-
# File: parser.py
#
# By Maxime Brodat <maxime.brodat@fouss.fr>
#
# Created: 17/04/2016 by Fouss
"""Parser for the transshipment solver project"""
from ag41_transshipment.solver import get_platform_list
import networkx as nx
import math
class Parser(object):
    """Reads a transshipment problem file and writes the solution file.

    The problem file is a line-oriented text format made of comment lines
    (first token containing ``#``), scalar headers (``NAME``, ``NBR_NODES``,
    ``NBR_EDGES``, ``T``, ``EOF``) and ``NODE:`` / ``EDGE:`` records.

    NOTE(review): graph access uses ``edges_iter`` and ``graph.edge[u][v]``,
    which is the networkx 1.x API -- confirm the pinned networkx version
    before upgrading that dependency.
    """

    def __init__(self, file_path):
        """Creates the Parser object.

        Probes the file once so a bad path fails at construction time
        instead of on the first import/export call.

        :param file_path: path of the problem file to parse
        :raises IOError: if the file cannot be opened for reading
        """
        try:
            with open(file_path, 'r'):
                pass
            self.file_path = file_path
        except IOError:
            raise IOError('File {} doesn\'t exist'.format(file_path))

    def import_from_file(self):
        """Imports all informations about the problem.

        :return: a ``networkx.DiGraph`` whose graph attributes hold the
            headers (``name``, ``nbr_nodes``, ``nbr_edges``, ``time``),
            whose nodes carry ``x``, ``y``, ``demand``, ``unit_cost``,
            ``time`` and ``flow``, and whose edges carry ``id``,
            ``capacity``, ``fixed_cost``, ``unit_cost``, ``time`` and
            ``flow``.  Edges with a zero capacity are dropped.
        :raises SyntaxError: on an unrecognised record type, with the
            offending line number
        """
        new_graph = nx.DiGraph()
        # "with" guarantees the handle is closed even if parsing raises.
        with open(self.file_path, 'r') as file:
            i = 0
            for line in file.readlines():
                i += 1
                line = line.split()
                if not line:
                    # Bug fix: a blank line used to raise IndexError on
                    # line[0]; empty lines are now simply skipped.
                    continue
                if '#' in line[0]:
                    # Comment line.
                    pass
                elif line[0] == 'NODE:':
                    new_graph.add_node(int(line[1]), x=float(line[2]), y=float(line[3]), demand=int(line[4]),
                                       unit_cost=float(line[5]), time=float(line[6]), flow=0)
                elif line[0] == 'EDGE:':
                    # Zero-capacity edges can never carry flow: ignore them.
                    if float(line[4]) != 0:
                        new_graph.add_edge(int(line[2]), int(line[3]), id=int(line[1]), capacity=int(line[4]),
                                           fixed_cost=float(line[5]), unit_cost=float(line[6]), time=float(line[7]), flow=0)
                elif line[0] == 'NAME':
                    new_graph.graph['name'] = line[2]
                elif line[0] == 'NBR_NODES':
                    new_graph.graph['nbr_nodes'] = int(line[2])
                elif line[0] == 'NBR_EDGES':
                    new_graph.graph['nbr_edges'] = int(line[2])
                elif line[0] == 'T':
                    new_graph.graph['time'] = float(line[2])
                elif line[0] == 'EOF':
                    break
                else:
                    raise SyntaxError('File {} has syntax error at line {}'.format(self.file_path, i))
        return new_graph

    def export_to_file(self, init_graph, graph, u_time, s_time):
        """Exports the solution of the problem to ``<file_path>.sol``.

        :param init_graph: graph holding the initial feasible solution
        :param graph: graph holding the final (best/optimal) solution;
            its ``feasible`` and ``interrupted`` graph attributes select
            which sections are written
        :param u_time: user CPU time of the resolution, in seconds
        :param s_time: system CPU time of the resolution, in seconds
        """
        with open(self.file_path + '.sol', 'w+') as file:
            file.write('###############\n')
            file.write('# FILE LOADED #\n')
            file.write('###############\n\n')
            file.write('Problem file: {}\n'.format(self.file_path))
            file.write('Solution file: {}\n'.format(self.file_path + '.sol'))
            if graph.graph['feasible']:
                file.write('\n####################\n')
                file.write('# INITIAL SOLUTION #\n')
                file.write('####################\n\n')
                # Total cost = sum over used edges of flow*unit_cost + fixed_cost.
                cost = 0
                for u, v in init_graph.edges_iter():
                    if init_graph.edge[u][v]['flow'] > 0:
                        cost += init_graph.edge[u][v]['flow'] * init_graph.edge[u][v]['unit_cost'] + init_graph.edge[u][v]['fixed_cost']
                        file.write('Edge #{} from node #{} to node #{} used with flow={}\n'.format(init_graph.edge[u][v]['id'], u, v, init_graph.edge[u][v]['flow']))
                file.write('\nResult: {}\n'.format(cost))
                if graph.graph['interrupted']:
                    file.write('\n#####################################\n')
                    file.write('#        BEST SOLUTION FOUND        #\n')
                    file.write('# The program has been interrupted! #\n')
                    file.write('#####################################\n\n')
                else:
                    file.write('\n####################\n')
                    file.write('# OPTIMAL SOLUTION #\n')
                    file.write('####################\n\n')
                cost = 0
                for u, v in graph.edges_iter():
                    if graph.edge[u][v]['flow'] > 0:
                        cost += graph.edge[u][v]['flow'] * graph.edge[u][v]['unit_cost'] + graph.edge[u][v]['fixed_cost']
                        file.write('Edge #{} from node #{} to node #{} used with flow={}\n'.format(graph.edge[u][v]['id'], u, v,
                                                                                                   graph.edge[u][v]['flow']))
                file.write('\nResult: {}\n'.format(cost))
                file.write('\n###################\n')
                file.write('# RESOLUTION TIME #\n')
                file.write('###################\n\n')
                # Split each duration into hours / minutes / seconds.
                u_hour = (u_time - (u_time % 3600.))/3600
                s_hour = (s_time - (s_time % 3600.))/3600
                u_time -= u_hour * 3600.
                s_time -= s_hour * 3600.
                u_min = (u_time - (u_time % 60.))/60
                s_min = (s_time - (s_time % 60.))/60
                u_time -= u_min * 60.
                s_time -= s_min * 60.
                file.write('Execution time:\n')
                file.write('\tUser time : {} hours, {} minutes and {} seconds\n'.format(u_hour, u_min, u_time))
                file.write('\tSystem time : {} hours, {} minutes and {} seconds\n'.format(s_hour, s_min, s_time))
            else:
                file.write('\nThe problem can\'t be solved!\n')
| MrFouss/Ubiquitous-Shipping | ag41_transshipment/parser.py | Python | gpl-3.0 | 5,074 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.