| code (stringlengths 2–1.05M) | repo_name (stringlengths 5–104) | path (stringlengths 4–251) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int32 2–1.05M) |
|---|---|---|---|---|---|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# modules
import math
import os
import sys
import numpy as np
import mxnet as mx
#custom modules
import config
##############################################
#load input data
##############################################
if config.q < min(config.filter_list):
print("\n\n\n\t\tINCREASE q...")
#read in multivariate time series data
x = np.load("../data/electric.npy")
print("\n\tlength of time series: ", x.shape[0])
if config.max_training_examples:
x = x[:config.max_training_examples]
##############################################
# loop through data constructing features/labels
##############################################
#create arrays for storing values in
x_ts = np.zeros((x.shape[0] - config.q, config.q, x.shape[1]))
y_ts = np.zeros((x.shape[0] - config.q, x.shape[1]))
#loop through collecting records for ts analysis depending on q
for n in list(range(x.shape[0])):
if n + 1 < config.q:
continue
if n + 1 + config.horizon > x.shape[0]:
continue
else:
y_n = x[n+config.horizon,:]
x_n = x[n+1 - config.q:n+1,:]
x_ts[n + 1 - config.q] = x_n
y_ts[n + 1 - config.q] = y_n
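# a quick sanity sketch of the windowing above (hypothetical toy values, assuming
# config.q = 3 and config.horizon = 1; not part of the original script):
#   x = [[0], [1], [2], [3], [4]]
#   n = 2 -> x_n = x[0:3] = [[0], [1], [2]], y_n = x[3] = [3]
#   n = 3 -> x_n = x[1:4] = [[1], [2], [3]], y_n = x[4] = [4]
# each feature window covers q consecutive steps and its label sits `horizon`
# steps beyond the window's last row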
#split into training and testing data
training_examples = int(x_ts.shape[0] * config.split[0])
valid_examples = int(x_ts.shape[0] * config.split[1])
x_train = x_ts[:training_examples]
y_train = y_ts[:training_examples]
x_valid = x_ts[training_examples:training_examples + valid_examples]
y_valid = y_ts[training_examples:training_examples + valid_examples]
x_test = x_ts[training_examples + valid_examples:]
y_test = y_ts[training_examples + valid_examples:]
# print("\nsingle X, Y record example:\n\n")
# print(x_train[:1])
# print(y_train[:1])
# print(x_train[1:2])
# print(y_train[1:2])
print("\ntraining examples: ", x_train.shape[0],
"\n\nvalidation examples: ", x_valid.shape[0],
"\n\ntest examples: ", x_test.shape[0],
"\n\nwindow size: ", config.q,
"\n\nskip length p: ", config.seasonal_period / config.time_interval)
###############################################
#define input data iterators for training and testing
###############################################
train_iter = mx.io.NDArrayIter(data={'seq_data': x_train},
label={'seq_label': y_train},
batch_size=config.batch_size)
val_iter = mx.io.NDArrayIter(data={'seq_data': x_valid},
label={'seq_label': y_valid},
batch_size=config.batch_size)
test_iter = mx.io.NDArrayIter(data={'seq_data': x_test},
label={'seq_label': y_test},
batch_size=config.batch_size)
#print input shapes
input_feature_shape = train_iter.provide_data[0][1]
input_label_shape = train_iter.provide_label[0][1]
print("\nfeature input shape: ", input_feature_shape,
"\nlabel input shape: ", input_label_shape)
####################################
# define neural network graph
####################################
#create placeholders to refer to when creating network graph (names are defined in data iterators)
seq_data = mx.symbol.Variable(train_iter.provide_data[0].name)
seq_label = mx.sym.Variable(train_iter.provide_label[0].name)
# normalize input data (note: BatchNorm standardizes each feature, it does not map values to [0, 1];
# this symbol is currently unused since the convolution below consumes seq_data directly)
normalized_seq_data = mx.sym.BatchNorm(data = seq_data)
# reshape data before applying convolutional layer (conv takes a 4D shape, as used for images)
conv_input = mx.sym.Reshape(data=seq_data, shape=(config.batch_size, 1, config.q, x.shape[1]))
print("\n\t#################################\n\
#convolutional component:\n\
#################################\n")
#create many convolutional filters to slide over the input
outputs = []
for i, filter_size in enumerate(config.filter_list):
# zero pad the input array, adding rows at the top only,
# so that the number of output rows equals the number of input rows after applying the kernel
padi = mx.sym.pad(data=conv_input, mode="constant", constant_value=0,
pad_width=(0, 0, 0, 0, filter_size - 1, 0, 0, 0))
padi_shape = padi.infer_shape(seq_data=input_feature_shape)[1][0]
# apply convolutional layer (the result of each kernel position is a single number)
convi = mx.sym.Convolution(data=padi, kernel=(filter_size, x.shape[1]), num_filter=config.num_filter)
convi_shape = convi.infer_shape(seq_data=input_feature_shape)[1][0]
#apply relu activation function as per paper
acti = mx.sym.Activation(data=convi, act_type='relu')
#transpose output to (batch, q, num_filter, 1) in preparation for the recurrent layer
transposed_convi = mx.symbol.transpose(data=acti, axes= (0,2,1,3))
transposed_convi_shape = transposed_convi.infer_shape(seq_data=input_feature_shape)[1][0]
#reshape to (batches, q, num filter)
reshaped_transposed_convi = mx.sym.Reshape(data=transposed_convi, target_shape=(config.batch_size, config.q, config.num_filter))
reshaped_transposed_convi_shape = reshaped_transposed_convi.infer_shape(seq_data=input_feature_shape)[1][0]
#append resulting symbol to a list
outputs.append(reshaped_transposed_convi)
print("\n\tpadded input size: ", padi_shape)
print("\n\t\tfilter size: ", (filter_size, x.shape[1]), " , number of filters: ", config.num_filter)
print("\n\tconvi output layer shape (notice length is maintained): ", convi_shape)
print("\n\tconvi output layer after transposing: ", transposed_convi_shape)
print("\n\tconvi output layer after reshaping: ", reshaped_transposed_convi_shape)
#concatenate symbols along the filter axis to (batch, q, total_filters)
conv_concat = mx.sym.Concat(*outputs, dim=2)
conv_concat_shape = conv_concat.infer_shape(seq_data=input_feature_shape)[1][0]
print("\nconcat output layer shape: ", conv_concat_shape)
#apply a dropout layer
conv_dropout = mx.sym.Dropout(conv_concat, p = config.dropout)
print("\n\t#################################\n\
#recurrent component:\n\
#################################\n")
#define recurrent cells, which we can unroll into many symbols based on our desired time dependency
cell_outputs = []
for i, recurrent_cell in enumerate(config.rcells):
#unroll the recurrent cell, obtaining a symbol for each time step
outputs, states = recurrent_cell.unroll(length=conv_concat_shape[1], inputs=conv_dropout, merge_outputs=False, layout="NTC")
#we only take the output from the recurrent layer at time t
output = outputs[-1]
#just ensures we can have multiple RNN layer types
cell_outputs.append(output)
print("\n\teach of the ", conv_concat_shape[1], " unrolled hidden cells in the RNN is of shape: ",
output.infer_shape(seq_data=input_feature_shape)[1][0],
"\nNOTE: only the output from the unrolled cell at time t is used...")
#concatenate output from each type of recurrent cell
rnn_component = mx.sym.concat(*cell_outputs, dim=1)
print("\nshape after combining RNN cell types: ", rnn_component.infer_shape(seq_data=input_feature_shape)[1][0])
#apply a dropout layer to output
rnn_dropout = mx.sym.Dropout(rnn_component, p=config.dropout)
print("\n\t#################################\n\
#recurrent-skip component:\n\
#################################\n")
# connect hidden states that are a defined time interval apart,
# because in practice very long-term dependencies are not captured well by LSTM/GRU cells.
# e.g. when predicting electricity consumption you want to connect readings 24 hours apart,
# so with hourly data you link the hidden states at time t and t - 24;
# a plain LSTM/GRU is unlikely to carry a dependency across that many hidden states
#define number of cells to skip through to get a certain time interval back from current hidden state
p = int(config.seasonal_period / config.time_interval)
print("adding skip connections for cells ", p, " intervals apart...")
#define recurrent cells for the skip connections, which we can unroll based on our desired time dependency
skipcell_outputs = []
for i, recurrent_cell in enumerate(config.skiprcells):
#unroll the rnn cell, obtaining an output and state symbol each time
outputs, states = recurrent_cell.unroll(length=conv_concat_shape[1], inputs=conv_dropout, merge_outputs=False, layout="NTC")
#for each unrolled timestep
counter = 0
connected_outputs = []
for i, current_cell in enumerate(reversed(outputs)):
#try adding a concatenated skip connection
try:
#get seasonal cell p steps apart
skip_cell = outputs[i + p]
#connect this cell to its seasonal neighbour
cell_pair = [current_cell, skip_cell]
concatenated_pair = mx.sym.concat(*cell_pair, dim=1)
#prepend symbol to a list
connected_outputs.insert(0, concatenated_pair)
counter += 1
except IndexError:
#prepend symbol to a list without skip connection
connected_outputs.insert(0, current_cell)
selected_outputs = []
for i, current_cell in enumerate(connected_outputs):
t = i + 1
#use p hidden states of Recurrent-skip component from time stamp t − p + 1 to t as outputs
if t > conv_concat_shape[1] - p:
selected_outputs.append(current_cell)
#concatenate outputs
concatenated_output = mx.sym.concat(*selected_outputs, dim=1)
#append to list
skipcell_outputs.append(concatenated_output)
print("\n\t", len(selected_outputs), " hidden cells used in output of shape: ",
concatenated_pair.infer_shape(seq_data=input_feature_shape)[1][0], " after adding skip connections")
print("\n\tconcatenated output shape for each skipRNN cell type: ",
concatenated_output.infer_shape(seq_data=input_feature_shape)[1][0])
#concatenate output from each type of recurrent cell
skiprnn_component = mx.sym.concat(*skipcell_outputs, dim=1)
print("\ncombined flattened recurrent-skip shape : ", skiprnn_component.infer_shape(seq_data=input_feature_shape)[1][0])
#apply a dropout layer
skiprnn_dropout = mx.sym.Dropout(skiprnn_component, p=config.dropout)
print("\n\t#################################\n\
#autoregressive component:\n\
#################################\n")
#the AR component simply says the next prediction for each series
# is a learned linear combination of the previous values available for that time series
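# in equation form (notation mine, one small linear model per series i):
#   ar_output[i] = w_i . x_window[:, i] + b_i
# each series sees only its own q window values, so this term stays linear
# and purely autoregressive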
auto_list = []
for i in list(range(x.shape[1])):
#get a symbol representing data in each individual time series
time_series = mx.sym.slice_axis(data = seq_data, axis = 2, begin = i, end = i+1)
#pass to a fully connected layer
fc_ts = mx.sym.FullyConnected(data=time_series, num_hidden=1)
auto_list.append(fc_ts)
print("\neach time series shape: ", time_series.infer_shape(seq_data=input_feature_shape)[1][0])
#concatenate fully connected outputs
ar_output = mx.sym.concat(*auto_list, dim=1)
print("\nar component shape: ", ar_output.infer_shape(seq_data=input_feature_shape)[1][0])
#do not apply activation function since we want this to be linear
print("\n\t#################################\n\
#combine AR and NN components:\n\
#################################\n")
#combine model components
neural_components = mx.sym.concat(*[rnn_dropout, skiprnn_dropout], dim = 1)
#pass to fully connected layer to map to a single value
neural_output = mx.sym.FullyConnected(data=neural_components, num_hidden=x.shape[1])
print("\nNN output shape : ", neural_output.infer_shape(seq_data=input_feature_shape)[1][0])
#sum the output from AR and deep learning
model_output = neural_output + ar_output
print("\nshape after adding autoregressive output: ", model_output.infer_shape(seq_data=input_feature_shape)[1][0])
#########################################
# loss function
#########################################
#compute the gradient of the L2 loss
loss_grad = mx.sym.LinearRegressionOutput(data = model_output, label = seq_label)
#give the final network symbol an interpretable name
batmans_NN = loss_grad
#########################################
# create a trainable module on CPU/GPUs
#########################################
model = mx.mod.Module(symbol=batmans_NN,
context=config.context,
data_names=[v.name for v in train_iter.provide_data],
label_names=[v.name for v in train_iter.provide_label])
####################################
#define evaluation metrics to show when training
#####################################
#root relative squared error
def rse(label, pred):
"""computes the root relative squared error
(condensed using standard deviation formula)"""
#compute the root mean squared error
numerator = np.sqrt(np.mean(np.square(label - pred), axis = None))
#numerator = np.sqrt(np.sum(np.square(label - pred), axis=None))
#compute the RMSE if we were to simply predict the average of all label values
denominator = np.std(label, axis = None)
#denominator = np.sqrt(np.sum(np.square(label - np.mean(label, axis = None)), axis=None))
return numerator / denominator
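# why the condensed form works (a hedged check, not from the original source):
#   RSE = sqrt(sum((y - yhat)^2)) / sqrt(sum((y - ybar)^2))
#       = sqrt(mean((y - yhat)^2)) / std(y)
# because the 1/N factors inside both square roots cancel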
_rse = mx.metric.create(rse)
#relative absolute error
def rae(label, pred):
"""computes the relative absolute error
(condensed using standard deviation formula)"""
#compute the root of the sum of the squared error
numerator = np.mean(np.abs(label - pred), axis=None)
#numerator = np.sum(np.abs(label - pred), axis = None)
#compute AE if we were to simply predict the average of the previous values
denominator = np.mean(np.abs(label - np.mean(label, axis=None)), axis=None)
#denominator = np.sum(np.abs(label - np.mean(label, axis = None)), axis=None)
return numerator / denominator
_rae = mx.metric.create(rae)
#empirical correlation coefficient
def corr(label, pred):
"""computes the empirical correlation coefficient"""
#compute the covariance between label and prediction for each series
numerator1 = label - np.mean(label, axis=0)
numerator2 = pred - np.mean(pred, axis = 0)
numerator = np.mean(numerator1 * numerator2, axis=0)
#normalise by the product of the standard deviations of label and prediction
denominator = np.std(label, axis=0) * np.std(pred, axis=0)
#numerator / denominator holds one correlation per series (321 series for the electricity dataset)
return np.mean(numerator / denominator)
_corr = mx.metric.create(corr)
#use mxnet native metric function
# eval_metrics = mx.metric.CompositeEvalMetric()
# for child_metric in [_rse, _rae, _corr]:
# eval_metrics.add(child_metric)
#create a composite metric manually as a sanity check whilst training
def metrics(label, pred):
return ["RSE: ", rse(label, pred), "RAE: ", rae(label, pred), "CORR: ", corr(label, pred)]
################
# #fit the model
################
# allocate memory given the input data and label shapes
model.bind(data_shapes=train_iter.provide_data,
label_shapes=train_iter.provide_label)
# initialize parameters by uniform random numbers
model.init_params()
# optimizer
model.init_optimizer(optimizer=config.optimizer, optimizer_params=config.optimizer_params)
# train for config.num_epoch epochs, i.e. one full pass over the training iterator per epoch
try:
for epoch in range(config.num_epoch):
train_iter.reset()
val_iter.reset()
#eval_metrics.reset()
for batch in train_iter:
model.forward(batch, is_train=True) # compute predictions
model.backward() # compute gradients
model.update() # update parameters
#model.update_metric(eval_metrics, batch.label) # update metrics
# compute train metrics
pred = model.predict(train_iter).asnumpy()
label = y_train
print('\n', 'Epoch %d, Training %s' % (epoch, metrics(label, pred)))
#print('Epoch %d, Training %s' % (epoch, eval_metrics.get()))
# compute test metrics
pred = model.predict(val_iter).asnumpy()
label = y_valid
print('Epoch %d, Validation %s' % (epoch, metrics(label, pred)))
################
# save model after epochs or if user exits early
################
except KeyboardInterrupt:
print('\n' * 5, '-' * 89)
print('Exiting from training early, saving model...')
model.save_checkpoint(
prefix='elec_model',
epoch=config.num_epoch,
save_optimizer_states=False)
print('\n' * 5, '-' * 89)
model.save_checkpoint(
prefix='elec_model',
epoch=config.num_epoch,
save_optimizer_states=False,
)
# #################################
# # save predictions on input data
# #################################
# val_pred = model.predict(val_iter).asnumpy()
# np.save("../results/val_pred.npy", val_pred)
# np.save("../results/val_label.npy", y_valid)
| jiajiechen/mxnet | example/multivariate_time_series/src/train.py | Python | apache-2.0 | 17,882 |
"""'Null' auth backend
This backend requires no authentication and permits everything. Useful for
testing, do not use in production."""
from hil import auth
import logging
from hil.rest import ContextLogger
logger = ContextLogger(logging.getLogger(__name__), {})
class NullAuthBackend(auth.AuthBackend):
"""A null authentication backend.
Authentication always succeeds, giving admin access.
"""
def authenticate(self):
# pylint: disable=missing-docstring
logger.info("successful authentication with null backend.")
return True
def _have_admin(self):
return True
def _have_project_access(self, project):
return True
def setup(*args, **kwargs):
"""Set a NullAuthBackend as the auth backend."""
auth.set_auth_backend(NullAuthBackend())
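# minimal usage sketch (hedged; the import path is inferred from this file's
# location in the repo and is an assumption):
#
#   from hil.ext.auth import null
#   null.setup()   # installs NullAuthBackend via auth.set_auth_backend
#
# after this, every authenticate() call succeeds with admin access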
| SahilTikale/haas | hil/ext/auth/null.py | Python | apache-2.0 | 816 |
#!/usr/bin/python
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Parses metadata from a .proto file
Parses metadata from a .proto file including various options and a list of top
level messages and enums declared within the proto file.
"""
import re
import string
def CamelCase(name):
result = []
cap_next = True
for ch in name:
if cap_next and ch.islower():
result.append(ch.upper())
elif ch.isalnum():
result.append(ch)
cap_next = not ch.isalpha()
return ''.join(result)
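# worked examples (traced by hand through the loop above, not from the source):
#   CamelCase('address_book') -> 'AddressBook'  ('_' is dropped, next letter capitalized)
#   CamelCase('foo2bar')      -> 'Foo2Bar'      (digits are kept; a cap is forced after them)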
class ProtoMetadata:
"""Parses a proto file to extract options and other metadata."""
multiple_files = False
package = ''
java_package = ''
java_api_version = 2
java_alt_api_package = ''
outer_class = ''
optimize_for = 'SPEED'
def __init__(self):
self.messages = []
self.enums = []
def MatchOptions(line, data):
# package
match = re.match(r'package\s*([\w\.]+)\s*;', line)
if match:
data.package = match.group(1)
# java package
match = re.match(r'option\s+java_package\s*=\s*"([^"]+)', line)
if match:
data.java_package = match.group(1)
# outer classname
match = re.match(r'option\s+java_outer_classname\s*=\s*"([^"]+)"', line)
if match:
data.outer_class = match.group(1)
# multiple files?
match = re.match(r'option\s+java_multiple_files\s*=\s*(\S+)\s*;', line)
if match:
data.multiple_files = True if match.group(1).lower() == 'true' else False
match = re.match(r'option\s+optimize_for\s*=\s*(\S+)\s*;', line)
if match:
data.optimize_for = match.group(1)
def MatchTypes(line, data):
# messages and enums
match = re.match(r'\s*(message|enum)\s+(\S+)\s+{$', line)
if match:
if match.group(1) == 'message':
data.messages.append(match.group(2))
else:
data.enums.append(match.group(2))
def MatchGroups(line, data):
match = re.match(
r'\s*(required|optional|repeated)\s+group\s+(\S+)\s+=\s+\d+\s+{$', line)
if match:
data.messages.append(match.group(2))
def ParseProto(filename):
data = ProtoMetadata()
data.outer_class = CamelCase(filename.rsplit('/', 1)[-1].split('.', 1)[0])
with open(filename, 'r') as fh:
brace_depth = 0
inside_extend = False
for line in fh:
line = line.rstrip()
MatchOptions(line, data)
if brace_depth == 0 and re.match(r'\s*extend\s+\S+\s+{$', line):
inside_extend = True
brace_depth += 1
continue
# never emit anything for nested definitions
if brace_depth == 0:
MatchTypes(line, data)
elif brace_depth == 1 and inside_extend:
MatchGroups(line, data)
brace_depth += line.count('{')
brace_depth -= line.count('}')
if brace_depth == 0:
inside_extend = False
return data
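# minimal usage sketch (hypothetical proto contents, not a file from this repo):
#
#   # example.proto:
#   #   package demo;
#   #   option java_package = "com.example.demo";
#   #   message Person {
#   #   }
#
#   data = ParseProto('example.proto')
#   # data.package      == 'demo'
#   # data.java_package == 'com.example.demo'
#   # data.messages     == ['Person']
#   # data.outer_class  == 'Example'  (CamelCase of the file's base name)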
| life-beam/j2objc | scripts/parse_proto.py | Python | apache-2.0 | 3,256 |
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import subprocess
from fuelweb_test import logwrap
class Ebtables(object):
def __init__(self, target_devs, vlans):
super(Ebtables, self).__init__()
self.target_devs = target_devs
self.vlans = vlans
@logwrap
def restore_vlans(self):
for vlan in self.vlans:
for target_dev in self.target_devs:
Ebtables.restore_vlan(target_dev, vlan)
@logwrap
def restore_first_vlan(self):
for target_dev in self.target_devs:
Ebtables.restore_vlan(target_dev, self.vlans[0])
@logwrap
def block_first_vlan(self):
for target_dev in self.target_devs:
Ebtables.block_vlan(target_dev, self.vlans[0])
@staticmethod
@logwrap
def block_mac(mac):
return subprocess.check_output(
['sudo', 'ebtables', '-t', 'filter', '-A', 'FORWARD', '-s',
mac, '-j', 'DROP'],
stderr=subprocess.STDOUT
)
@staticmethod
@logwrap
def restore_mac(mac):
return subprocess.call(
[
'sudo', 'ebtables', '-t', 'filter',
'-D', 'FORWARD', '-s', mac, '-j', 'DROP'
],
stderr=subprocess.STDOUT,
)
@staticmethod
@logwrap
def restore_vlan(target_dev, vlan):
return subprocess.call(
[
'sudo', 'ebtables', '-t', 'broute', '-D', 'BROUTING', '-i',
target_dev, '-p', '8021Q', '--vlan-id', str(vlan), '-j', 'DROP'
],
stderr=subprocess.STDOUT,
)
@staticmethod
@logwrap
def block_vlan(target_dev, vlan):
return subprocess.check_output(
[
'sudo', 'ebtables', '-t', 'broute', '-A', 'BROUTING', '-i',
target_dev, '-p', '8021Q', '--vlan-id', str(vlan), '-j', 'DROP'
],
stderr=subprocess.STDOUT
)
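# minimal usage sketch (hypothetical device names and vlan ids; each call shells
# out to `sudo ebtables`, so sudo rights are needed to actually run it):
#
#   eb = Ebtables(target_devs=['eth0', 'eth1'], vlans=[100, 101])
#   eb.block_first_vlan()    # adds a BROUTING DROP rule for vlan 100 on both devices
#   eb.restore_first_vlan()  # deletes those rules again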
| ddepaoli3/fuel-main-dev | fuelweb_test/helpers/eb_tables.py | Python | apache-2.0 | 2,523 |
# coding: utf8
# $Id: __init__.py 7338 2012-02-03 12:22:14Z milde $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.
"""
Miscellaneous utilities for the documentation utilities.
"""
__docformat__ = 'reStructuredText'
import sys
import os
import os.path
import warnings
import unicodedata
from docutils import ApplicationError, DataError
from docutils import nodes
from docutils.io import FileOutput
from docutils.error_reporting import ErrorOutput, SafeString
class SystemMessage(ApplicationError):
def __init__(self, system_message, level):
Exception.__init__(self, system_message.astext())
self.level = level
class SystemMessagePropagation(ApplicationError): pass
class Reporter:
"""
Info/warning/error reporter and ``system_message`` element generator.
Five levels of system messages are defined, along with corresponding
methods: `debug()`, `info()`, `warning()`, `error()`, and `severe()`.
There is typically one Reporter object per process. A Reporter object is
instantiated with thresholds for reporting (generating warnings) and
halting processing (raising exceptions), a switch to turn debug output on
or off, and an I/O stream for warnings. These are stored as instance
attributes.
When a system message is generated, its level is compared to the stored
thresholds, and a warning or error is generated as appropriate. Debug
messages are produced if the stored debug switch is on, independently of
other thresholds. Message output is sent to the stored warning stream if
not set to ''.
The Reporter class also employs a modified form of the "Observer" pattern
[GoF95]_ to track system messages generated. The `attach_observer` method
should be called before parsing, with a bound method or function which
accepts system messages. The observer can be removed with
`detach_observer`, and another added in its place.
.. [GoF95] Gamma, Helm, Johnson, Vlissides. *Design Patterns: Elements of
Reusable Object-Oriented Software*. Addison-Wesley, Reading, MA, USA,
1995.
"""
levels = 'DEBUG INFO WARNING ERROR SEVERE'.split()
"""List of names for system message levels, indexed by level."""
# system message level constants:
(DEBUG_LEVEL,
INFO_LEVEL,
WARNING_LEVEL,
ERROR_LEVEL,
SEVERE_LEVEL) = range(5)
def __init__(self, source, report_level, halt_level, stream=None,
debug=False, encoding=None, error_handler='backslashreplace'):
"""
:Parameters:
- `source`: The path to or description of the source data.
- `report_level`: The level at or above which warning output will
be sent to `stream`.
- `halt_level`: The level at or above which `SystemMessage`
exceptions will be raised, halting execution.
- `debug`: Show debug (level=0) system messages?
- `stream`: Where warning output is sent. Can be file-like (has a
``.write`` method), a string (file name, opened for writing),
'' (empty string) or `False` (for discarding all stream messages)
or `None` (implies `sys.stderr`; default).
- `encoding`: The output encoding.
- `error_handler`: The error handler for stderr output encoding.
"""
self.source = source
"""The path to or description of the source data."""
self.error_handler = error_handler
"""The character encoding error handler."""
self.debug_flag = debug
"""Show debug (level=0) system messages?"""
self.report_level = report_level
"""The level at or above which warning output will be sent
to `self.stream`."""
self.halt_level = halt_level
"""The level at or above which `SystemMessage` exceptions
will be raised, halting execution."""
if not isinstance(stream, ErrorOutput):
stream = ErrorOutput(stream, encoding, error_handler)
self.stream = stream
"""Where warning output is sent."""
self.encoding = encoding or getattr(stream, 'encoding', 'ascii')
"""The output character encoding."""
self.observers = []
"""List of bound methods or functions to call with each system_message
created."""
self.max_level = -1
"""The highest level system message generated so far."""
def set_conditions(self, category, report_level, halt_level,
stream=None, debug=False):
warnings.warn('docutils.utils.Reporter.set_conditions deprecated; '
'set attributes via configuration settings or directly',
DeprecationWarning, stacklevel=2)
self.report_level = report_level
self.halt_level = halt_level
if not isinstance(stream, ErrorOutput):
stream = ErrorOutput(stream, self.encoding, self.error_handler)
self.stream = stream
self.debug_flag = debug
def attach_observer(self, observer):
"""
The `observer` parameter is a function or bound method which takes one
argument, a `nodes.system_message` instance.
"""
self.observers.append(observer)
def detach_observer(self, observer):
self.observers.remove(observer)
def notify_observers(self, message):
for observer in self.observers:
observer(message)
def system_message(self, level, message, *children, **kwargs):
"""
Return a system_message object.
Raise an exception or generate a warning if appropriate.
"""
# `message` can be a `string`, `unicode`, or `Exception` instance.
if isinstance(message, Exception):
message = SafeString(message)
attributes = kwargs.copy()
if 'base_node' in kwargs:
source, line = get_source_line(kwargs['base_node'])
del attributes['base_node']
if source is not None:
attributes.setdefault('source', source)
if line is not None:
attributes.setdefault('line', line)
# assert source is not None, "node has line- but no source-argument"
if not 'source' in attributes: # 'line' is absolute line number
try: # look up (source, line-in-source)
source, line = self.get_source_and_line(attributes.get('line'))
# print "locator lookup", kwargs.get('line'), "->", source, line
except AttributeError:
source, line = None, None
if source is not None:
attributes['source'] = source
if line is not None:
attributes['line'] = line
# assert attributes['line'] is not None, (message, kwargs)
# assert attributes['source'] is not None, (message, kwargs)
attributes.setdefault('source', self.source)
msg = nodes.system_message(message, level=level,
type=self.levels[level],
*children, **attributes)
if self.stream and (level >= self.report_level
or self.debug_flag and level == self.DEBUG_LEVEL
or level >= self.halt_level):
self.stream.write(msg.astext() + '\n')
if level >= self.halt_level:
raise SystemMessage(msg, level)
if level > self.DEBUG_LEVEL or self.debug_flag:
self.notify_observers(msg)
self.max_level = max(level, self.max_level)
return msg
def debug(self, *args, **kwargs):
"""
Level-0, "DEBUG": an internal reporting issue. Typically, there is no
effect on the processing. Level-0 system messages are handled
separately from the others.
"""
if self.debug_flag:
return self.system_message(self.DEBUG_LEVEL, *args, **kwargs)
def info(self, *args, **kwargs):
"""
Level-1, "INFO": a minor issue that can be ignored. Typically there is
no effect on processing, and level-1 system messages are not reported.
"""
return self.system_message(self.INFO_LEVEL, *args, **kwargs)
def warning(self, *args, **kwargs):
"""
Level-2, "WARNING": an issue that should be addressed. If ignored,
there may be unpredictable problems with the output.
"""
return self.system_message(self.WARNING_LEVEL, *args, **kwargs)
def error(self, *args, **kwargs):
"""
Level-3, "ERROR": an error that should be addressed. If ignored, the
output will contain errors.
"""
return self.system_message(self.ERROR_LEVEL, *args, **kwargs)
def severe(self, *args, **kwargs):
"""
Level-4, "SEVERE": a severe error that must be addressed. If ignored,
the output will contain severe errors. Typically level-4 system
messages are turned into exceptions which halt processing.
"""
return self.system_message(self.SEVERE_LEVEL, *args, **kwargs)
class ExtensionOptionError(DataError): pass
class BadOptionError(ExtensionOptionError): pass
class BadOptionDataError(ExtensionOptionError): pass
class DuplicateOptionError(ExtensionOptionError): pass
def extract_extension_options(field_list, options_spec):
"""
Return a dictionary mapping extension option names to converted values.
:Parameters:
- `field_list`: A flat field list without field arguments, where each
field body consists of a single paragraph only.
- `options_spec`: Dictionary mapping known option names to a
conversion function such as `int` or `float`.
:Exceptions:
- `KeyError` for unknown option names.
- `ValueError` for invalid option values (raised by the conversion
function).
- `TypeError` for invalid option value types (raised by conversion
function).
- `DuplicateOptionError` for duplicate options.
- `BadOptionError` for invalid fields.
- `BadOptionDataError` for invalid option data (missing name,
missing data, bad quotes, etc.).
"""
option_list = extract_options(field_list)
option_dict = assemble_option_dict(option_list, options_spec)
return option_dict
def extract_options(field_list):
"""
Return a list of option (name, value) pairs from field names & bodies.
:Parameter:
`field_list`: A flat field list, where each field name is a single
word and each field body consists of a single paragraph only.
:Exceptions:
- `BadOptionError` for invalid fields.
- `BadOptionDataError` for invalid option data (missing name,
missing data, bad quotes, etc.).
"""
option_list = []
for field in field_list:
if len(field[0].astext().split()) != 1:
raise BadOptionError(
'extension option field name may not contain multiple words')
name = str(field[0].astext().lower())
body = field[1]
if len(body) == 0:
data = None
elif len(body) > 1 or not isinstance(body[0], nodes.paragraph) \
or len(body[0]) != 1 or not isinstance(body[0][0], nodes.Text):
raise BadOptionDataError(
'extension option field body may contain\n'
'a single paragraph only (option "%s")' % name)
else:
data = body[0][0].astext()
option_list.append((name, data))
return option_list
def assemble_option_dict(option_list, options_spec):
"""
Return a mapping of option names to values.
:Parameters:
- `option_list`: A list of (name, value) pairs (the output of
`extract_options()`).
- `options_spec`: Dictionary mapping known option names to a
conversion function such as `int` or `float`.
:Exceptions:
- `KeyError` for unknown option names.
- `DuplicateOptionError` for duplicate options.
- `ValueError` for invalid option values (raised by conversion
function).
- `TypeError` for invalid option value types (raised by conversion
function).
"""
options = {}
for name, value in option_list:
convertor = options_spec[name] # raises KeyError if unknown
if convertor is None:
raise KeyError(name) # or if explicitly disabled
if name in options:
raise DuplicateOptionError('duplicate option "%s"' % name)
try:
options[name] = convertor(value)
except (ValueError, TypeError), detail:
raise detail.__class__('(option: "%s"; value: %r)\n%s'
% (name, value, ' '.join(detail.args)))
return options
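# worked example (traced through the loop above):
#   assemble_option_dict([('depth', '2')], {'depth': int}) -> {'depth': 2}
# an unknown or disabled name raises KeyError, a repeated name raises
# DuplicateOptionError, and a failing convertor re-raises ValueError/TypeError
# with the option name and value added to the message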
class NameValueError(DataError): pass
def decode_path(path):
"""
Ensure `path` is Unicode. Return `nodes.reprunicode` object.
Decode file/path string in a failsafe manner if not already done.
"""
# see also http://article.gmane.org/gmane.text.docutils.user/2905
if isinstance(path, unicode):
return path
try:
path = path.decode(sys.getfilesystemencoding(), 'strict')
except AttributeError: # default value None has no decode method
return nodes.reprunicode(path)
except UnicodeDecodeError:
try:
path = path.decode('utf-8', 'strict')
except UnicodeDecodeError:
path = path.decode('ascii', 'replace')
return nodes.reprunicode(path)
def extract_name_value(line):
"""
Return a list of (name, value) from a line of the form "name=value ...".
:Exception:
`NameValueError` for invalid input (missing name, missing data, bad
quotes, etc.).
"""
attlist = []
while line:
equals = line.find('=')
if equals == -1:
raise NameValueError('missing "="')
attname = line[:equals].strip()
if equals == 0 or not attname:
raise NameValueError(
'missing attribute name before "="')
line = line[equals+1:].lstrip()
if not line:
raise NameValueError(
'missing value after "%s="' % attname)
if line[0] in '\'"':
endquote = line.find(line[0], 1)
if endquote == -1:
raise NameValueError(
'attribute "%s" missing end quote (%s)'
% (attname, line[0]))
if len(line) > endquote + 1 and line[endquote + 1].strip():
raise NameValueError(
'attribute "%s" end quote (%s) not followed by '
'whitespace' % (attname, line[0]))
data = line[1:endquote]
line = line[endquote+1:].lstrip()
else:
space = line.find(' ')
if space == -1:
data = line
line = ''
else:
data = line[:space]
line = line[space+1:].lstrip()
attlist.append((attname.lower(), data))
return attlist
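# worked example (traced through the parser above):
#   extract_name_value('a=1 b="x y"') -> [('a', '1'), ('b', 'x y')]
# a missing name ('=1'), a missing value ('a='), and an unbalanced quote all
# raise NameValueError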
def new_reporter(source_path, settings):
"""
Return a new Reporter object.
:Parameters:
`source` : string
The path to or description of the source text of the document.
`settings` : optparse.Values object
Runtime settings.
"""
reporter = Reporter(
source_path, settings.report_level, settings.halt_level,
stream=settings.warning_stream, debug=settings.debug,
encoding=settings.error_encoding,
error_handler=settings.error_encoding_error_handler)
return reporter
def new_document(source_path, settings=None):
"""
Return a new empty document object.
:Parameters:
`source_path` : string
The path to or description of the source text of the document.
`settings` : optparse.Values object
Runtime settings. If none are provided, a default core set will
be used. If you will use the document object with any Docutils
components, you must provide their default settings as well. For
example, if parsing, at least provide the parser settings,
obtainable as follows::
settings = docutils.frontend.OptionParser(
components=(docutils.parsers.rst.Parser,)
).get_default_values()
"""
from docutils import frontend
if settings is None:
settings = frontend.OptionParser().get_default_values()
source_path = decode_path(source_path)
reporter = new_reporter(source_path, settings)
document = nodes.document(settings, reporter, source=source_path)
document.note_source(source_path, -1)
return document
def clean_rcs_keywords(paragraph, keyword_substitutions):
if len(paragraph) == 1 and isinstance(paragraph[0], nodes.Text):
textnode = paragraph[0]
for pattern, substitution in keyword_substitutions:
match = pattern.search(textnode)
if match:
paragraph[0] = nodes.Text(pattern.sub(substitution, textnode))
return
def relative_path(source, target):
"""
Build and return a path to `target`, relative to `source` (both files).
If there is no common prefix, return the absolute path to `target`.
"""
source_parts = os.path.abspath(source or type(target)('dummy_file')
).split(os.sep)
target_parts = os.path.abspath(target).split(os.sep)
# Check first 2 parts because '/dir'.split('/') == ['', 'dir']:
if source_parts[:2] != target_parts[:2]:
# Nothing in common between paths.
# Return absolute path, using '/' for URLs:
return '/'.join(target_parts)
source_parts.reverse()
target_parts.reverse()
while (source_parts and target_parts
and source_parts[-1] == target_parts[-1]):
# Remove path components in common:
source_parts.pop()
target_parts.pop()
target_parts.reverse()
parts = ['..'] * (len(source_parts) - 1) + target_parts
return '/'.join(parts)
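# worked examples (POSIX-style paths assumed, traced through the logic above):
#   relative_path('dir/a.txt', 'dir/sub/b.txt')      -> 'sub/b.txt'
#   relative_path('pkg/docs/a.txt', 'pkg/img/b.png') -> '../img/b.png'
# with no common prefix at all, the absolute path to `target` is returned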
def get_stylesheet_reference(settings, relative_to=None):
"""
Retrieve a stylesheet reference from the settings object.
Deprecated. Use get_stylesheet_list() instead to
enable specification of multiple stylesheets as a comma-separated
list.
"""
if settings.stylesheet_path:
assert not settings.stylesheet, (
'stylesheet and stylesheet_path are mutually exclusive.')
if relative_to == None:
relative_to = settings._destination
return relative_path(relative_to, settings.stylesheet_path)
else:
return settings.stylesheet
# Return 'stylesheet' or 'stylesheet_path' arguments as list.
#
# The original settings arguments are kept unchanged: you can test
# with e.g. ``if settings.stylesheet_path:``
#
# Differences to ``get_stylesheet_reference``:
# * return value is a list
# * no re-writing of the path (and therefore no optional argument)
# (if required, use ``utils.relative_path(source, target)``
# in the calling script)
def get_stylesheet_list(settings):
"""
Retrieve list of stylesheet references from the settings object.
"""
assert not (settings.stylesheet and settings.stylesheet_path), (
'stylesheet and stylesheet_path are mutually exclusive.')
if settings.stylesheet_path:
sheets = settings.stylesheet_path.split(",")
elif settings.stylesheet:
sheets = settings.stylesheet.split(",")
else:
sheets = []
# strip whitespace (frequently occurring in config files)
return [sheet.strip(u' \t\n') for sheet in sheets]
def get_trim_footnote_ref_space(settings):
"""
Return whether or not to trim footnote space.
If trim_footnote_reference_space is not None, return it.
If trim_footnote_reference_space is None, return False unless the
footnote reference style is 'superscript'.
"""
if settings.trim_footnote_reference_space is None:
return hasattr(settings, 'footnote_references') and \
settings.footnote_references == 'superscript'
else:
return settings.trim_footnote_reference_space
def get_source_line(node):
"""
Return the "source" and "line" attributes from the `node` given or from
its closest ancestor.
"""
while node:
if node.source or node.line:
return node.source, node.line
node = node.parent
return None, None
def escape2null(text):
"""Return a string with escape-backslashes converted to nulls."""
parts = []
start = 0
while True:
found = text.find('\\', start)
if found == -1:
parts.append(text[start:])
return ''.join(parts)
parts.append(text[start:found])
parts.append('\x00' + text[found+1:found+2])
start = found + 2 # skip character after escape
def unescape(text, restore_backslashes=False):
"""
Return a string with nulls removed or restored to backslashes.
Backslash-escaped spaces are also removed.
"""
if restore_backslashes:
return text.replace('\x00', '\\')
else:
for sep in ['\x00 ', '\x00\n', '\x00']:
text = ''.join(text.split(sep))
return text
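# round-trip sketch (traced by hand; '\x00' is the null marker escape2null writes):
#   escape2null(r'a\*b')      -> 'a\x00*b'
#   unescape('a\x00*b')       -> 'a*b'     (null marker dropped)
#   unescape('a\x00*b', True) -> 'a\*b'    (backslash restored)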
def strip_combining_chars(text):
if isinstance(text, str) and sys.version_info < (3,0):
return text
return u''.join([c for c in text if not unicodedata.combining(c)])
def find_combining_chars(text):
"""Return indices of all combining chars in Unicode string `text`.
>>> find_combining_chars(u'A t̆ab̆lĕ')
[3, 6, 9]
"""
if isinstance(text, str) and sys.version_info < (3,0):
return []
return [i for i,c in enumerate(text) if unicodedata.combining(c)]
def column_indices(text):
"""Indices of Unicode string `text` when skipping combining characters.
>>> column_indices(u'A t̆ab̆lĕ')
[0, 1, 2, 4, 5, 7, 8]
"""
# TODO: account for East Asian wide chars here instead of using dummy
# replacements in the tableparser?
string_indices = range(len(text))
for index in find_combining_chars(text):
string_indices[index] = None
return [i for i in string_indices if i is not None]
east_asian_widths = {'W': 2, # Wide
'F': 2, # Full-width (wide)
'Na': 1, # Narrow
'H': 1, # Half-width (narrow)
'N': 1, # Neutral (not East Asian, treated as narrow)
'A': 1} # Ambiguous (s/b wide in East Asian context,
# narrow otherwise, but that doesn't work)
"""Mapping of result codes from `unicodedata.east_asian_widt()` to character
column widths."""
def column_width(text):
"""Return the column width of text.
Correct ``len(text)`` for wide East Asian and combining Unicode chars.
"""
if isinstance(text, str) and sys.version_info < (3,0):
return len(text)
try:
width = sum([east_asian_widths[unicodedata.east_asian_width(c)]
for c in text])
except AttributeError: # east_asian_width() New in version 2.4.
width = len(text)
# correction for combining chars:
width -= len(find_combining_chars(text))
return width
def uniq(L):
r = []
for item in L:
if not item in r:
r.append(item)
return r
# by Li Daobing http://code.activestate.com/recipes/190465/
# since Python 2.6 there is also itertools.combinations()
def unique_combinations(items, n):
"""Return n-length tuples, in sorted order, no repeated elements"""
if n==0: yield []
else:
for i in xrange(len(items)-n+1):
for cc in unique_combinations(items[i+1:],n-1):
yield [items[i]]+cc
def normalize_language_tag(tag):
"""Return a list of normalized combinations for a `BCP 47` language tag.
Example:
>>> normalize_language_tag('de-AT-1901')
['de_at_1901', 'de_at', 'de_1901', 'de']
"""
# normalize:
tag = tag.lower().replace('-','_')
# find all combinations of subtags
taglist = []
base_tag= tag.split('_')[:1]
subtags = tag.split('_')[1:]
# print base_tag, subtags
for n in range(len(subtags), 0, -1):
for tags in unique_combinations(subtags, n):
# print tags
taglist.append('_'.join(base_tag + tags))
taglist += base_tag
return taglist
class DependencyList(object):
"""
List of dependencies, with file recording support.
Note that the output file is not automatically closed. You have
to explicitly call the close() method.
"""
def __init__(self, output_file=None, dependencies=[]):
"""
Initialize the dependency list, automatically setting the
output file to `output_file` (see `set_output()`) and adding
all supplied dependencies.
"""
self.set_output(output_file)
for i in dependencies:
self.add(i)
def set_output(self, output_file):
"""
Set the output file and clear the list of already added
dependencies.
`output_file` must be a string. The specified file is
immediately overwritten.
If output_file is '-', the output will be written to stdout.
If it is None, no file output is done when calling add().
"""
self.list = []
if output_file:
if output_file == '-':
of = None
else:
of = output_file
self.file = FileOutput(destination_path=of,
encoding='utf8', autoclose=False)
else:
self.file = None
def add(self, *filenames):
"""
If the dependency `filename` has not already been added,
append it to self.list and print it to self.file if self.file
is not None.
"""
for filename in filenames:
if not filename in self.list:
self.list.append(filename)
if self.file is not None:
self.file.write(filename+'\n')
def close(self):
"""
Close the output file.
"""
self.file.close()
self.file = None
def __repr__(self):
try:
output_file = self.file.name
except AttributeError:
output_file = None
return '%s(%r, %s)' % (self.__class__.__name__, output_file, self.list)
| ddd332/presto | presto-docs/target/sphinx/docutils/utils/__init__.py | Python | apache-2.0 | 26,763 |
"""Cover for Shelly."""
from aioshelly import Block
from homeassistant.components.cover import (
ATTR_POSITION,
SUPPORT_CLOSE,
SUPPORT_OPEN,
SUPPORT_SET_POSITION,
SUPPORT_STOP,
CoverEntity,
)
from homeassistant.core import callback
from . import ShellyDeviceWrapper
from .const import COAP, DATA_CONFIG_ENTRY, DOMAIN
from .entity import ShellyBlockEntity
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up cover for device."""
wrapper = hass.data[DOMAIN][DATA_CONFIG_ENTRY][config_entry.entry_id][COAP]
blocks = [block for block in wrapper.device.blocks if block.type == "roller"]
if not blocks:
return
async_add_entities(ShellyCover(wrapper, block) for block in blocks)
class ShellyCover(ShellyBlockEntity, CoverEntity):
"""Switch that controls a cover block on Shelly devices."""
def __init__(self, wrapper: ShellyDeviceWrapper, block: Block) -> None:
"""Initialize light."""
super().__init__(wrapper, block)
self.control_result = None
self._supported_features = SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_STOP
if self.wrapper.device.settings["rollers"][0]["positioning"]:
self._supported_features |= SUPPORT_SET_POSITION
@property
def is_closed(self):
"""If cover is closed."""
if self.control_result:
return self.control_result["current_pos"] == 0
return self.block.rollerPos == 0
@property
def current_cover_position(self):
"""Position of the cover."""
if self.control_result:
return self.control_result["current_pos"]
return self.block.rollerPos
@property
def is_closing(self):
"""Return if the cover is closing."""
if self.control_result:
return self.control_result["state"] == "close"
return self.block.roller == "close"
@property
def is_opening(self):
"""Return if the cover is opening."""
if self.control_result:
return self.control_result["state"] == "open"
return self.block.roller == "open"
@property
def supported_features(self):
"""Flag supported features."""
return self._supported_features
async def async_close_cover(self, **kwargs):
"""Close cover."""
self.control_result = await self.block.set_state(go="close")
self.async_write_ha_state()
async def async_open_cover(self, **kwargs):
"""Open cover."""
self.control_result = await self.block.set_state(go="open")
self.async_write_ha_state()
async def async_set_cover_position(self, **kwargs):
"""Move the cover to a specific position."""
self.control_result = await self.block.set_state(
go="to_pos", roller_pos=kwargs[ATTR_POSITION]
)
self.async_write_ha_state()
async def async_stop_cover(self, **_kwargs):
"""Stop the cover."""
self.control_result = await self.block.set_state(go="stop")
self.async_write_ha_state()
@callback
def _update_callback(self):
"""When device updates, clear control result that overrides state."""
self.control_result = None
super()._update_callback()
| tboyce1/home-assistant | homeassistant/components/shelly/cover.py | Python | apache-2.0 | 3,273 |
#!/usr/bin/env python
# Copyright (c) 2008, Donovan Preston
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import eventlet
import commands
import datetime
import errno
import logging
import optparse
import pprint
import signal
from eventlet.green import socket
import sys
from eventlet.green import time
import traceback
try:
import simplejson as json
except ImportError:
import json
import eventlet.backdoor
from eventlet.green import os
import spawning
import spawning.util
KEEP_GOING = True
RESTART_CONTROLLER = False
PANIC = False
DEFAULTS = {
'num_processes': 4,
'threadpool_workers': 4,
'watch': [],
'dev': True,
'host': '',
'port': 8080,
'max_memory': None,
}
def print_exc(msg="Exception occured!"):
print >>sys.stderr, "(%d) %s" % (os.getpid(), msg)
traceback.print_exc()
def environ():
env = os.environ.copy()
# to avoid duplicates in the new sys.path
revised_paths = set()
new_path = list()
for path in sys.path:
if os.path.exists(path) and path not in revised_paths:
revised_paths.add(path)
new_path.append(path)
current_directory = os.path.realpath('.')
if current_directory not in revised_paths:
new_path.append(current_directory)
env['PYTHONPATH'] = ':'.join(new_path)
return env
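# sketch of the result (hypothetical paths; entries must exist on disk to survive
# the os.path.exists filter above):
#   sys.path = ['/app', '/app/lib', '/app'], cwd = '/srv/run'
#   -> env['PYTHONPATH'] == '/app:/app/lib:/srv/run'
# duplicates are dropped, order is kept, and the cwd is appended if missing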
class Child(object):
def __init__(self, pid, kill_pipe):
self.pid = pid
self.kill_pipe = kill_pipe
self.active = True
self.forked_at = datetime.datetime.now()
class Controller(object):
sock = None
factory = None
args = None
config = None
children = None
keep_going = True
panic = False
log = None
controller_pid = None
num_processes = 0
def __init__(self, sock, factory, args, **kwargs):
self.sock = sock
self.factory = factory
self.config = spawning.util.named(factory)(args)
self.args = args
self.children = {}
self.log = logging.getLogger('Spawning')
if not kwargs.get('log_handler'):
self.log.addHandler(logging.StreamHandler())
self.log.setLevel(logging.DEBUG)
self.controller_pid = os.getpid()
self.num_processes = int(self.config.get('num_processes', 0))
self.started_at = datetime.datetime.now()
def spawn_children(self, number=1):
parent_pid = os.getpid()
self.log.debug('Controller.spawn_children(number=%d)' % number)
for i in range(number):
child_side, parent_side = os.pipe()
try:
child_pid = os.fork()
except:
print_exc('Could not fork child! Panic!')
### TODO: restart
if not child_pid: # child process
os.close(parent_side)
command = [sys.executable, '-c',
'import sys; from spawning import spawning_child; spawning_child.main()',
str(parent_pid),
str(self.sock.fileno()),
str(child_side),
self.factory,
json.dumps(self.args)]
if self.args['reload'] == 'dev':
command.append('--reload')
if self.args.get('ssl_private_key') and self.args.get('ssl_certificate'):
command.append('--ssl-private-key')
command.append(self.args.get('ssl_private_key'))
command.append('--ssl-certificate')
command.append(self.args.get('ssl_certificate'))
env = environ()
tpool_size = int(self.config.get('threadpool_workers', 0))
assert tpool_size >= 0, (tpool_size, 'Cannot have a negative --threads argument')
if not tpool_size in (0, 1):
env['EVENTLET_THREADPOOL_SIZE'] = str(tpool_size)
os.execve(sys.executable, command, env)
# controller process
os.close(child_side)
self.children[child_pid] = Child(child_pid, parent_side)
def children_count(self):
return len(self.children)
def runloop(self):
while self.keep_going:
eventlet.sleep(0.1)
## Only start the number of children we need
number = self.num_processes - self.children_count()
if number > 0:
self.log.debug('Should start %d new children', number)
self.spawn_children(number=number)
continue
if not self.children:
## If we don't yet have children, let's loop
continue
pid, result = None, None
try:
pid, result = os.wait()
except OSError, e:
if e.errno != errno.EINTR:
raise
if pid and self.children.get(pid):
try:
child = self.children.pop(pid)
os.close(child.kill_pipe)
except (IOError, OSError):
pass
if result:
signum = os.WTERMSIG(result)
exitcode = os.WEXITSTATUS(result)
self.log.info('(%s) Child died from signal %s with code %s',
pid, signum, exitcode)
def handle_sighup(self, *args, **kwargs):
''' Pass `no_restart` to prevent restarting the run loop '''
self.kill_children()
self.spawn_children(number=self.num_processes)
# TODO: nothing seems to use no_restart, can it be removed?
if not kwargs.get('no_restart', True):
self.runloop()
def kill_children(self):
for pid, child in self.children.items():
try:
os.write(child.kill_pipe, 'k')
child.active = False
# all maintenance of children's membership happens in runloop()
# as children die and os.wait() gets results
except OSError, e:
if e.errno != errno.EPIPE:
raise
def handle_deadlychild(self, *args, **kwargs):
if self.keep_going:
self.log.debug('A child intends to die, starting replacement before it dies')
self.spawn_children(number=1)
def run(self):
self.log.info('(%s) *** Controller starting at %s' % (self.controller_pid,
time.asctime()))
if self.config.get('pidfile'):
fd = open(self.config.get('pidfile'), 'w')
try:
fd.write('%s\n' % self.controller_pid)
finally:
fd.close()
spawning.setproctitle("spawn: controller " + self.args.get('argv_str', ''))
if self.sock is None:
self.sock = bind_socket(self.config)
signal.signal(signal.SIGHUP, self.handle_sighup)
signal.signal(signal.SIGUSR1, self.handle_deadlychild)
if self.config.get('status_port'):
from spawning.util import status
eventlet.spawn(status.Server, self,
self.config['status_host'], self.config['status_port'])
try:
self.runloop()
except KeyboardInterrupt:
self.keep_going = False
self.kill_children()
self.log.info('(%s) *** Controller exiting' % (self.controller_pid))
def bind_socket(config):
sleeptime = 0.5
host = config.get('host', '')
port = config.get('port', 8080)
for x in range(8):
try:
sock = eventlet.listen((host, port))
break
except socket.error, e:
if e[0] != errno.EADDRINUSE:
raise
print "(%s) socket %s:%s already in use, retrying after %s seconds..." % (
os.getpid(), host, port, sleeptime)
eventlet.sleep(sleeptime)
sleeptime *= 2
else:
print "(%s) could not bind socket %s:%s, dying." % (
os.getpid(), host, port)
sys.exit(1)
return sock
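# retry behaviour sketch: on EADDRINUSE the sleep starts at 0.5s and doubles
# after every failed attempt (0.5, 1, 2, ... seconds), so the 8 tries span
# roughly two minutes before the process gives up and exits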
def set_process_owner(spec):
import pwd, grp
if ":" in spec:
user, group = spec.split(":", 1)
else:
user, group = spec, None
if group:
os.setgid(grp.getgrnam(group).gr_gid)
if user:
os.setuid(pwd.getpwnam(user).pw_uid)
return user, group
def start_controller(sock, factory, factory_args):
c = Controller(sock, factory, factory_args)
installGlobal(c)
c.run()
def main():
current_directory = os.path.realpath('.')
if current_directory not in sys.path:
sys.path.append(current_directory)
parser = optparse.OptionParser(description="Spawning is an easy-to-use and flexible wsgi server. It supports graceful restarting so that your site finishes serving any old requests while starting new processes to handle new requests with the new code. For the simplest usage, simply pass the dotted path to your wsgi application: 'spawn my_module.my_wsgi_app'", version=spawning.__version__)
parser.add_option('-v', '--verbose', dest='verbose', action='store_true', help='Display verbose configuration '
'information when starting up or restarting.')
parser.add_option("-f", "--factory", dest='factory', default='spawning.wsgi_factory.config_factory',
help="""Dotted path (eg mypackage.mymodule.myfunc) to a callable which takes a dictionary containing the command line arguments and figures out what needs to be done to start the wsgi application. Current valid values are: spawning.wsgi_factory.config_factory, spawning.paste_factory.config_factory, and spawning.django_factory.config_factory. The factory used determines what the required positional command line arguments will be. See the spawning.wsgi_factory module for documentation on how to write a new factory.
""")
parser.add_option("-i", "--host",
dest='host', default=DEFAULTS['host'],
help='The local ip address to bind.')
parser.add_option("-p", "--port",
dest='port', type='int', default=DEFAULTS['port'],
help='The local port address to bind.')
parser.add_option("-s", "--processes",
dest='processes', type='int', default=DEFAULTS['num_processes'],
help='The number of unix processes to start to use for handling web i/o.')
parser.add_option("-t", "--threads",
dest='threads', type='int', default=DEFAULTS['threadpool_workers'],
help="The number of posix threads to use for handling web requests. "
"If threads is 0, do not use threads but instead use eventlet's cooperative "
"greenlet-based microthreads, monkeypatching the socket and pipe operations which normally block "
"to cooperate instead. Note that most blocking database api modules will not "
"automatically cooperate.")
parser.add_option('-d', '--daemonize', dest='daemonize', action='store_true',
help="Daemonize after starting children.")
parser.add_option('-u', '--chuid', dest='chuid', metavar="ID",
help="Change user ID in daemon mode (and group ID if given, "
"separate with colon.)")
parser.add_option('--pidfile', dest='pidfile', metavar="FILE",
help="Write own process ID to FILE in daemon mode.")
parser.add_option('--stdout', dest='stdout', metavar="FILE",
help="Redirect stdout to FILE in daemon mode.")
parser.add_option('--stderr', dest='stderr', metavar="FILE",
help="Redirect stderr to FILE in daemon mode.")
parser.add_option('-w', '--watch', dest='watch', action='append',
help="Watch the given file's modification time. If the file changes, the web server will "
'restart gracefully, allowing old requests to complete in the old processes '
'while starting new processes with the latest code or configuration.')
## TODO Hook up the svn reloader again
parser.add_option("-r", "--reload",
type='str', dest='reload',
help='If --reload=dev is passed, reload any time '
'a loaded module or configuration file changes.')
parser.add_option('-l', '--access-log-file', dest='access_log_file', default=None,
help='The file to log access log lines to. If not given, log to stdout. Pass /dev/null to discard logs.')
parser.add_option('-c', '--coverage', dest='coverage', action='store_true',
help='If given, gather coverage data from the running program and make the '
'coverage report available from the /_coverage url. See the figleaf docs '
'for more info: http://darcs.idyll.org/~t/projects/figleaf/doc/')
parser.add_option('--sysinfo', dest='sysinfo', action='store_true',
help='If given, gather system information data and make the '
'report available from the /_sysinfo url.')
parser.add_option('-m', '--max-memory', dest='max_memory', type='int', default=0,
help='If given, the maximum amount of memory this instance of Spawning '
'is allowed to use. If all of the processes started by this Spawning controller '
'use more than this amount of memory, send a SIGHUP to the controller '
'to get the children to restart.')
parser.add_option('--backdoor', dest='backdoor', action='store_true',
help='Start a backdoor bound to localhost:3000')
parser.add_option('-a', '--max-age', dest='max_age', type='int',
help='If given, the maximum amount of time (in seconds) an instance of spawning_child '
                      'is allowed to run. Once this time limit has expired the child will '
'gracefully kill itself while the server starts a replacement.')
parser.add_option('--no-keepalive', dest='no_keepalive', action='store_true',
help='Disable HTTP/1.1 KeepAlive')
parser.add_option('-z', '--z-restart-args', dest='restart_args',
help='For internal use only')
parser.add_option('--status-port', dest='status_port', type='int', default=0,
help='If given, hosts a server status page at that port. Two pages are served: a human-readable HTML version at http://host:status_port/status, and a machine-readable version at http://host:status_port/status.json')
parser.add_option('--status-host', dest='status_host', type='string', default='',
help='If given, binds the server status page to the specified local ip address. Defaults to the same value as --host. If --status-port is not supplied, the status page will not be activated.')
parser.add_option('--ssl-certificate', dest='ssl_certificate', type='string', default='',
help='Absolute path to SSL certificate file.')
parser.add_option('--ssl-private-key', dest='ssl_private_key', type='string', default='',
help='Absolute path to SSL private key.')
options, positional_args = parser.parse_args()
if len(positional_args) < 1 and not options.restart_args:
parser.error("At least one argument is required. "
"For the default factory, it is the dotted path to the wsgi application "
"(eg my_package.my_module.my_wsgi_application). For the paste factory, it "
"is the ini file to load. Pass --help for detailed information about available options.")
if options.backdoor:
try:
eventlet.spawn(eventlet.backdoor.backdoor_server, eventlet.listen(('localhost', 3000)))
except Exception, ex:
sys.stderr.write('**> Error opening backdoor: %s\n' % ex)
sock = None
if options.restart_args:
restart_args = json.loads(options.restart_args)
factory = restart_args['factory']
factory_args = restart_args['factory_args']
start_delay = restart_args.get('start_delay')
if start_delay is not None:
factory_args['start_delay'] = start_delay
print "(%s) delaying startup by %s" % (os.getpid(), start_delay)
time.sleep(start_delay)
fd = restart_args.get('fd')
if fd is not None:
sock = socket.fromfd(restart_args['fd'], socket.AF_INET, socket.SOCK_STREAM)
## socket.fromfd doesn't result in a socket object that has the same fd.
## The old fd is still open however, so we close it so we don't leak.
os.close(restart_args['fd'])
return start_controller(sock, factory, factory_args)
## We're starting up for the first time.
if options.daemonize:
# Do the daemon dance. Note that this isn't what is considered good
# daemonization, because frankly it's convenient to keep the file
        # descriptors open (especially when there are prints scattered all
# over the codebase.)
# What we do instead is fork off, create a new session, fork again.
# This leaves the process group in a state without a session
# leader.
pid = os.fork()
if not pid:
os.setsid()
pid = os.fork()
if pid:
os._exit(0)
else:
os._exit(0)
print "(%s) now daemonized" % (os.getpid(),)
        # Close _all_ open (and otherwise!) files.
import resource
maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
if maxfd == resource.RLIM_INFINITY:
maxfd = 4096
for fdnum in xrange(maxfd):
try:
os.close(fdnum)
except OSError, e:
if e.errno != errno.EBADF:
raise
# Remap std{in,out,err}
devnull = os.open(os.path.devnull, os.O_RDWR)
oflags = os.O_WRONLY | os.O_CREAT | os.O_APPEND
if devnull != 0: # stdin
os.dup2(devnull, 0)
if options.stdout:
stdout_fd = os.open(options.stdout, oflags)
if stdout_fd != 1:
os.dup2(stdout_fd, 1)
os.close(stdout_fd)
else:
os.dup2(devnull, 1)
if options.stderr:
stderr_fd = os.open(options.stderr, oflags)
if stderr_fd != 2:
os.dup2(stderr_fd, 2)
os.close(stderr_fd)
else:
os.dup2(devnull, 2)
# Change user & group ID.
if options.chuid:
user, group = set_process_owner(options.chuid)
print "(%s) set user=%s group=%s" % (os.getpid(), user, group)
else:
# Become a process group leader only if not daemonizing.
os.setpgrp()
## Fork off the thing that watches memory for this process group.
controller_pid = os.getpid()
if options.max_memory and not os.fork():
env = environ()
from spawning import memory_watcher
basedir, cmdname = os.path.split(memory_watcher.__file__)
if cmdname.endswith('.pyc'):
cmdname = cmdname[:-1]
os.chdir(basedir)
command = [
sys.executable,
cmdname,
'--max-age', str(options.max_age),
str(controller_pid),
str(options.max_memory)]
os.execve(sys.executable, command, env)
factory = options.factory
    # Watching files implies reload: any watched-file change triggers a graceful restart.
if options.watch:
options.reload = True
if options.status_port == options.port:
options.status_port = None
sys.stderr.write('**> Status port cannot be the same as the service port, disabling status.\n')
factory_args = {
'verbose': options.verbose,
'host': options.host,
'port': options.port,
'num_processes': options.processes,
'threadpool_workers': options.threads,
'watch': options.watch,
'reload': options.reload,
'access_log_file': options.access_log_file,
'pidfile': options.pidfile,
'coverage': options.coverage,
'sysinfo': options.sysinfo,
'no_keepalive' : options.no_keepalive,
'max_age' : options.max_age,
'argv_str': " ".join(sys.argv[1:]),
'args': positional_args,
'status_port': options.status_port,
'status_host': options.status_host or options.host,
'ssl_private_key': options.ssl_private_key,
'ssl_certificate': options.ssl_certificate
}
start_controller(sock, factory, factory_args)
_global_attr_name_ = '_spawning_controller_'
def installGlobal(controller):
setattr(sys, _global_attr_name_, controller)
def globalController():
return getattr(sys, _global_attr_name_, None)
if __name__ == '__main__':
main()
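# --- Editor's hedged CLI sketch (module path illustrative; flags per the
# option parser above) ---
#   spawn my_module.my_wsgi_app               # default wsgi factory
#   spawn -s 4 -t 0 -p 8080 my_module.app     # 4 worker processes, eventlet
#                                             # microthreads instead of POSIX threads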
|
2013Commons/HUE-SHARK
|
desktop/core/ext-py/Spawning-0.9.6/src/spawning/spawning_controller.py
|
Python
|
apache-2.0
| 21,469
|
#
# Copyright (c) 2004 Conectiva, Inc.
#
# Written by Gustavo Niemeyer <niemeyer@conectiva.com>
#
# This file is part of Smart Package Manager.
#
# Smart Package Manager is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# Smart Package Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Smart Package Manager; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
from smart import _
kind = "package"
name = _("DEB Directory")
description = _("""
Local directory with DEB packages.
""")
fields = [("path", _("Directory Path"), str, None,
_("Path of directory containing DEB packages.")),
("recursive", _("Recursive"), bool, False,
_("Search for files recursively."))]
|
blackPantherOS/packagemanagement
|
smartpm/smart/channels/deb_dir_info.py
|
Python
|
apache-2.0
| 1,215
|
# Copyright (C) 2014 VA Linux Systems Japan K.K.
# Copyright (C) 2014 Fumihiko Kakuma <kakuma at valinux co jp>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from neutron_lib import constants as n_const
from neutron.tests.unit.plugins.ml2.drivers.l2pop.rpc_manager \
import l2population_rpc_base
class TestL2populationRpcCallBackTunnelMixin(
l2population_rpc_base.TestL2populationRpcCallBackTunnelMixinBase):
def test_get_agent_ports_no_data(self):
# Make sure vlan manager has no mappings that were added in setUp()
self.vlan_manager.mapping = {}
self.assertFalse(
list(self.fakeagent.get_agent_ports(self.fdb_entries1)))
def test_get_agent_ports_non_existence_key_in_lvm(self):
results = {}
self.vlan_manager.pop(self.lvms[1].net)
for lvm, agent_ports in self.fakeagent.get_agent_ports(
self.fdb_entries1):
results[lvm] = agent_ports
expected = {
self.lvm0: {
self.ports[0].ip: [(self.lvms[0].mac, self.lvms[0].ip)],
self.local_ip: []},
self.lvm2: {
self.ports[2].ip: [(self.lvms[2].mac, self.lvms[2].ip)],
self.local_ip: []},
}
self.assertEqual(expected, results)
def test_get_agent_ports_no_agent_ports(self):
results = {}
self.fdb_entries1[self.lvms[1].net]['ports'] = {}
for lvm, agent_ports in self.fakeagent.get_agent_ports(
self.fdb_entries1):
results[lvm] = agent_ports
expected = {
self.lvm0: {
self.ports[0].ip: [(self.lvms[0].mac, self.lvms[0].ip)],
self.local_ip: []},
self.lvm1: {},
self.lvm2: {
self.ports[2].ip: [(self.lvms[2].mac, self.lvms[2].ip)],
self.local_ip: []},
}
self.assertEqual(expected, results)
def test_fdb_add_tun(self):
with mock.patch.object(self.fakeagent, 'setup_tunnel_port'),\
mock.patch.object(self.fakeagent, 'add_fdb_flow'
) as mock_add_fdb_flow:
self.fakeagent.fdb_add_tun('context', self.fakebr, self.lvm0,
self.agent_ports,
self._tunnel_port_lookup)
expected = [
mock.call(self.fakebr, (self.lvms[0].mac, self.lvms[0].ip),
self.ports[0].ip, self.lvm0, self.ports[0].ofport),
mock.call(self.fakebr, (self.lvms[1].mac, self.lvms[1].ip),
self.ports[1].ip, self.lvm0, self.ports[1].ofport),
mock.call(self.fakebr, (self.lvms[2].mac, self.lvms[2].ip),
self.ports[2].ip, self.lvm0, self.ports[2].ofport),
]
self.assertEqual(sorted(expected),
sorted(mock_add_fdb_flow.call_args_list))
def test_fdb_add_tun_non_existence_key_in_ofports(self):
ofport = self.lvm0.network_type + '0a0a0a0a'
del self.ofports[self.type_gre][self.ports[1].ip]
with mock.patch.object(self.fakeagent, 'setup_tunnel_port',
return_value=ofport
) as mock_setup_tunnel_port,\
mock.patch.object(self.fakeagent, 'add_fdb_flow'
) as mock_add_fdb_flow:
self.fakeagent.fdb_add_tun('context', self.fakebr, self.lvm0,
self.agent_ports,
self._tunnel_port_lookup)
mock_setup_tunnel_port.assert_called_once_with(
self.fakebr, self.ports[1].ip, self.lvm0.network_type)
expected = [
mock.call(self.fakebr, (self.lvms[0].mac, self.lvms[0].ip),
self.ports[0].ip, self.lvm0, self.ports[0].ofport),
mock.call(self.fakebr, (self.lvms[1].mac, self.lvms[1].ip),
self.ports[1].ip, self.lvm0, ofport),
mock.call(self.fakebr, (self.lvms[2].mac, self.lvms[2].ip),
self.ports[2].ip, self.lvm0, self.ports[2].ofport),
]
self.assertEqual(sorted(expected),
sorted(mock_add_fdb_flow.call_args_list))
def test_fdb_add_tun_unavailable_ofport(self):
del self.ofports[self.type_gre][self.ports[1].ip]
with mock.patch.object(self.fakeagent, 'setup_tunnel_port',
return_value=0
) as mock_setup_tunnel_port,\
mock.patch.object(self.fakeagent, 'add_fdb_flow'
) as mock_add_fdb_flow:
self.fakeagent.fdb_add_tun('context', self.fakebr, self.lvm0,
self.agent_ports,
self._tunnel_port_lookup)
mock_setup_tunnel_port.assert_called_once_with(
self.fakebr, self.ports[1].ip, self.lvm0.network_type)
expected = [
mock.call(self.fakebr, (self.lvms[0].mac, self.lvms[0].ip),
self.ports[0].ip, self.lvm0, self.ports[0].ofport),
mock.call(self.fakebr, (self.lvms[2].mac, self.lvms[2].ip),
self.ports[2].ip, self.lvm0, self.ports[2].ofport),
]
self.assertEqual(sorted(expected),
sorted(mock_add_fdb_flow.call_args_list))
def test_fdb_remove_tun(self):
with mock.patch.object(
self.fakeagent, 'del_fdb_flow') as mock_del_fdb_flow:
self.fakeagent.fdb_remove_tun('context', self.fakebr, self.lvm0,
self.agent_ports,
self._tunnel_port_lookup)
expected = [
mock.call(self.fakebr, (self.lvms[0].mac, self.lvms[0].ip),
self.ports[0].ip, self.lvm0, self.ports[0].ofport),
mock.call(self.fakebr, (self.lvms[1].mac, self.lvms[1].ip),
self.ports[1].ip, self.lvm0, self.ports[1].ofport),
mock.call(self.fakebr, (self.lvms[2].mac, self.lvms[2].ip),
self.ports[2].ip, self.lvm0, self.ports[2].ofport),
]
self.assertEqual(sorted(expected),
sorted(mock_del_fdb_flow.call_args_list))
def test_fdb_remove_tun_flooding_entry(self):
self.agent_ports[self.ports[1].ip] = [n_const.FLOODING_ENTRY]
with mock.patch.object(self.fakeagent, 'del_fdb_flow'
) as mock_del_fdb_flow,\
mock.patch.object(self.fakeagent, 'cleanup_tunnel_port'
) as mock_cleanup_tunnel_port:
self.fakeagent.fdb_remove_tun('context', self.fakebr, self.lvm0,
self.agent_ports,
self._tunnel_port_lookup)
expected = [
mock.call(self.fakebr, (self.lvms[0].mac, self.lvms[0].ip),
self.ports[0].ip, self.lvm0, self.ports[0].ofport),
mock.call(self.fakebr,
(n_const.FLOODING_ENTRY[0], n_const.FLOODING_ENTRY[1]),
self.ports[1].ip, self.lvm0, self.ports[1].ofport),
mock.call(self.fakebr, (self.lvms[2].mac, self.lvms[2].ip),
self.ports[2].ip, self.lvm0, self.ports[2].ofport),
]
self.assertEqual(sorted(expected),
sorted(mock_del_fdb_flow.call_args_list))
mock_cleanup_tunnel_port.assert_called_once_with(
self.fakebr, self.ports[1].ofport, self.lvm0.network_type)
def test_fdb_remove_tun_non_existence_key_in_ofports(self):
del self.ofports[self.type_gre][self.ports[1].ip]
with mock.patch.object(
self.fakeagent, 'del_fdb_flow') as mock_del_fdb_flow:
self.fakeagent.fdb_remove_tun('context', self.fakebr, self.lvm0,
self.agent_ports,
self._tunnel_port_lookup)
expected = [
mock.call(self.fakebr, (self.lvms[0].mac, self.lvms[0].ip),
self.ports[0].ip, self.lvm0, self.ports[0].ofport),
mock.call(self.fakebr, (self.lvms[2].mac, self.lvms[2].ip),
self.ports[2].ip, self.lvm0, self.ports[2].ofport),
]
self.assertEqual(sorted(expected),
sorted(mock_del_fdb_flow.call_args_list))
def test_fdb_update(self):
fake__fdb_chg_ip = mock.Mock()
self.fakeagent._fdb_chg_ip = fake__fdb_chg_ip
self.fakeagent.fdb_update('context', self.upd_fdb_entry1)
fake__fdb_chg_ip.assert_called_once_with(
'context', self.upd_fdb_entry1_val)
def test_fdb_update_non_existence_method(self):
self.assertRaises(NotImplementedError,
self.fakeagent.fdb_update,
'context', self.upd_fdb_entry1)
def test__fdb_chg_ip(self):
with mock.patch.object(
self.fakeagent,
'setup_entry_for_arp_reply') as m_setup_entry_for_arp_reply:
self.fakeagent.fdb_chg_ip_tun('context', self.fakebr,
self.upd_fdb_entry1_val,
self.local_ip)
expected = [
mock.call(self.fakebr, 'remove', self.lvm0.vlan, self.lvms[0].mac,
self.lvms[0].ip),
mock.call(self.fakebr, 'add', self.lvm0.vlan, self.lvms[1].mac,
self.lvms[1].ip),
mock.call(self.fakebr, 'remove', self.lvm0.vlan, self.lvms[0].mac,
self.lvms[0].ip),
mock.call(self.fakebr, 'add', self.lvm0.vlan, self.lvms[1].mac,
self.lvms[1].ip),
mock.call(self.fakebr, 'remove', self.lvm1.vlan, self.lvms[0].mac,
self.lvms[0].ip),
mock.call(self.fakebr, 'add', self.lvm1.vlan, self.lvms[2].mac,
self.lvms[2].ip),
]
m_setup_entry_for_arp_reply.assert_has_calls(expected, any_order=True)
def test__fdb_chg_ip_no_lvm(self):
m_setup_entry_for_arp_reply = mock.Mock()
self.fakeagent.setup_entry_for_arp_reply = m_setup_entry_for_arp_reply
self.fakeagent.fdb_chg_ip_tun(
'context', self.fakebr, self.upd_fdb_entry1, self.local_ip, {})
self.assertFalse(m_setup_entry_for_arp_reply.call_count)
def test__fdb_chg_ip_ip_is_local_ip(self):
upd_fdb_entry_val = {
self.lvms[0].net: {
self.local_ip: {
'before': [(self.lvms[0].mac, self.lvms[0].ip)],
'after': [(self.lvms[1].mac, self.lvms[1].ip)],
},
},
}
m_setup_entry_for_arp_reply = mock.Mock()
self.fakeagent.setup_entry_for_arp_reply = m_setup_entry_for_arp_reply
self.fakeagent.fdb_chg_ip_tun('context', self.fakebr,
upd_fdb_entry_val, self.local_ip)
self.assertFalse(m_setup_entry_for_arp_reply.call_count)
def test_fdb_chg_ip_tun_empty_before_after(self):
upd_fdb_entry_val = {
self.lvms[0].net: {
self.local_ip: {},
},
}
m_setup_entry_for_arp_reply = mock.Mock()
self.fakeagent.setup_entry_for_arp_reply = m_setup_entry_for_arp_reply
# passing non-local ip
self.fakeagent.fdb_chg_ip_tun('context', self.fakebr,
upd_fdb_entry_val, "8.8.8.8")
self.assertFalse(m_setup_entry_for_arp_reply.call_count)
|
sebrandon1/neutron
|
neutron/tests/unit/plugins/ml2/drivers/l2pop/rpc_manager/test_l2population_rpc.py
|
Python
|
apache-2.0
| 12,332
|
# coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from tapi_server.models.base_model_ import Model
from tapi_server.models.tapi_path_computation_path_objective_function import TapiPathComputationPathObjectiveFunction # noqa: F401,E501
from tapi_server.models.tapi_path_computation_path_service_end_point import TapiPathComputationPathServiceEndPoint # noqa: F401,E501
from tapi_server.models.tapi_path_computation_routing_constraint import TapiPathComputationRoutingConstraint # noqa: F401,E501
from tapi_server.models.tapi_path_computation_topology_constraint import TapiPathComputationTopologyConstraint # noqa: F401,E501
from tapi_server import util
class TapiPathComputationComputep2ppathInput(Model):
"""NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
Do not edit the class manually.
"""
def __init__(self, topology_constraint=None, routing_constraint=None, objective_function=None, sep=None): # noqa: E501
"""TapiPathComputationComputep2ppathInput - a model defined in OpenAPI
:param topology_constraint: The topology_constraint of this TapiPathComputationComputep2ppathInput. # noqa: E501
:type topology_constraint: TapiPathComputationTopologyConstraint
:param routing_constraint: The routing_constraint of this TapiPathComputationComputep2ppathInput. # noqa: E501
:type routing_constraint: TapiPathComputationRoutingConstraint
:param objective_function: The objective_function of this TapiPathComputationComputep2ppathInput. # noqa: E501
:type objective_function: TapiPathComputationPathObjectiveFunction
:param sep: The sep of this TapiPathComputationComputep2ppathInput. # noqa: E501
:type sep: List[TapiPathComputationPathServiceEndPoint]
"""
self.openapi_types = {
'topology_constraint': TapiPathComputationTopologyConstraint,
'routing_constraint': TapiPathComputationRoutingConstraint,
'objective_function': TapiPathComputationPathObjectiveFunction,
'sep': List[TapiPathComputationPathServiceEndPoint]
}
self.attribute_map = {
'topology_constraint': 'topology-constraint',
'routing_constraint': 'routing-constraint',
'objective_function': 'objective-function',
'sep': 'sep'
}
self._topology_constraint = topology_constraint
self._routing_constraint = routing_constraint
self._objective_function = objective_function
self._sep = sep
@classmethod
def from_dict(cls, dikt) -> 'TapiPathComputationComputep2ppathInput':
"""Returns the dict as a model
:param dikt: A dict.
:type: dict
:return: The tapi.path.computation.computep2ppath.Input of this TapiPathComputationComputep2ppathInput. # noqa: E501
:rtype: TapiPathComputationComputep2ppathInput
"""
return util.deserialize_model(dikt, cls)
@property
def topology_constraint(self):
"""Gets the topology_constraint of this TapiPathComputationComputep2ppathInput.
:return: The topology_constraint of this TapiPathComputationComputep2ppathInput.
:rtype: TapiPathComputationTopologyConstraint
"""
return self._topology_constraint
@topology_constraint.setter
def topology_constraint(self, topology_constraint):
"""Sets the topology_constraint of this TapiPathComputationComputep2ppathInput.
:param topology_constraint: The topology_constraint of this TapiPathComputationComputep2ppathInput.
:type topology_constraint: TapiPathComputationTopologyConstraint
"""
self._topology_constraint = topology_constraint
@property
def routing_constraint(self):
"""Gets the routing_constraint of this TapiPathComputationComputep2ppathInput.
:return: The routing_constraint of this TapiPathComputationComputep2ppathInput.
:rtype: TapiPathComputationRoutingConstraint
"""
return self._routing_constraint
@routing_constraint.setter
def routing_constraint(self, routing_constraint):
"""Sets the routing_constraint of this TapiPathComputationComputep2ppathInput.
:param routing_constraint: The routing_constraint of this TapiPathComputationComputep2ppathInput.
:type routing_constraint: TapiPathComputationRoutingConstraint
"""
self._routing_constraint = routing_constraint
@property
def objective_function(self):
"""Gets the objective_function of this TapiPathComputationComputep2ppathInput.
:return: The objective_function of this TapiPathComputationComputep2ppathInput.
:rtype: TapiPathComputationPathObjectiveFunction
"""
return self._objective_function
@objective_function.setter
def objective_function(self, objective_function):
"""Sets the objective_function of this TapiPathComputationComputep2ppathInput.
:param objective_function: The objective_function of this TapiPathComputationComputep2ppathInput.
:type objective_function: TapiPathComputationPathObjectiveFunction
"""
self._objective_function = objective_function
@property
def sep(self):
"""Gets the sep of this TapiPathComputationComputep2ppathInput.
none # noqa: E501
:return: The sep of this TapiPathComputationComputep2ppathInput.
:rtype: List[TapiPathComputationPathServiceEndPoint]
"""
return self._sep
@sep.setter
def sep(self, sep):
"""Sets the sep of this TapiPathComputationComputep2ppathInput.
none # noqa: E501
:param sep: The sep of this TapiPathComputationComputep2ppathInput.
:type sep: List[TapiPathComputationPathServiceEndPoint]
"""
self._sep = sep
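# --- Editor's hedged sketch: building the model from a plain dict. Keys follow
# attribute_map above; the values are illustrative placeholders. ---
# TapiPathComputationComputep2ppathInput.from_dict({
#     'topology-constraint': {...}, 'routing-constraint': {...},
#     'objective-function': {...}, 'sep': [...]})
# delegates to util.deserialize_model, which maps the dashed wire names back
# onto the snake_case attributes declared in openapi_types.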
|
karthik-sethuraman/ONFOpenTransport
|
RI/flask_server/tapi_server/models/tapi_path_computation_computep2ppath_input.py
|
Python
|
apache-2.0
| 5,982
|
# Copyright (C) 2014 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Defines some model classes related to BGP.
These classes include types used for saving information sent/received over BGP
sessions.
"""
import logging
from time import gmtime
LOG = logging.getLogger('bgpspeaker.model')
class Counter(object):
"""Simple counter for keeping count of several keys."""
def __init__(self):
self._counters = {}
def incr(self, counter_name, incr_by=1):
self._counters[counter_name] = \
self._counters.get(counter_name, 0) + incr_by
def get_count(self, counter_name):
return self._counters.get(counter_name, 0)
def get_counters(self):
return self._counters.copy()
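# --- Editor's hedged sketch (not part of the original module): minimal
# Counter usage; key names are illustrative. ---
def _counter_usage_example():  # hypothetical helper, for illustration only
    c = Counter()
    c.incr('updates_received')                   # default increment of 1
    c.incr('updates_received', incr_by=4)        # now 5
    assert c.get_count('updates_received') == 5
    assert c.get_count('never_seen') == 0        # missing keys default to 0
    snapshot = c.get_counters()                  # a copy; safe to mutate
    return snapshot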
class OutgoingRoute(object):
"""Holds state about a route that is queued for being sent to a given sink.
"""
__slots__ = ('_path', '_for_route_refresh',
'sink', 'next_outgoing_route', 'prev_outgoing_route',
'next_sink_out_route', 'prev_sink_out_route')
def __init__(self, path, for_route_refresh=False):
assert(path)
self.sink = None
self._path = path
        # Is this update in response to a route-refresh request.
# No sent-route is queued for the destination for this update.
self._for_route_refresh = for_route_refresh
# Automatically generated, for list off of Destination.
#
# self.next_outgoing_route
# self.prev_outgoing_route
# Automatically generated for list off of sink.
#
# self.next_sink_out_route
# self.prev_sink_out_route
@property
def path(self):
return self._path
@property
def for_route_refresh(self):
return self._for_route_refresh
def __str__(self):
return ('OutgoingRoute(path: %s, for_route_refresh: %s)' %
(self.path, self.for_route_refresh))
class FlexinetOutgoingRoute(object):
"""Holds state about a route that is queued for being sent to a given sink.
    In this case the sink is a flexinet peer and this route information is from
    a VRF which holds IPv4(v6) NLRIs.
"""
__slots__ = ('_path', 'sink', 'next_outgoing_route', 'prev_outgoing_route',
'next_sink_out_route', 'prev_sink_out_route', '_route_dist')
def __init__(self, path, route_dist):
from ryu.services.protocols.bgp.info_base.vrf4 import Vrf4Path
from ryu.services.protocols.bgp.info_base.vrf6 import Vrf6Path
from ryu.services.protocols.bgp.info_base.vrfevpn import VrfEvpnPath
from ryu.services.protocols.bgp.info_base.vrf4fs import (
Vrf4FlowSpecPath)
from ryu.services.protocols.bgp.info_base.vrf6fs import (
Vrf6FlowSpecPath)
from ryu.services.protocols.bgp.info_base.vrfl2vpnfs import (
L2vpnFlowSpecPath)
assert path.route_family in (Vrf4Path.ROUTE_FAMILY,
Vrf6Path.ROUTE_FAMILY,
VrfEvpnPath.ROUTE_FAMILY,
Vrf4FlowSpecPath.ROUTE_FAMILY,
Vrf6FlowSpecPath.ROUTE_FAMILY,
L2vpnFlowSpecPath.ROUTE_FAMILY,
)
self.sink = None
self._path = path
self._route_dist = route_dist
# Automatically generated, for list off of Destination.
#
# self.next_outgoing_route
# self.prev_outgoing_route
# Automatically generated for list off of sink.
#
# self.next_sink_out_route
# self.prev_sink_out_route
@property
def path(self):
return self._path
@property
def route_dist(self):
return self._route_dist
def __str__(self):
return ('FlexinetOutgoingRoute(path: %s, route_dist: %s)' %
(self.path, self.route_dist))
class SentRoute(object):
"""Holds the information that has been sent to one or more sinks
about a particular BGP destination.
"""
def __init__(self, path, peer, filtered=None, timestamp=None):
assert(path and hasattr(peer, 'version_num'))
self.path = path
# Peer to which this path was sent.
self._sent_peer = peer
self.filtered = filtered
if timestamp:
self.timestamp = timestamp
else:
self.timestamp = gmtime()
# Automatically generated.
#
# self.next_sent_route
# self.prev_sent_route
@property
def sent_peer(self):
return self._sent_peer
class ReceivedRoute(object):
"""Holds the information that has been received to one sinks
about a particular BGP destination.
"""
def __init__(self, path, peer, filtered=None, timestamp=None):
assert(path and hasattr(peer, 'version_num'))
self.path = path
        # Peer from which this path was received.
self._received_peer = peer
self.filtered = filtered
if timestamp:
self.timestamp = timestamp
else:
self.timestamp = gmtime()
@property
def received_peer(self):
return self._received_peer
|
iwaseyusuke/ryu
|
ryu/services/protocols/bgp/model.py
|
Python
|
apache-2.0
| 5,796
|
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""The WebDriver implementation."""
import base64
import copy
from contextlib import contextmanager
import warnings
from .command import Command
from .webelement import WebElement
from .remote_connection import RemoteConnection
from .errorhandler import ErrorHandler
from .switch_to import SwitchTo
from .mobile import Mobile
from .file_detector import FileDetector, LocalFileDetector
from selenium.common.exceptions import (InvalidArgumentException,
WebDriverException,
NoSuchCookieException,
UnknownMethodException)
from selenium.webdriver.common.by import By
from selenium.webdriver.common.html5.application_cache import ApplicationCache
try:
str = basestring
except NameError:
pass
_W3C_CAPABILITY_NAMES = frozenset([
'acceptInsecureCerts',
'browserName',
'browserVersion',
'platformName',
'pageLoadStrategy',
'proxy',
'setWindowRect',
'timeouts',
'unhandledPromptBehavior',
'strictFileInteractability'
])
_OSS_W3C_CONVERSION = {
'acceptSslCerts': 'acceptInsecureCerts',
'version': 'browserVersion',
'platform': 'platformName'
}
def _make_w3c_caps(caps):
"""Makes a W3C alwaysMatch capabilities object.
Filters out capability names that are not in the W3C spec. Spec-compliant
drivers will reject requests containing unknown capability names.
Moves the Firefox profile, if present, from the old location to the new Firefox
options object.
:Args:
- caps - A dictionary of capabilities requested by the caller.
"""
caps = copy.deepcopy(caps)
profile = caps.get('firefox_profile')
always_match = {}
if caps.get('proxy') and caps['proxy'].get('proxyType'):
caps['proxy']['proxyType'] = caps['proxy']['proxyType'].lower()
for k, v in caps.items():
if v and k in _OSS_W3C_CONVERSION:
always_match[_OSS_W3C_CONVERSION[k]] = v.lower() if k == 'platform' else v
if k in _W3C_CAPABILITY_NAMES or ':' in k:
always_match[k] = v
if profile:
moz_opts = always_match.get('moz:firefoxOptions', {})
# If it's already present, assume the caller did that intentionally.
if 'profile' not in moz_opts:
# Don't mutate the original capabilities.
new_opts = copy.deepcopy(moz_opts)
new_opts['profile'] = profile
always_match['moz:firefoxOptions'] = new_opts
return {"firstMatch": [{}], "alwaysMatch": always_match}
class WebDriver(object):
"""
Controls a browser by sending commands to a remote server.
This server is expected to be running the WebDriver wire protocol
as defined at
https://github.com/SeleniumHQ/selenium/wiki/JsonWireProtocol
:Attributes:
- session_id - String ID of the browser session started and controlled by this WebDriver.
- capabilities - Dictionary of effective capabilities of this browser session as returned
by the remote server. See https://github.com/SeleniumHQ/selenium/wiki/DesiredCapabilities
- command_executor - remote_connection.RemoteConnection object used to execute commands.
- error_handler - errorhandler.ErrorHandler object used to handle errors.
"""
_web_element_cls = WebElement
def __init__(self, command_executor='http://127.0.0.1:4444/wd/hub',
desired_capabilities=None, browser_profile=None, proxy=None,
keep_alive=True, file_detector=None, options=None):
"""
Create a new driver that will issue commands using the wire protocol.
:Args:
- command_executor - Either a string representing URL of the remote server or a custom
remote_connection.RemoteConnection object. Defaults to 'http://127.0.0.1:4444/wd/hub'.
- desired_capabilities - A dictionary of capabilities to request when
starting the browser session. Required parameter.
- browser_profile - A selenium.webdriver.firefox.firefox_profile.FirefoxProfile object.
Only used if Firefox is requested. Optional.
- proxy - A selenium.webdriver.common.proxy.Proxy object. The browser session will
be started with given proxy settings, if possible. Optional.
- keep_alive - Whether to configure remote_connection.RemoteConnection to use
HTTP keep-alive. Defaults to True.
- file_detector - Pass custom file detector object during instantiation. If None,
then default LocalFileDetector() will be used.
- options - instance of a driver options.Options class
"""
capabilities = {}
if options is not None:
capabilities = options.to_capabilities()
if desired_capabilities is not None:
if not isinstance(desired_capabilities, dict):
raise WebDriverException("Desired Capabilities must be a dictionary")
else:
capabilities.update(desired_capabilities)
self.command_executor = command_executor
if type(self.command_executor) is bytes or isinstance(self.command_executor, str):
self.command_executor = RemoteConnection(command_executor, keep_alive=keep_alive)
self._is_remote = True
self.session_id = None
self.capabilities = {}
self.error_handler = ErrorHandler()
self.start_client()
self.start_session(capabilities, browser_profile)
self._switch_to = SwitchTo(self)
self._mobile = Mobile(self)
self.file_detector = file_detector or LocalFileDetector()
def __repr__(self):
return '<{0.__module__}.{0.__name__} (session="{1}")>'.format(
type(self), self.session_id)
def __enter__(self):
return self
def __exit__(self, *args):
self.quit()
@contextmanager
def file_detector_context(self, file_detector_class, *args, **kwargs):
"""
Overrides the current file detector (if necessary) in limited context.
Ensures the original file detector is set afterwards.
Example:
with webdriver.file_detector_context(UselessFileDetector):
someinput.send_keys('/etc/hosts')
:Args:
- file_detector_class - Class of the desired file detector. If the class is different
from the current file_detector, then the class is instantiated with args and kwargs
and used as a file detector during the duration of the context manager.
- args - Optional arguments that get passed to the file detector class during
instantiation.
- kwargs - Keyword arguments, passed the same way as args.
"""
last_detector = None
if not isinstance(self.file_detector, file_detector_class):
last_detector = self.file_detector
self.file_detector = file_detector_class(*args, **kwargs)
try:
yield
finally:
if last_detector is not None:
self.file_detector = last_detector
@property
def mobile(self):
return self._mobile
@property
def name(self):
"""Returns the name of the underlying browser for this instance.
:Usage:
::
name = driver.name
"""
if 'browserName' in self.capabilities:
return self.capabilities['browserName']
else:
raise KeyError('browserName not specified in session capabilities')
def start_client(self):
"""
Called before starting a new session. This method may be overridden
to define custom startup behavior.
"""
pass
def stop_client(self):
"""
Called after executing a quit command. This method may be overridden
to define custom shutdown behavior.
"""
pass
def start_session(self, capabilities, browser_profile=None):
"""
Creates a new session with the desired capabilities.
:Args:
         - capabilities - A dictionary of desired capabilities to start the session with.
- browser_profile - A selenium.webdriver.firefox.firefox_profile.FirefoxProfile object. Only used if Firefox is requested.
"""
if not isinstance(capabilities, dict):
raise InvalidArgumentException("Capabilities must be a dictionary")
if browser_profile:
if "moz:firefoxOptions" in capabilities:
capabilities["moz:firefoxOptions"]["profile"] = browser_profile.encoded
else:
capabilities.update({'firefox_profile': browser_profile.encoded})
w3c_caps = _make_w3c_caps(capabilities)
parameters = {"capabilities": w3c_caps,
"desiredCapabilities": capabilities}
response = self.execute(Command.NEW_SESSION, parameters)
if 'sessionId' not in response:
response = response['value']
self.session_id = response['sessionId']
self.capabilities = response.get('value')
        # If capabilities is None we are probably speaking to
        # a W3C endpoint
if self.capabilities is None:
self.capabilities = response.get('capabilities')
# Double check to see if we have a W3C Compliant browser
self.w3c = response.get('status') is None
self.command_executor.w3c = self.w3c
def _wrap_value(self, value):
if isinstance(value, dict):
converted = {}
for key, val in value.items():
converted[key] = self._wrap_value(val)
return converted
elif isinstance(value, self._web_element_cls):
return {'ELEMENT': value.id, 'element-6066-11e4-a52e-4f735466cecf': value.id}
elif isinstance(value, list):
return list(self._wrap_value(item) for item in value)
else:
return value
def create_web_element(self, element_id):
"""Creates a web element with the specified `element_id`."""
return self._web_element_cls(self, element_id, w3c=self.w3c)
def _unwrap_value(self, value):
if isinstance(value, dict):
if 'ELEMENT' in value or 'element-6066-11e4-a52e-4f735466cecf' in value:
wrapped_id = value.get('ELEMENT', None)
if wrapped_id:
return self.create_web_element(value['ELEMENT'])
else:
return self.create_web_element(value['element-6066-11e4-a52e-4f735466cecf'])
else:
for key, val in value.items():
value[key] = self._unwrap_value(val)
return value
elif isinstance(value, list):
return list(self._unwrap_value(item) for item in value)
else:
return value
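    # --- Editor's hedged round-trip sketch (element id illustrative) ---
    # _wrap_value(element) yields the dual-key wire form
    #   {'ELEMENT': '42', 'element-6066-11e4-a52e-4f735466cecf': '42'}
    # and _unwrap_value() rebuilds a WebElement from either key via
    # create_web_element(), so JSON-wire and W3C remote ends both round-trip.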
def execute(self, driver_command, params=None):
"""
Sends a command to be executed by a command.CommandExecutor.
:Args:
- driver_command: The name of the command to execute as a string.
- params: A dictionary of named parameters to send with the command.
:Returns:
The command's JSON response loaded into a dictionary object.
"""
if self.session_id is not None:
if not params:
params = {'sessionId': self.session_id}
elif 'sessionId' not in params:
params['sessionId'] = self.session_id
params = self._wrap_value(params)
response = self.command_executor.execute(driver_command, params)
if response:
self.error_handler.check_response(response)
response['value'] = self._unwrap_value(
response.get('value', None))
return response
# If the server doesn't send a response, assume the command was
# a success
return {'success': 0, 'value': None, 'sessionId': self.session_id}
def get(self, url):
"""
Loads a web page in the current browser session.
"""
self.execute(Command.GET, {'url': url})
@property
def title(self):
"""Returns the title of the current page.
:Usage:
::
title = driver.title
"""
resp = self.execute(Command.GET_TITLE)
return resp['value'] if resp['value'] is not None else ""
def find_element_by_id(self, id_):
"""Finds an element by id.
:Args:
- id\\_ - The id of the element to be found.
:Returns:
- WebElement - the element if it was found
:Raises:
- NoSuchElementException - if the element wasn't found
:Usage:
::
element = driver.find_element_by_id('foo')
"""
return self.find_element(by=By.ID, value=id_)
def find_elements_by_id(self, id_):
"""
Finds multiple elements by id.
:Args:
- id\\_ - The id of the elements to be found.
:Returns:
- list of WebElement - a list with elements if any was found. An
empty list if not
:Usage:
::
elements = driver.find_elements_by_id('foo')
"""
return self.find_elements(by=By.ID, value=id_)
def find_element_by_xpath(self, xpath):
"""
Finds an element by xpath.
:Args:
- xpath - The xpath locator of the element to find.
:Returns:
- WebElement - the element if it was found
:Raises:
- NoSuchElementException - if the element wasn't found
:Usage:
::
element = driver.find_element_by_xpath('//div/td[1]')
"""
return self.find_element(by=By.XPATH, value=xpath)
def find_elements_by_xpath(self, xpath):
"""
Finds multiple elements by xpath.
:Args:
- xpath - The xpath locator of the elements to be found.
:Returns:
- list of WebElement - a list with elements if any was found. An
empty list if not
:Usage:
::
elements = driver.find_elements_by_xpath("//div[contains(@class, 'foo')]")
"""
return self.find_elements(by=By.XPATH, value=xpath)
def find_element_by_link_text(self, link_text):
"""
Finds an element by link text.
:Args:
- link_text: The text of the element to be found.
:Returns:
- WebElement - the element if it was found
:Raises:
- NoSuchElementException - if the element wasn't found
:Usage:
::
element = driver.find_element_by_link_text('Sign In')
"""
return self.find_element(by=By.LINK_TEXT, value=link_text)
def find_elements_by_link_text(self, text):
"""
Finds elements by link text.
:Args:
- link_text: The text of the elements to be found.
:Returns:
- list of webelement - a list with elements if any was found. an
empty list if not
:Usage:
::
elements = driver.find_elements_by_link_text('Sign In')
"""
return self.find_elements(by=By.LINK_TEXT, value=text)
def find_element_by_partial_link_text(self, link_text):
"""
Finds an element by a partial match of its link text.
:Args:
- link_text: The text of the element to partially match on.
:Returns:
- WebElement - the element if it was found
:Raises:
- NoSuchElementException - if the element wasn't found
:Usage:
::
element = driver.find_element_by_partial_link_text('Sign')
"""
return self.find_element(by=By.PARTIAL_LINK_TEXT, value=link_text)
def find_elements_by_partial_link_text(self, link_text):
"""
Finds elements by a partial match of their link text.
:Args:
- link_text: The text of the element to partial match on.
:Returns:
- list of webelement - a list with elements if any was found. an
empty list if not
:Usage:
::
elements = driver.find_elements_by_partial_link_text('Sign')
"""
return self.find_elements(by=By.PARTIAL_LINK_TEXT, value=link_text)
def find_element_by_name(self, name):
"""
Finds an element by name.
:Args:
- name: The name of the element to find.
:Returns:
- WebElement - the element if it was found
:Raises:
- NoSuchElementException - if the element wasn't found
:Usage:
::
element = driver.find_element_by_name('foo')
"""
return self.find_element(by=By.NAME, value=name)
def find_elements_by_name(self, name):
"""
Finds elements by name.
:Args:
- name: The name of the elements to find.
:Returns:
- list of webelement - a list with elements if any was found. an
empty list if not
:Usage:
::
elements = driver.find_elements_by_name('foo')
"""
return self.find_elements(by=By.NAME, value=name)
def find_element_by_tag_name(self, name):
"""
Finds an element by tag name.
:Args:
- name - name of html tag (eg: h1, a, span)
:Returns:
- WebElement - the element if it was found
:Raises:
- NoSuchElementException - if the element wasn't found
:Usage:
::
element = driver.find_element_by_tag_name('h1')
"""
return self.find_element(by=By.TAG_NAME, value=name)
def find_elements_by_tag_name(self, name):
"""
Finds elements by tag name.
:Args:
- name - name of html tag (eg: h1, a, span)
:Returns:
- list of WebElement - a list with elements if any was found. An
empty list if not
:Usage:
::
elements = driver.find_elements_by_tag_name('h1')
"""
return self.find_elements(by=By.TAG_NAME, value=name)
def find_element_by_class_name(self, name):
"""
Finds an element by class name.
:Args:
- name: The class name of the element to find.
:Returns:
- WebElement - the element if it was found
:Raises:
- NoSuchElementException - if the element wasn't found
:Usage:
::
element = driver.find_element_by_class_name('foo')
"""
return self.find_element(by=By.CLASS_NAME, value=name)
def find_elements_by_class_name(self, name):
"""
Finds elements by class name.
:Args:
- name: The class name of the elements to find.
:Returns:
- list of WebElement - a list with elements if any was found. An
empty list if not
:Usage:
::
elements = driver.find_elements_by_class_name('foo')
"""
return self.find_elements(by=By.CLASS_NAME, value=name)
def find_element_by_css_selector(self, css_selector):
"""
Finds an element by css selector.
:Args:
- css_selector - CSS selector string, ex: 'a.nav#home'
:Returns:
- WebElement - the element if it was found
:Raises:
- NoSuchElementException - if the element wasn't found
:Usage:
::
element = driver.find_element_by_css_selector('#foo')
"""
return self.find_element(by=By.CSS_SELECTOR, value=css_selector)
def find_elements_by_css_selector(self, css_selector):
"""
Finds elements by css selector.
:Args:
- css_selector - CSS selector string, ex: 'a.nav#home'
:Returns:
- list of WebElement - a list with elements if any was found. An
empty list if not
:Usage:
::
elements = driver.find_elements_by_css_selector('.foo')
"""
return self.find_elements(by=By.CSS_SELECTOR, value=css_selector)
def execute_script(self, script, *args):
"""
        Synchronously executes JavaScript in the current window/frame.
:Args:
- script: The JavaScript to execute.
- \\*args: Any applicable arguments for your JavaScript.
:Usage:
::
driver.execute_script('return document.title;')
"""
converted_args = list(args)
command = None
if self.w3c:
command = Command.W3C_EXECUTE_SCRIPT
else:
command = Command.EXECUTE_SCRIPT
return self.execute(command, {
'script': script,
'args': converted_args})['value']
def execute_async_script(self, script, *args):
"""
        Asynchronously executes JavaScript in the current window/frame.
:Args:
- script: The JavaScript to execute.
- \\*args: Any applicable arguments for your JavaScript.
:Usage:
::
script = "var callback = arguments[arguments.length - 1]; " \\
"window.setTimeout(function(){ callback('timeout') }, 3000);"
driver.execute_async_script(script)
"""
converted_args = list(args)
if self.w3c:
command = Command.W3C_EXECUTE_SCRIPT_ASYNC
else:
command = Command.EXECUTE_ASYNC_SCRIPT
return self.execute(command, {
'script': script,
'args': converted_args})['value']
@property
def current_url(self):
"""
Gets the URL of the current page.
:Usage:
::
driver.current_url
"""
return self.execute(Command.GET_CURRENT_URL)['value']
@property
def page_source(self):
"""
Gets the source of the current page.
:Usage:
::
driver.page_source
"""
return self.execute(Command.GET_PAGE_SOURCE)['value']
def close(self):
"""
Closes the current window.
:Usage:
::
driver.close()
"""
self.execute(Command.CLOSE)
def quit(self):
"""
Quits the driver and closes every associated window.
:Usage:
::
driver.quit()
"""
try:
self.execute(Command.QUIT)
finally:
self.stop_client()
self.command_executor.close()
@property
def current_window_handle(self):
"""
Returns the handle of the current window.
:Usage:
::
driver.current_window_handle
"""
if self.w3c:
return self.execute(Command.W3C_GET_CURRENT_WINDOW_HANDLE)['value']
else:
return self.execute(Command.GET_CURRENT_WINDOW_HANDLE)['value']
@property
def window_handles(self):
"""
Returns the handles of all windows within the current session.
:Usage:
::
driver.window_handles
"""
if self.w3c:
return self.execute(Command.W3C_GET_WINDOW_HANDLES)['value']
else:
return self.execute(Command.GET_WINDOW_HANDLES)['value']
def maximize_window(self):
"""
Maximizes the current window that webdriver is using
"""
params = None
command = Command.W3C_MAXIMIZE_WINDOW
if not self.w3c:
command = Command.MAXIMIZE_WINDOW
params = {'windowHandle': 'current'}
self.execute(command, params)
def fullscreen_window(self):
"""
Invokes the window manager-specific 'full screen' operation
"""
self.execute(Command.FULLSCREEN_WINDOW)
def minimize_window(self):
"""
Invokes the window manager-specific 'minimize' operation
"""
self.execute(Command.MINIMIZE_WINDOW)
@property
def switch_to(self):
"""
:Returns:
- SwitchTo: an object containing all options to switch focus into
:Usage:
::
element = driver.switch_to.active_element
alert = driver.switch_to.alert
driver.switch_to.default_content()
driver.switch_to.frame('frame_name')
driver.switch_to.frame(1)
driver.switch_to.frame(driver.find_elements_by_tag_name("iframe")[0])
driver.switch_to.parent_frame()
driver.switch_to.window('main')
"""
return self._switch_to
# Navigation
def back(self):
"""
Goes one step backward in the browser history.
:Usage:
::
driver.back()
"""
self.execute(Command.GO_BACK)
def forward(self):
"""
Goes one step forward in the browser history.
:Usage:
::
driver.forward()
"""
self.execute(Command.GO_FORWARD)
def refresh(self):
"""
Refreshes the current page.
:Usage:
::
driver.refresh()
"""
self.execute(Command.REFRESH)
# Options
def get_cookies(self):
"""
Returns a set of dictionaries, corresponding to cookies visible in the current session.
:Usage:
::
driver.get_cookies()
"""
return self.execute(Command.GET_ALL_COOKIES)['value']
def get_cookie(self, name):
"""
Get a single cookie by name. Returns the cookie if found, None if not.
:Usage:
::
driver.get_cookie('my_cookie')
"""
if self.w3c:
try:
return self.execute(Command.GET_COOKIE, {'name': name})['value']
except NoSuchCookieException:
return None
else:
cookies = self.get_cookies()
for cookie in cookies:
if cookie['name'] == name:
return cookie
return None
def delete_cookie(self, name):
"""
Deletes a single cookie with the given name.
:Usage:
::
driver.delete_cookie('my_cookie')
"""
self.execute(Command.DELETE_COOKIE, {'name': name})
def delete_all_cookies(self):
"""
Delete all cookies in the scope of the session.
:Usage:
::
driver.delete_all_cookies()
"""
self.execute(Command.DELETE_ALL_COOKIES)
def add_cookie(self, cookie_dict):
"""
Adds a cookie to your current session.
:Args:
- cookie_dict: A dictionary object, with required keys - "name" and "value";
optional keys - "path", "domain", "secure", "expiry"
Usage:
driver.add_cookie({'name' : 'foo', 'value' : 'bar'})
driver.add_cookie({'name' : 'foo', 'value' : 'bar', 'path' : '/'})
driver.add_cookie({'name' : 'foo', 'value' : 'bar', 'path' : '/', 'secure':True})
"""
self.execute(Command.ADD_COOKIE, {'cookie': cookie_dict})
# Timeouts
def implicitly_wait(self, time_to_wait):
"""
Sets a sticky timeout to implicitly wait for an element to be found,
or a command to complete. This method only needs to be called one
time per session. To set the timeout for calls to
execute_async_script, see set_script_timeout.
:Args:
- time_to_wait: Amount of time to wait (in seconds)
:Usage:
::
driver.implicitly_wait(30)
"""
if self.w3c:
self.execute(Command.SET_TIMEOUTS, {
'implicit': int(float(time_to_wait) * 1000)})
else:
self.execute(Command.IMPLICIT_WAIT, {
'ms': float(time_to_wait) * 1000})
def set_script_timeout(self, time_to_wait):
"""
Set the amount of time that the script should wait during an
execute_async_script call before throwing an error.
:Args:
- time_to_wait: The amount of time to wait (in seconds)
:Usage:
::
driver.set_script_timeout(30)
"""
if self.w3c:
self.execute(Command.SET_TIMEOUTS, {
'script': int(float(time_to_wait) * 1000)})
else:
self.execute(Command.SET_SCRIPT_TIMEOUT, {
'ms': float(time_to_wait) * 1000})
def set_page_load_timeout(self, time_to_wait):
"""
Set the amount of time to wait for a page load to complete
before throwing an error.
:Args:
- time_to_wait: The amount of time to wait
:Usage:
::
driver.set_page_load_timeout(30)
"""
try:
self.execute(Command.SET_TIMEOUTS, {
'pageLoad': int(float(time_to_wait) * 1000)})
except WebDriverException:
self.execute(Command.SET_TIMEOUTS, {
'ms': float(time_to_wait) * 1000,
'type': 'page load'})
def find_element(self, by=By.ID, value=None):
"""
Find an element given a By strategy and locator. Prefer the find_element_by_* methods when
possible.
:Usage:
::
element = driver.find_element(By.ID, 'foo')
:rtype: WebElement
"""
if self.w3c:
if by == By.ID:
by = By.CSS_SELECTOR
value = '[id="%s"]' % value
elif by == By.TAG_NAME:
by = By.CSS_SELECTOR
elif by == By.CLASS_NAME:
by = By.CSS_SELECTOR
value = ".%s" % value
elif by == By.NAME:
by = By.CSS_SELECTOR
value = '[name="%s"]' % value
return self.execute(Command.FIND_ELEMENT, {
'using': by,
'value': value})['value']
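    # --- Editor's hedged note: W3C locator rewriting above (values illustrative) ---
    #   (By.ID, 'foo')         -> (By.CSS_SELECTOR, '[id="foo"]')
    #   (By.CLASS_NAME, 'foo') -> (By.CSS_SELECTOR, '.foo')
    #   (By.NAME, 'foo')       -> (By.CSS_SELECTOR, '[name="foo"]')
    # Legacy strategies are mapped onto CSS selectors before reaching a W3C
    # remote end; xpath and link-text locators pass through unchanged.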
def find_elements(self, by=By.ID, value=None):
"""
Find elements given a By strategy and locator. Prefer the find_elements_by_* methods when
possible.
:Usage:
::
elements = driver.find_elements(By.CLASS_NAME, 'foo')
:rtype: list of WebElement
"""
if self.w3c:
if by == By.ID:
by = By.CSS_SELECTOR
value = '[id="%s"]' % value
elif by == By.TAG_NAME:
by = By.CSS_SELECTOR
elif by == By.CLASS_NAME:
by = By.CSS_SELECTOR
value = ".%s" % value
elif by == By.NAME:
by = By.CSS_SELECTOR
value = '[name="%s"]' % value
# Return empty list if driver returns null
# See https://github.com/SeleniumHQ/selenium/issues/4555
return self.execute(Command.FIND_ELEMENTS, {
'using': by,
'value': value})['value'] or []
@property
def desired_capabilities(self):
"""
returns the drivers current desired capabilities being used
"""
return self.capabilities
def get_screenshot_as_file(self, filename):
"""
Saves a screenshot of the current window to a PNG image file. Returns
False if there is any IOError, else returns True. Use full paths in
your filename.
:Args:
- filename: The full path you wish to save your screenshot to. This
should end with a `.png` extension.
:Usage:
::
driver.get_screenshot_as_file('/Screenshots/foo.png')
"""
if not filename.lower().endswith('.png'):
warnings.warn("name used for saved screenshot does not match file "
"type. It should end with a `.png` extension", UserWarning)
png = self.get_screenshot_as_png()
try:
with open(filename, 'wb') as f:
f.write(png)
except IOError:
return False
finally:
del png
return True
def save_screenshot(self, filename):
"""
Saves a screenshot of the current window to a PNG image file. Returns
False if there is any IOError, else returns True. Use full paths in
your filename.
:Args:
- filename: The full path you wish to save your screenshot to. This
should end with a `.png` extension.
:Usage:
::
driver.save_screenshot('/Screenshots/foo.png')
"""
return self.get_screenshot_as_file(filename)
def get_screenshot_as_png(self):
"""
Gets the screenshot of the current window as a binary data.
:Usage:
::
driver.get_screenshot_as_png()
"""
return base64.b64decode(self.get_screenshot_as_base64().encode('ascii'))
def get_screenshot_as_base64(self):
"""
Gets the screenshot of the current window as a base64 encoded string
which is useful in embedded images in HTML.
:Usage:
::
driver.get_screenshot_as_base64()
"""
return self.execute(Command.SCREENSHOT)['value']
def set_window_size(self, width, height, windowHandle='current'):
"""
Sets the width and height of the current window. (window.resizeTo)
:Args:
- width: the width in pixels to set the window to
- height: the height in pixels to set the window to
:Usage:
::
driver.set_window_size(800,600)
"""
if self.w3c:
if windowHandle != 'current':
warnings.warn("Only 'current' window is supported for W3C compatibile browsers.")
self.set_window_rect(width=int(width), height=int(height))
else:
self.execute(Command.SET_WINDOW_SIZE, {
'width': int(width),
'height': int(height),
'windowHandle': windowHandle})
def get_window_size(self, windowHandle='current'):
"""
Gets the width and height of the current window.
:Usage:
::
driver.get_window_size()
"""
command = Command.GET_WINDOW_SIZE
if self.w3c:
if windowHandle != 'current':
warnings.warn("Only 'current' window is supported for W3C compatibile browsers.")
size = self.get_window_rect()
else:
size = self.execute(command, {'windowHandle': windowHandle})
if size.get('value', None) is not None:
size = size['value']
return {k: size[k] for k in ('width', 'height')}
def set_window_position(self, x, y, windowHandle='current'):
"""
Sets the x,y position of the current window. (window.moveTo)
:Args:
- x: the x-coordinate in pixels to set the window position
- y: the y-coordinate in pixels to set the window position
:Usage:
::
driver.set_window_position(0,0)
"""
if self.w3c:
if windowHandle != 'current':
warnings.warn("Only 'current' window is supported for W3C compatibile browsers.")
return self.set_window_rect(x=int(x), y=int(y))
else:
self.execute(Command.SET_WINDOW_POSITION,
{
'x': int(x),
'y': int(y),
'windowHandle': windowHandle
})
def get_window_position(self, windowHandle='current'):
"""
Gets the x,y position of the current window.
:Usage:
::
driver.get_window_position()
"""
if self.w3c:
if windowHandle != 'current':
warnings.warn("Only 'current' window is supported for W3C compatibile browsers.")
position = self.get_window_rect()
else:
position = self.execute(Command.GET_WINDOW_POSITION,
{'windowHandle': windowHandle})['value']
return {k: position[k] for k in ('x', 'y')}
def get_window_rect(self):
"""
Gets the x, y coordinates of the window as well as height and width of
the current window.
:Usage:
::
driver.get_window_rect()
"""
return self.execute(Command.GET_WINDOW_RECT)['value']
def set_window_rect(self, x=None, y=None, width=None, height=None):
"""
Sets the x, y coordinates of the window as well as height and width of
the current window. This method is only supported for W3C compatible
browsers; other browsers should use `set_window_position` and
`set_window_size`.
:Usage:
::
driver.set_window_rect(x=10, y=10)
driver.set_window_rect(width=100, height=200)
driver.set_window_rect(x=10, y=10, width=100, height=200)
"""
if not self.w3c:
raise UnknownMethodException("set_window_rect is only supported for W3C compatible browsers")
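        # At least one of x/y or width/height must be supplied; with all four
        # omitted there is nothing to change.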
if (x is None and y is None) and (height is None and width is None):
raise InvalidArgumentException("x and y or height and width need values")
return self.execute(Command.SET_WINDOW_RECT, {"x": x, "y": y,
"width": width,
"height": height})['value']
@property
def file_detector(self):
return self._file_detector
@file_detector.setter
def file_detector(self, detector):
"""
Set the file detector to be used when sending keyboard input.
By default, this is set to a file detector that does nothing.
see FileDetector
see LocalFileDetector
see UselessFileDetector
:Args:
- detector: The detector to use. Must not be None.
"""
if detector is None:
raise WebDriverException("You may not set a file detector that is null")
if not isinstance(detector, FileDetector):
raise WebDriverException("Detector has to be instance of FileDetector")
self._file_detector = detector
@property
def orientation(self):
"""
Gets the current orientation of the device
:Usage:
::
orientation = driver.orientation
"""
return self.execute(Command.GET_SCREEN_ORIENTATION)['value']
@orientation.setter
def orientation(self, value):
"""
Sets the current orientation of the device
:Args:
- value: orientation to set it to.
:Usage:
::
driver.orientation = 'landscape'
"""
allowed_values = ['LANDSCAPE', 'PORTRAIT']
if value.upper() in allowed_values:
self.execute(Command.SET_SCREEN_ORIENTATION, {'orientation': value})
else:
            raise WebDriverException("You can only set the orientation to 'LANDSCAPE' or 'PORTRAIT'")
@property
def application_cache(self):
""" Returns a ApplicationCache Object to interact with the browser app cache"""
return ApplicationCache(self)
@property
def log_types(self):
"""
Gets a list of the available log types. This only works with w3c compliant browsers.
:Usage:
::
driver.log_types
"""
return self.execute(Command.GET_AVAILABLE_LOG_TYPES)['value'] if self.w3c else []
def get_log(self, log_type):
"""
Gets the log for a given log type
:Args:
         - log_type: type of log which will be returned
:Usage:
::
driver.get_log('browser')
driver.get_log('driver')
driver.get_log('client')
driver.get_log('server')
"""
return self.execute(Command.GET_LOG, {'type': log_type})['value']
|
joshbruning/selenium
|
py/selenium/webdriver/remote/webdriver.py
|
Python
|
apache-2.0
| 41,544
|
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client for interacting with the Google BigQuery API."""
from google.cloud.client import ClientWithProject
from google.cloud.bigquery._http import Connection
from google.cloud.bigquery.dataset import Dataset
from google.cloud.bigquery.job import CopyJob
from google.cloud.bigquery.job import ExtractTableToStorageJob
from google.cloud.bigquery.job import LoadTableFromStorageJob
from google.cloud.bigquery.job import QueryJob
from google.cloud.bigquery.query import QueryResults
from google.cloud.iterator import HTTPIterator
class Project(object):
"""Wrapper for resource describing a BigQuery project.
:type project_id: str
:param project_id: Opaque ID of the project
:type numeric_id: int
:param numeric_id: Numeric ID of the project
:type friendly_name: str
:param friendly_name: Display name of the project
"""
def __init__(self, project_id, numeric_id, friendly_name):
self.project_id = project_id
self.numeric_id = numeric_id
self.friendly_name = friendly_name
@classmethod
def from_api_repr(cls, resource):
"""Factory: construct an instance from a resource dict."""
return cls(
resource['id'], resource['numericId'], resource['friendlyName'])
class Client(ClientWithProject):
"""Client to bundle configuration needed for API requests.
:type project: str
:param project: the project which the client acts on behalf of. Will be
passed when creating a dataset / job. If not passed,
falls back to the default inferred from the environment.
:type credentials: :class:`~google.auth.credentials.Credentials`
:param credentials: (Optional) The OAuth2 Credentials to use for this
client. If not passed (and if no ``http`` object is
passed), falls back to the default inferred from the
environment.
:type http: :class:`~httplib2.Http`
:param http: (Optional) HTTP object to make requests. Can be any object
that defines ``request()`` with the same interface as
:meth:`~httplib2.Http.request`. If not passed, an
``http`` object is created that is bound to the
``credentials`` for the current object.
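
    Illustrative usage (``'my-project'`` is a placeholder project ID)::

        client = Client(project='my-project')
        datasets = list(client.list_datasets())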
"""
SCOPE = ('https://www.googleapis.com/auth/bigquery',
'https://www.googleapis.com/auth/cloud-platform')
"""The scopes required for authenticating as a BigQuery consumer."""
def __init__(self, project=None, credentials=None, http=None):
super(Client, self).__init__(
project=project, credentials=credentials, http=http)
self._connection = Connection(self)
def list_projects(self, max_results=None, page_token=None):
"""List projects for the project associated with this client.
See:
https://cloud.google.com/bigquery/docs/reference/v2/projects/list
:type max_results: int
        :param max_results: maximum number of projects to return. If not
passed, defaults to a value set by the API.
:type page_token: str
:param page_token: opaque marker for the next "page" of projects. If
not passed, the API will return the first page of
projects.
:rtype: :class:`~google.cloud.iterator.Iterator`
:returns: Iterator of :class:`~google.cloud.bigquery.client.Project`
accessible to the current client.
"""
return HTTPIterator(
client=self, path='/projects', item_to_value=_item_to_project,
items_key='projects', page_token=page_token,
max_results=max_results)
def list_datasets(self, include_all=False, max_results=None,
page_token=None):
"""List datasets for the project associated with this client.
See:
https://cloud.google.com/bigquery/docs/reference/v2/datasets/list
:type include_all: bool
:param include_all: True if results include hidden datasets.
:type max_results: int
        :param max_results: maximum number of datasets to return. If not
passed, defaults to a value set by the API.
:type page_token: str
:param page_token: opaque marker for the next "page" of datasets. If
not passed, the API will return the first page of
datasets.
:rtype: :class:`~google.cloud.iterator.Iterator`
        :returns: Iterator of :class:`~google.cloud.bigquery.dataset.Dataset`
                  accessible to the current client.
"""
extra_params = {}
if include_all:
extra_params['all'] = True
path = '/projects/%s/datasets' % (self.project,)
return HTTPIterator(
client=self, path=path, item_to_value=_item_to_dataset,
items_key='datasets', page_token=page_token,
max_results=max_results, extra_params=extra_params)
def dataset(self, dataset_name, project=None):
"""Construct a dataset bound to this client.
:type dataset_name: str
:param dataset_name: Name of the dataset.
:type project: str
:param project: (Optional) project ID for the dataset (defaults to
the project of the client).
:rtype: :class:`google.cloud.bigquery.dataset.Dataset`
:returns: a new ``Dataset`` instance
"""
return Dataset(dataset_name, client=self, project=project)
def job_from_resource(self, resource):
"""Detect correct job type from resource and instantiate.
:type resource: dict
:param resource: one job resource from API response
:rtype: One of:
:class:`google.cloud.bigquery.job.LoadTableFromStorageJob`,
:class:`google.cloud.bigquery.job.CopyJob`,
:class:`google.cloud.bigquery.job.ExtractTableToStorageJob`,
:class:`google.cloud.bigquery.job.QueryJob`,
:class:`google.cloud.bigquery.job.RunSyncQueryJob`
:returns: the job instance, constructed via the resource
"""
config = resource['configuration']
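        # A job resource carries exactly one of these configuration keys, so
        # dispatch on whichever is present.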
if 'load' in config:
return LoadTableFromStorageJob.from_api_repr(resource, self)
elif 'copy' in config:
return CopyJob.from_api_repr(resource, self)
elif 'extract' in config:
return ExtractTableToStorageJob.from_api_repr(resource, self)
elif 'query' in config:
return QueryJob.from_api_repr(resource, self)
raise ValueError('Cannot parse job resource')
def list_jobs(self, max_results=None, page_token=None, all_users=None,
state_filter=None):
"""List jobs for the project associated with this client.
See:
https://cloud.google.com/bigquery/docs/reference/v2/jobs/list
:type max_results: int
        :param max_results: maximum number of jobs to return. If not
passed, defaults to a value set by the API.
:type page_token: str
:param page_token: opaque marker for the next "page" of jobs. If
not passed, the API will return the first page of
jobs.
:type all_users: bool
:param all_users: if true, include jobs owned by all users in the
project.
:type state_filter: str
:param state_filter: if passed, include only jobs matching the given
state. One of
* ``"done"``
* ``"pending"``
* ``"running"``
:rtype: :class:`~google.cloud.iterator.Iterator`
:returns: Iterable of job instances.
"""
extra_params = {'projection': 'full'}
if all_users is not None:
extra_params['allUsers'] = all_users
if state_filter is not None:
extra_params['stateFilter'] = state_filter
path = '/projects/%s/jobs' % (self.project,)
return HTTPIterator(
client=self, path=path, item_to_value=_item_to_job,
items_key='jobs', page_token=page_token,
max_results=max_results, extra_params=extra_params)
def load_table_from_storage(self, job_name, destination, *source_uris):
"""Construct a job for loading data into a table from CloudStorage.
See:
https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load
:type job_name: str
:param job_name: Name of the job.
:type destination: :class:`google.cloud.bigquery.table.Table`
:param destination: Table into which data is to be loaded.
:type source_uris: sequence of string
:param source_uris: URIs of data files to be loaded; in format
``gs://<bucket_name>/<object_name_or_glob>``.
:rtype: :class:`google.cloud.bigquery.job.LoadTableFromStorageJob`
:returns: a new ``LoadTableFromStorageJob`` instance
"""
return LoadTableFromStorageJob(job_name, destination, source_uris,
client=self)
def copy_table(self, job_name, destination, *sources):
"""Construct a job for copying one or more tables into another table.
See:
https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy
:type job_name: str
:param job_name: Name of the job.
:type destination: :class:`google.cloud.bigquery.table.Table`
:param destination: Table into which data is to be copied.
:type sources: sequence of :class:`google.cloud.bigquery.table.Table`
:param sources: tables to be copied.
:rtype: :class:`google.cloud.bigquery.job.CopyJob`
:returns: a new ``CopyJob`` instance
"""
return CopyJob(job_name, destination, sources, client=self)
def extract_table_to_storage(self, job_name, source, *destination_uris):
"""Construct a job for extracting a table into Cloud Storage files.
See:
https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.extract
:type job_name: str
:param job_name: Name of the job.
:type source: :class:`google.cloud.bigquery.table.Table`
:param source: table to be extracted.
:type destination_uris: sequence of string
        :param destination_uris: URIs of Cloud Storage file(s) into which
table data is to be extracted; in format
``gs://<bucket_name>/<object_name_or_glob>``.
:rtype: :class:`google.cloud.bigquery.job.ExtractTableToStorageJob`
:returns: a new ``ExtractTableToStorageJob`` instance
"""
return ExtractTableToStorageJob(job_name, source, destination_uris,
client=self)
def run_async_query(self, job_name, query,
udf_resources=(), query_parameters=()):
"""Construct a job for running a SQL query asynchronously.
See:
https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query
:type job_name: str
:param job_name: Name of the job.
:type query: str
:param query: SQL query to be executed
:type udf_resources: tuple
:param udf_resources: An iterable of
:class:`google.cloud.bigquery._helpers.UDFResource`
(empty by default)
:type query_parameters: tuple
:param query_parameters:
An iterable of
:class:`google.cloud.bigquery._helpers.AbstractQueryParameter`
(empty by default)
:rtype: :class:`google.cloud.bigquery.job.QueryJob`
:returns: a new ``QueryJob`` instance
"""
return QueryJob(job_name, query, client=self,
udf_resources=udf_resources,
query_parameters=query_parameters)
def run_sync_query(self, query, udf_resources=(), query_parameters=()):
"""Run a SQL query synchronously.
:type query: str
:param query: SQL query to be executed
:type udf_resources: tuple
:param udf_resources: An iterable of
:class:`google.cloud.bigquery._helpers.UDFResource`
(empty by default)
:type query_parameters: tuple
:param query_parameters:
An iterable of
:class:`google.cloud.bigquery._helpers.AbstractQueryParameter`
(empty by default)
:rtype: :class:`google.cloud.bigquery.query.QueryResults`
:returns: a new ``QueryResults`` instance
"""
return QueryResults(query, client=self,
udf_resources=udf_resources,
query_parameters=query_parameters)
# pylint: disable=unused-argument
def _item_to_project(iterator, resource):
"""Convert a JSON project to the native object.
:type iterator: :class:`~google.cloud.iterator.Iterator`
:param iterator: The iterator that is currently in use.
:type resource: dict
:param resource: An item to be converted to a project.
:rtype: :class:`.Project`
:returns: The next project in the page.
"""
return Project.from_api_repr(resource)
# pylint: enable=unused-argument
def _item_to_dataset(iterator, resource):
"""Convert a JSON dataset to the native object.
:type iterator: :class:`~google.cloud.iterator.Iterator`
:param iterator: The iterator that is currently in use.
:type resource: dict
:param resource: An item to be converted to a dataset.
:rtype: :class:`.Dataset`
:returns: The next dataset in the page.
"""
return Dataset.from_api_repr(resource, iterator.client)
def _item_to_job(iterator, resource):
"""Convert a JSON job to the native object.
:type iterator: :class:`~google.cloud.iterator.Iterator`
:param iterator: The iterator that is currently in use.
:type resource: dict
:param resource: An item to be converted to a job.
:rtype: job instance.
:returns: The next job in the page.
"""
return iterator.client.job_from_resource(resource)
|
axbaretto/beam
|
sdks/python/.tox/py27gcp/lib/python2.7/site-packages/google/cloud/bigquery/client.py
|
Python
|
apache-2.0
| 15,061
|
from __future__ import division
from __future__ import print_function
from builtins import range
from past.utils import old_div
# Copyright 2018 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import numpy as np
import scipy
import matplotlib.pyplot as plt
import seaborn as sns
### Hyperparameters
NONTERMINAL_STATE_COUNT = 100
NOISE_AMOUNT = 0.1
TRAIN_STEPS = 10000
Q_ENSEMBLE_SIZE = 8
MODEL_ENSEMBLE_SIZE = 8
HORIZON = 5
TRIAL_N = 10
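# The environment is a deterministic chain MDP: from each nonterminal state
# the agent moves one state to the right, collecting a reward of -1 per step
# and a large final reward on the transition into the terminal state.
# noisy_step (below) additionally teleports to a uniformly random state with
# probability NOISE_AMOUNT, standing in for an imperfect learned model.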
### Helper functions
initial_state = 0
terminal_state = NONTERMINAL_STATE_COUNT + 1
nonterminal_state_count = NONTERMINAL_STATE_COUNT
state_count = NONTERMINAL_STATE_COUNT + 1
final_reward = NONTERMINAL_STATE_COUNT
colors = sns.color_palette('husl', 4)
plt.rcParams["figure.figsize"] = (6,5)
def step(state):
if state == terminal_state: next_state = terminal_state
else: next_state = state + 1
if state == terminal_state: reward = 0
elif state+1 == terminal_state: reward = final_reward
else: reward = -1
return next_state, reward
def noisy_step(state):
if state == terminal_state: next_state = terminal_state
elif np.random.random([]) < NOISE_AMOUNT: next_state = np.random.randint(0, state_count)
else: next_state = state + 1
if state == terminal_state: reward = 0
elif state+1 == terminal_state: reward = final_reward
else: reward = -1
return next_state, reward
def get_error(Q):
losses = np.square(np.arange(state_count) - Q[:-1])
return np.mean(losses)
def downsample(array, factor):
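    # Pad the tail with NaNs up to a multiple of `factor`, then average each
    # window of `factor` samples, ignoring the NaN padding.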
pad_size = np.ceil(old_div(float(array.size),factor))*factor - array.size
array_padded = np.append(array, np.zeros([pad_size.astype(np.int64)])*np.NaN)
return scipy.nanmean(array_padded.reshape(-1,factor), axis=1)
######################
### Main experiments
######################
# Basic Q
if True:
print("Running basic Q-learning.")
trial_results = []
for run_i in range(TRIAL_N):
print("Trial %d" % run_i)
Q = np.random.randint(0,state_count,[state_count+1]).astype(np.float64)
Q[state_count] = 0
losses = []
for step_i in range(TRAIN_STEPS):
state = np.random.randint(0,state_count)
next_state, reward = step(state)
Q[state] = reward + Q[next_state]
losses.append(get_error(Q))
trial_results.append(losses)
print("...complete.\n")
result = np.stack(trial_results, axis=1)
means = np.mean(result, axis=1)
stdevs = np.std(result, axis=1)
plt.plot(means, label="Basic Q-learning", color=colors[0])
plt.fill_between(np.arange(TRAIN_STEPS), means - stdevs, means + stdevs, alpha=.2, color=colors[0])
with open('Toy-v1/baseline.csv', 'w') as f:
data = []
for frame_i in range(result.shape[0]):
for loss in result[frame_i]:
data.append("%f,%f,%f,%f" % (frame_i, frame_i, frame_i, loss))
f.write("\n".join(data))
# Ensemble Q
if True:
print("Running ensemble Q-learning.")
trial_results = []
for run_i in range(TRIAL_N):
print("Trial %d" % run_i)
Q = np.random.randint(0,state_count,[Q_ENSEMBLE_SIZE, state_count+1]).astype(np.float64)
Q[:, state_count] = 0
losses = []
for step_i in range(TRAIN_STEPS):
for q_ensemble_i in range(Q_ENSEMBLE_SIZE):
state = np.random.randint(0,state_count)
next_state, reward = step(state)
Q[q_ensemble_i, state] = reward + np.mean(Q[:, next_state])
losses.append(get_error(np.mean(Q, axis=0)))
trial_results.append(losses)
print("...complete.\n")
result = np.stack(trial_results, axis=1)
means = np.mean(result, axis=1)
stdevs = np.std(result, axis=1)
plt.plot(means, label="Ensemble Q-learning", color=colors[1])
plt.fill_between(np.arange(TRAIN_STEPS), means - stdevs, means + stdevs, alpha=.2, color=colors[1])
# Ensemble MVE-Oracle
if True:
print("Running ensemble oracle MVE.")
trial_results = []
for run_i in range(TRIAL_N):
print("Trial %d" % run_i)
Q = np.random.randint(0,state_count,[Q_ENSEMBLE_SIZE, state_count+1]).astype(np.float64)
Q[:, state_count] = 0
losses = []
for step_i in range(TRAIN_STEPS):
for q_ensemble_i in range(Q_ENSEMBLE_SIZE):
state = np.random.randint(0,state_count)
next_state, reward = step(state)
# MVE rollout
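                # Unroll the true dynamics HORIZON steps, accumulate the
                # rewards, and bootstrap with the ensemble-mean Q value at
                # the state reached at the end of the rollout.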
target = reward
for _ in range(HORIZON):
next_state, reward = step(next_state)
target += reward
target += np.mean(Q[:,next_state])
Q[q_ensemble_i, state] = target
losses.append(get_error(np.mean(Q, axis=0)))
trial_results.append(losses)
print("...complete.\n")
result = np.stack(trial_results, axis=1)
means = np.mean(result, axis=1)
stdevs = np.std(result, axis=1)
plt.plot(means, label="MVE-oracle", color=colors[2])
plt.fill_between(np.arange(TRAIN_STEPS), means - stdevs, means + stdevs, alpha=.2, color=colors[2])
with open('Toy-v1/mve_oracle.csv', 'w') as f:
data = []
for frame_i in range(result.shape[0]):
for loss in result[frame_i]:
data.append("%f,%f,%f,%f" % (frame_i, frame_i, frame_i, loss))
f.write("\n".join(data))
# Ensemble MVE-Noisy
if True:
print("Running ensemble noisy MVE.")
trial_results = []
for run_i in range(TRIAL_N):
print("Trial %d" % run_i)
Q = np.random.randint(0,state_count,[Q_ENSEMBLE_SIZE, state_count+1]).astype(np.float64)
Q[:, state_count] = 0
losses = []
for step_i in range(TRAIN_STEPS):
for q_ensemble_i in range(Q_ENSEMBLE_SIZE):
state = np.random.randint(0,state_count)
next_state, reward = step(state)
# MVE rollout
targets = []
first_next_state, first_reward = next_state, reward
for model_ensemble_i in range(MODEL_ENSEMBLE_SIZE):
next_state, reward = first_next_state, first_reward
target = reward
for _ in range(HORIZON):
next_state, reward = noisy_step(next_state)
target += reward
target += np.mean(Q[:,next_state])
targets.append(target)
Q[q_ensemble_i, state] = np.mean(targets)
losses.append(get_error(np.mean(Q, axis=0)))
trial_results.append(losses)
print("...complete.\n")
result = np.stack(trial_results, axis=1)
means = np.mean(result, axis=1)
stdevs = np.std(result, axis=1)
plt.plot(means, label="MVE-noisy", color=colors[2], linestyle='dotted')
plt.fill_between(np.arange(TRAIN_STEPS), means - stdevs, means + stdevs, alpha=.2, color=colors[2])
with open('Toy-v1/mve_noisy.csv', 'w') as f:
data = []
for frame_i in range(result.shape[0]):
for loss in result[frame_i]:
data.append("%f,%f,%f,%f" % (frame_i, frame_i, frame_i, loss))
f.write("\n".join(data))
# STEVE-Oracle
if True:
print("Running ensemble oracle STEVE.")
trial_results = []
oracle_q_estimate_errors = []
oracle_mve_estimate_errors = []
oracle_steve_estimate_errors = []
oracle_opt_estimate_errors = []
for run_i in range(TRIAL_N):
print("Trial %d" % run_i)
Q = np.random.randint(0,state_count,[Q_ENSEMBLE_SIZE, state_count+1]).astype(np.float64)
Q[:, state_count] = 0
losses = []
q_estimate_errors = []
mve_estimate_errors = []
steve_estimate_errors = []
opt_estimate_errors = []
        steve_beat_freq = []
for step_i in range(TRAIN_STEPS):
_q_estimate_errors = []
_mve_estimate_errors = []
_steve_estimate_errors = []
_opt_estimate_errors = []
_steve_beat_freq = []
for q_ensemble_i in range(Q_ENSEMBLE_SIZE):
state = np.random.randint(0,state_count)
next_state, reward = step(state)
# STEVE rollout
Q_est_mat = np.zeros([HORIZON + 1, Q_ENSEMBLE_SIZE])
reward_est_mat = np.zeros([HORIZON + 1, 1])
first_next_state, first_reward = next_state, reward
next_state, reward = first_next_state, first_reward
Q_est_mat[0, :] = Q[:, next_state]
reward_est_mat[0, 0] = reward
for timestep_i in range(1,HORIZON+1):
next_state, reward = step(next_state)
Q_est_mat[timestep_i, :] = Q[:, next_state]
reward_est_mat[timestep_i, 0] = reward
all_targets = Q_est_mat + np.cumsum(reward_est_mat, axis=0)
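                # Row h of all_targets is an h-step TD target: the cumulative
                # simulated reward so far plus each ensemble member's Q
                # estimate at the state reached at that horizon.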
# STEVE weight calculation
estimates = np.mean(all_targets, axis=1)
confidences = old_div(1., (np.var(all_targets, axis=1) + 1e-8))
coefficients = old_div(confidences, np.sum(confidences))
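                # Inverse-variance weighting: horizons whose candidate
                # targets disagree across the ensemble (high variance) get
                # less weight; the normalized weights sum to one.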
target = np.sum(estimates * coefficients)
Q[q_ensemble_i, state] = target
true_target = state + 1. if state != terminal_state else 0.
_q_estimate_errors.append(np.square(estimates[0] - true_target))
_mve_estimate_errors.append(np.square(estimates[-1] - true_target))
_steve_estimate_errors.append(np.square(np.sum(estimates * coefficients) - true_target))
_opt_estimate_errors.append(np.min(np.square(estimates - true_target)))
losses.append(get_error(np.mean(Q, axis=0)))
q_estimate_errors.append(np.mean(_q_estimate_errors))
mve_estimate_errors.append(np.mean(_mve_estimate_errors))
steve_estimate_errors.append(np.mean(_steve_estimate_errors))
opt_estimate_errors.append(np.mean(_opt_estimate_errors))
trial_results.append(losses)
oracle_q_estimate_errors.append(q_estimate_errors)
oracle_mve_estimate_errors.append(mve_estimate_errors)
oracle_steve_estimate_errors.append(steve_estimate_errors)
oracle_opt_estimate_errors.append(opt_estimate_errors)
print("...complete.\n")
result = np.stack(trial_results, axis=1)
means = np.mean(result, axis=1)
stdevs = np.std(result, axis=1)
plt.plot(means, label="STEVE-oracle", color=colors[3])
plt.fill_between(np.arange(TRAIN_STEPS), means - stdevs, means + stdevs, alpha=.2, color=colors[3])
with open('Toy-v1/steve_oracle.csv', 'w') as f:
data = []
for frame_i in range(result.shape[0]):
for loss in result[frame_i]:
data.append("%f,%f,%f,%f" % (frame_i, frame_i, frame_i, loss))
f.write("\n".join(data))
# STEVE-Noisy
if True:
print("Running ensemble noisy STEVE.")
trial_results = []
noisy_q_estimate_errors = []
noisy_mve_estimate_errors = []
noisy_steve_estimate_errors = []
noisy_opt_estimate_errors = []
noisy_steve_beat_freq = []
for run_i in range(TRIAL_N):
print("Trial %d" % run_i)
Q = np.random.randint(0,state_count,[Q_ENSEMBLE_SIZE, state_count+1]).astype(np.float64)
Q[:, state_count] = 0
losses = []
q_estimate_errors = []
mve_estimate_errors = []
steve_estimate_errors = []
opt_estimate_errors = []
        steve_beat_freq = []
for step_i in range(TRAIN_STEPS):
_q_estimate_errors = []
_mve_estimate_errors = []
_steve_estimate_errors = []
_opt_estimate_errors = []
_steve_beat_freq = []
for q_ensemble_i in range(Q_ENSEMBLE_SIZE):
state = np.random.randint(0,state_count)
next_state, reward = step(state)
# STEVE rollout
Q_est_mat = np.zeros([HORIZON + 1, MODEL_ENSEMBLE_SIZE, Q_ENSEMBLE_SIZE])
reward_est_mat = np.zeros([HORIZON + 1, MODEL_ENSEMBLE_SIZE, 1])
first_next_state, first_reward = next_state, reward
for model_ensemble_i in range(MODEL_ENSEMBLE_SIZE):
next_state, reward = first_next_state, first_reward
Q_est_mat[0, model_ensemble_i, :] = Q[:, next_state]
reward_est_mat[0, model_ensemble_i, 0] = reward
for timestep_i in range(1,HORIZON+1):
next_state, reward = noisy_step(next_state)
Q_est_mat[timestep_i, model_ensemble_i, :] = Q[:, next_state]
reward_est_mat[timestep_i, model_ensemble_i, 0] = reward
all_targets = Q_est_mat + np.cumsum(reward_est_mat, axis=0)
# STEVE weight calculation
all_targets = np.reshape(all_targets, [HORIZON+1, MODEL_ENSEMBLE_SIZE * Q_ENSEMBLE_SIZE])
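                # Pool the model and Q ensembles into a single sample axis so
                # each horizon's variance reflects both model error and Q
                # estimation error.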
estimates = np.mean(all_targets, axis=1)
confidences = old_div(1., (np.var(all_targets, axis=1) + 1e-8))
coefficients = old_div(confidences, np.sum(confidences))
target = np.sum(estimates * coefficients)
# target = estimates[0]
Q[q_ensemble_i, state] = target
true_target = state + 1. if state != terminal_state else 0.
_q_estimate_errors.append(np.square(estimates[0] - true_target))
_mve_estimate_errors.append(np.square(estimates[-1] - true_target))
_steve_estimate_errors.append(np.square(np.sum(estimates * coefficients) - true_target))
_opt_estimate_errors.append(np.min(np.square(estimates - true_target)))
_steve_beat_freq.append(float(np.square(estimates[0] - true_target) > np.square(target - true_target)))
losses.append(get_error(np.mean(Q, axis=0)))
q_estimate_errors.append(np.mean(_q_estimate_errors))
mve_estimate_errors.append(np.mean(_mve_estimate_errors))
steve_estimate_errors.append(np.mean(_steve_estimate_errors))
opt_estimate_errors.append(np.mean(_opt_estimate_errors))
steve_beat_freq.append(np.mean(_steve_beat_freq))
trial_results.append(losses)
noisy_q_estimate_errors.append(q_estimate_errors)
noisy_mve_estimate_errors.append(mve_estimate_errors)
noisy_steve_estimate_errors.append(steve_estimate_errors)
noisy_opt_estimate_errors.append(opt_estimate_errors)
noisy_steve_beat_freq.append(steve_beat_freq)
print("...complete.\n")
result = np.stack(trial_results, axis=1)
means = np.mean(result, axis=1)
stdevs = np.std(result, axis=1)
plt.plot(means, label="STEVE-noisy", color=colors[3], linestyle='dotted')
plt.fill_between(np.arange(TRAIN_STEPS), means - stdevs, means + stdevs, alpha=.2, color=colors[3])
with open('Toy-v1/steve_noisy.csv', 'w') as f:
data = []
for frame_i in range(result.shape[0]):
for loss in result[frame_i]:
data.append("%f,%f,%f,%f" % (frame_i, frame_i, frame_i, loss))
f.write("\n".join(data))
# ### Display results
# plt.title("Comparison of convergence rates")
# plt.legend()
# plt.savefig("comparison.pdf")
# plt.show()
#
# ### Display secondary results - error comparison
# DOWNSAMPLE = 50
# colors = sns.color_palette('husl', 8)
# for i, (error_curve, label) in enumerate([
# (oracle_q_estimate_errors, "Oracle Q error"),
# (oracle_mve_estimate_errors, "Oracle MVE error"),
# (oracle_steve_estimate_errors, "Oracle STEVE error"),
# # (oracle_opt_estimate_errors, "Oracle minimum single-estimate error"),
# ]):
# result = np.stack(error_curve, axis=1)
# means = downsample(np.mean(result, axis=1), DOWNSAMPLE)
# stdevs = downsample(np.std(result, axis=1), DOWNSAMPLE)
# plt.plot(means, label=label, color=colors[i])
# plt.fill_between(np.arange(means.shape[0]), means - stdevs, means + stdevs, alpha=.2, color=colors[i])
#
# plt.title("Comparison of errors for oracle dynamics")
# plt.legend()
# plt.show()
#
# for i, (error_curve, label) in enumerate([
# (noisy_q_estimate_errors, "Noisy Q error"),
# (noisy_mve_estimate_errors, "Noisy MVE error"),
# (noisy_steve_estimate_errors, "Noisy STEVE error"),
# # (noisy_opt_estimate_errors, "Noisy minimum single-estimate error"),
# # (trial_steve_beat_freq, "STEVE beat freq"),
# ]):
# result = np.stack(error_curve, axis=1)
# means = downsample(np.mean(result, axis=1), DOWNSAMPLE)
# stdevs = downsample(np.std(result, axis=1), DOWNSAMPLE)
# plt.plot(means, label=label, color=colors[i])
# plt.fill_between(np.arange(means.shape[0]), means - stdevs, means + stdevs, alpha=.2, color=colors[i])
#
# plt.title("Comparison of errors for noisy dynamics")
# plt.legend()
# plt.show()
|
tombstone/models
|
research/steve/toy_demo.py
|
Python
|
apache-2.0
| 16,727
|
# Copyright 2009 Owen Taylor
#
# This file is part of Reinteract and distributed under the terms
# of the BSD license. See the file COPYING in the Reinteract
# distribution for full details.
#
########################################################################
import gtk
from global_settings import global_settings
from window_builder import WindowBuilder
class PreferencesBuilder(WindowBuilder):
def __init__(self):
WindowBuilder.__init__(self, 'preferences')
self.dialog.connect('response', self.__on_response)
self.dialog.connect('delete-event', self.__on_delete_event)
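        # Each preference below is wired in both directions: a notify handler
        # pushes changes from global_settings into its widget, and the
        # widget's signal handler writes user edits back to global_settings.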
global_settings.connect('notify::editor-font-is-custom', self.__on_notify_editor_font_is_custom)
self.__on_notify_editor_font_is_custom()
self.editor_font_custom_check_button.connect('toggled', self.__on_editor_font_custom_check_button_toggled)
self.__on_editor_font_custom_check_button_toggled()
global_settings.connect('notify::editor-font-name', self.__on_notify_editor_font_name)
self.__on_notify_editor_font_name()
self.editor_font_button.connect('font-set', self.__on_editor_font_set)
global_settings.connect('notify::doc-tooltip-font-is-custom', self.__on_notify_doc_tooltip_font_is_custom)
self.__on_notify_doc_tooltip_font_is_custom()
self.doc_tooltip_font_custom_check_button.connect('toggled', self.__on_doc_tooltip_font_custom_check_button_toggled)
self.__on_doc_tooltip_font_custom_check_button_toggled()
global_settings.connect('notify::doc-tooltip-font-name', self.__on_notify_doc_tooltip_font_name)
self.__on_notify_doc_tooltip_font_name()
self.doc_tooltip_font_button.connect('font-set', self.__on_doc_tooltip_font_set)
global_settings.connect('notify::autocomplete', self.__on_notify_autocomplete)
self.__on_notify_autocomplete()
self.autocomplete_check_button.connect('toggled', self.__on_autocomplete_check_button_toggled)
def __on_notify_editor_font_is_custom(self, *args):
self.editor_font_custom_check_button.set_active(global_settings.editor_font_is_custom)
def __on_notify_editor_font_name(self, *args):
self.editor_font_button.set_font_name(global_settings.editor_font_name)
def __on_editor_font_custom_check_button_toggled(self, *args):
font_is_custom = self.editor_font_custom_check_button.get_active()
self.editor_font_button.set_sensitive(font_is_custom)
if font_is_custom != global_settings.editor_font_is_custom:
global_settings.editor_font_is_custom = font_is_custom
def __on_editor_font_set(self, font_button):
font_name = font_button.get_font_name()
if font_name != global_settings.editor_font_name:
global_settings.editor_font_name = font_name
def __on_notify_doc_tooltip_font_is_custom(self, *args):
self.doc_tooltip_font_custom_check_button.set_active(global_settings.doc_tooltip_font_is_custom)
def __on_notify_doc_tooltip_font_name(self, *args):
self.doc_tooltip_font_button.set_font_name(global_settings.doc_tooltip_font_name)
def __on_doc_tooltip_font_custom_check_button_toggled(self, *args):
font_is_custom = self.doc_tooltip_font_custom_check_button.get_active()
self.doc_tooltip_font_button.set_sensitive(font_is_custom)
if font_is_custom != global_settings.doc_tooltip_font_is_custom:
global_settings.doc_tooltip_font_is_custom = font_is_custom
def __on_doc_tooltip_font_set(self, font_button):
font_name = font_button.get_font_name()
if font_name != global_settings.doc_tooltip_font_name:
global_settings.doc_tooltip_font_name = font_name
def __on_notify_autocomplete(self, *args):
self.autocomplete_check_button.set_active(global_settings.autocomplete)
def __on_autocomplete_check_button_toggled(self, *args):
autocomplete = self.autocomplete_check_button.get_active()
if autocomplete != global_settings.autocomplete:
global_settings.autocomplete = autocomplete
def __on_response(self, dialog, response_id):
self.dialog.hide()
def __on_delete_event(self, dialog, event):
self.dialog.hide()
return True
_builder = None
def show_preferences(parent=None):
global _builder
if not _builder:
_builder = PreferencesBuilder()
_builder.dialog.set_transient_for(parent)
if _builder.dialog.flags() & gtk.VISIBLE == 0:
_builder.dialog.show()
else:
_builder.dialog.present_with_time(gtk.get_current_event_time())
|
rschroll/reinteract
|
lib/reinteract/preferences_dialog.py
|
Python
|
bsd-2-clause
| 4,634
|
from __future__ import absolute_import
from django.db import connection, connections, transaction, DEFAULT_DB_ALIAS, DatabaseError
from django.db.transaction import commit_on_success, commit_manually, TransactionManagementError
from django.test import TransactionTestCase, skipUnlessDBFeature
from django.test.utils import override_settings
from django.utils.unittest import skipIf, skipUnless
from .models import Mod, M2mA, M2mB
class TestTransactionClosing(TransactionTestCase):
"""
Tests to make sure that transactions are properly closed
when they should be, and aren't left pending after operations
have been performed in them. Refs #9964.
"""
def test_raw_committed_on_success(self):
"""
Make sure a transaction consisting of raw SQL execution gets
committed by the commit_on_success decorator.
"""
@commit_on_success
def raw_sql():
"Write a record using raw sql under a commit_on_success decorator"
cursor = connection.cursor()
cursor.execute("INSERT into transactions_regress_mod (id,fld) values (17,18)")
raw_sql()
# Rollback so that if the decorator didn't commit, the record is unwritten
transaction.rollback()
try:
# Check that the record is in the DB
obj = Mod.objects.get(pk=17)
self.assertEqual(obj.fld, 18)
except Mod.DoesNotExist:
self.fail("transaction with raw sql not committed")
def test_commit_manually_enforced(self):
"""
        Make sure that under commit_manually, even "read-only" transactions require closure
(commit or rollback), and a transaction left pending is treated as an error.
"""
@commit_manually
        def non_committer():
"Execute a managed transaction with read-only operations and fail to commit"
_ = Mod.objects.count()
        self.assertRaises(TransactionManagementError, non_committer)
def test_commit_manually_commit_ok(self):
"""
Test that under commit_manually, a committed transaction is accepted by the transaction
management mechanisms
"""
@commit_manually
def committer():
"""
Perform a database query, then commit the transaction
"""
_ = Mod.objects.count()
transaction.commit()
try:
committer()
except TransactionManagementError:
self.fail("Commit did not clear the transaction state")
def test_commit_manually_rollback_ok(self):
"""
Test that under commit_manually, a rolled-back transaction is accepted by the transaction
management mechanisms
"""
@commit_manually
def roller_back():
"""
Perform a database query, then rollback the transaction
"""
_ = Mod.objects.count()
transaction.rollback()
try:
roller_back()
except TransactionManagementError:
self.fail("Rollback did not clear the transaction state")
def test_commit_manually_enforced_after_commit(self):
"""
Test that under commit_manually, if a transaction is committed and an operation is
performed later, we still require the new transaction to be closed
"""
@commit_manually
def fake_committer():
"Query, commit, then query again, leaving with a pending transaction"
_ = Mod.objects.count()
transaction.commit()
_ = Mod.objects.count()
self.assertRaises(TransactionManagementError, fake_committer)
@skipUnlessDBFeature('supports_transactions')
def test_reuse_cursor_reference(self):
"""
Make sure transaction closure is enforced even when the queries are performed
through a single cursor reference retrieved in the beginning
(this is to show why it is wrong to set the transaction dirty only when a cursor
is fetched from the connection).
"""
@commit_on_success
def reuse_cursor_ref():
"""
            Fetch a cursor, perform a query, rollback to close the transaction,
then write a record (in a new transaction) using the same cursor object
(reference). All this under commit_on_success, so the second insert should
be committed.
"""
cursor = connection.cursor()
cursor.execute("INSERT into transactions_regress_mod (id,fld) values (1,2)")
transaction.rollback()
cursor.execute("INSERT into transactions_regress_mod (id,fld) values (1,2)")
reuse_cursor_ref()
# Rollback so that if the decorator didn't commit, the record is unwritten
transaction.rollback()
try:
# Check that the record is in the DB
obj = Mod.objects.get(pk=1)
self.assertEqual(obj.fld, 2)
except Mod.DoesNotExist:
self.fail("After ending a transaction, cursor use no longer sets dirty")
def test_failing_query_transaction_closed(self):
"""
Make sure that under commit_on_success, a transaction is rolled back even if
the first database-modifying operation fails.
This is prompted by http://code.djangoproject.com/ticket/6669 (and based on sample
code posted there to exemplify the problem): Before Django 1.3,
transactions were only marked "dirty" by the save() function after it successfully
wrote the object to the database.
"""
from django.contrib.auth.models import User
@transaction.commit_on_success
def create_system_user():
"Create a user in a transaction"
user = User.objects.create_user(username='system', password='iamr00t', email='root@SITENAME.com')
# Redundant, just makes sure the user id was read back from DB
Mod.objects.create(fld=user.id)
# Create a user
create_system_user()
with self.assertRaises(DatabaseError):
# The second call to create_system_user should fail for violating
# a unique constraint (it's trying to re-create the same user)
create_system_user()
# Try to read the database. If the last transaction was indeed closed,
# this should cause no problems
User.objects.all()[0]
@override_settings(DEBUG=True)
def test_failing_query_transaction_closed_debug(self):
"""
Regression for #6669. Same test as above, with DEBUG=True.
"""
self.test_failing_query_transaction_closed()
@skipUnless(connection.vendor == 'postgresql',
"This test only valid for PostgreSQL")
class TestPostgresAutocommit(TransactionTestCase):
"""
Tests to make sure psycopg2's autocommit mode is restored after entering
and leaving transaction management. Refs #16047.
"""
def setUp(self):
from psycopg2.extensions import (ISOLATION_LEVEL_AUTOCOMMIT,
ISOLATION_LEVEL_READ_COMMITTED)
self._autocommit = ISOLATION_LEVEL_AUTOCOMMIT
self._read_committed = ISOLATION_LEVEL_READ_COMMITTED
# We want a clean backend with autocommit = True, so
# first we need to do a bit of work to have that.
self._old_backend = connections[DEFAULT_DB_ALIAS]
settings = self._old_backend.settings_dict.copy()
opts = settings['OPTIONS'].copy()
opts['autocommit'] = True
settings['OPTIONS'] = opts
new_backend = self._old_backend.__class__(settings, DEFAULT_DB_ALIAS)
connections[DEFAULT_DB_ALIAS] = new_backend
def tearDown(self):
connections[DEFAULT_DB_ALIAS] = self._old_backend
def test_initial_autocommit_state(self):
self.assertTrue(connection.features.uses_autocommit)
self.assertEqual(connection.isolation_level, self._autocommit)
def test_transaction_management(self):
transaction.enter_transaction_management()
transaction.managed(True)
self.assertEqual(connection.isolation_level, self._read_committed)
transaction.leave_transaction_management()
self.assertEqual(connection.isolation_level, self._autocommit)
def test_transaction_stacking(self):
transaction.enter_transaction_management()
transaction.managed(True)
self.assertEqual(connection.isolation_level, self._read_committed)
transaction.enter_transaction_management()
self.assertEqual(connection.isolation_level, self._read_committed)
transaction.leave_transaction_management()
self.assertEqual(connection.isolation_level, self._read_committed)
transaction.leave_transaction_management()
self.assertEqual(connection.isolation_level, self._autocommit)
class TestManyToManyAddTransaction(TransactionTestCase):
def test_manyrelated_add_commit(self):
"Test for https://code.djangoproject.com/ticket/16818"
a = M2mA.objects.create()
b = M2mB.objects.create(fld=10)
a.others.add(b)
# We're in a TransactionTestCase and have not changed transaction
# behavior from default of "autocommit", so this rollback should not
# actually do anything. If it does in fact undo our add, that's a bug
# that the bulk insert was not auto-committed.
transaction.rollback()
self.assertEqual(a.others.count(), 1)
class SavepointTest(TransactionTestCase):
@skipUnlessDBFeature('uses_savepoints')
def test_savepoint_commit(self):
@commit_manually
def work():
mod = Mod.objects.create(fld=1)
pk = mod.pk
sid = transaction.savepoint()
mod1 = Mod.objects.filter(pk=pk).update(fld=10)
transaction.savepoint_commit(sid)
mod2 = Mod.objects.get(pk=pk)
transaction.commit()
self.assertEqual(mod2.fld, 10)
work()
@skipIf(connection.vendor == 'mysql' and \
connection.features._mysql_storage_engine == 'MyISAM',
"MyISAM MySQL storage engine doesn't support savepoints")
@skipUnlessDBFeature('uses_savepoints')
def test_savepoint_rollback(self):
@commit_manually
def work():
mod = Mod.objects.create(fld=1)
pk = mod.pk
sid = transaction.savepoint()
mod1 = Mod.objects.filter(pk=pk).update(fld=20)
transaction.savepoint_rollback(sid)
mod2 = Mod.objects.get(pk=pk)
transaction.commit()
self.assertEqual(mod2.fld, 1)
work()
|
RichardLitt/wyrd-django-dev
|
tests/regressiontests/transactions_regress/tests.py
|
Python
|
bsd-3-clause
| 10,745
|
#!/usr/bin/env python
from mate_invest.defs import GTK_API_VERSION
import gi
gi.require_version("Gtk", GTK_API_VERSION)
from gi.repository import Gtk
from gi.repository import Gdk
from gi.repository import GdkPixbuf
from gi.repository import GObject
import os
import mate_invest
from gettext import gettext as _
from mate_invest import *
import sys
from os.path import join
import urllib
from threading import Thread
import time
AUTOREFRESH_TIMEOUT = 20*60*1000 # 20 minutes
# based on http://www.johnstowers.co.nz/blog/index.php/2007/03/12/threading-and-pygtk/
class _IdleObject(GObject.GObject):
"""
Override GObject.GObject to always emit signals in the main thread
    by emitting on an idle handler
"""
def __init__(self):
GObject.GObject.__init__(self)
def emit(self, *args):
GObject.idle_add(GObject.GObject.emit,self,*args)
class ImageRetriever(Thread, _IdleObject):
"""
Thread which uses gobject signals to return information
to the GUI.
"""
__gsignals__ = {
"completed": (
GObject.SignalFlags.RUN_LAST, None, []),
# FIXME: should we be making use of this?
#"progress": (
# GObject.SignalFlags.RUN_LAST, None, [
# GObject.TYPE_FLOAT]) #percent complete
}
def __init__(self, image_url):
Thread.__init__(self)
_IdleObject.__init__(self)
self.image_url = image_url
self.retrieved = False
def run(self):
self.image = Gtk.Image()
try: sock = urllib.urlopen(self.image_url, proxies = mate_invest.PROXY)
except Exception, msg:
mate_invest.debug("Error while opening %s: %s" % (self.image_url, msg))
else:
loader = GdkPixbuf.PixbufLoader()
loader.connect("closed", lambda loader: self.image.set_from_pixbuf(loader.get_pixbuf()))
loader.write(sock.read())
sock.close()
loader.close()
self.retrieved = True
self.emit("completed")
# p:
# eX = Exponential Moving Average
# mX = Moving Average
# b = Bollinger Bands Overlay
# v = Volume Overlay
# p = Parabolic SAR overlay
# s = Splits Overlay
# q:
# l = Line
# c = Candles
# b = Bars
# l:
# on = Logarithmic
# off = Linear
# z:
# l = Large
# m = Medium
# t:
# Xd = X Days
# Xm = X Months
# Xy = X Years
# a:
# fX = MFI X days
# ss = Slow Stochastic
# fs = Fast Stochastic
# wX = W%R X Days
# mX-Y-Z = MACD X Days, Y Days, Signal
# pX = ROC X Days
# rX = RSI X Days
# v = Volume
# vm = Volume +MA
# c:
# X = compare with X
#
class FinancialChart:
def __init__(self, ui):
self.ui = ui
#Time ranges of the plot (parameter / combo-box t)
self.time_ranges = ["1d", "5d", "3m", "6m", "1y", "5y", "my"]
#plot types (parameter / combo-box q)
self.plot_types = ["l", "b", "c"]
#plot scales (parameter / combo-box l)
self.plot_scales = ["off", "on"]
# Window Properties
win = ui.get_object("window")
win.set_title(_("Financial Chart"))
try:
pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_size(join(mate_invest.ART_DATA_DIR, "invest_neutral.svg"), 96,96)
self.ui.get_object("plot").set_from_pixbuf(pixbuf)
except Exception, msg:
mate_invest.debug("Could not load 'invest-neutral.svg' file: %s" % msg)
pass
        # Default combobox values
for widget in ["t", "q", "l"]:
ui.get_object(widget).set_active(0)
# Connect every option widget to its corresponding change signal
symbolentry = ui.get_object("s")
refresh_chart_callback = lambda w: self.on_refresh_chart()
for widgets, signal in [
(("pm5","pm10","pm20","pm50","pm100","pm200",
"pe5","pe10", "pe20","pe50","pe100","pe200",
"pb","pp","ps","pv",
"ar","af","ap","aw","am","ass","afs","av","avm"), "toggled"),
(("t", "q", "l"), "changed"),
(("s",), "activate"),
]:
for widget in widgets:
ui.get_object(widget).connect(signal, refresh_chart_callback)
ui.get_object("progress").hide()
# Connect auto-refresh widget
self.autorefresh_id = 0
ui.get_object("autorefresh").connect("toggled", self.on_autorefresh_toggled)
def on_refresh_chart(self, from_timer=False):
tickers = self.ui.get_object("s").get_text()
if tickers.strip() == "":
return True
# FIXME: We don't just do US stocks, so we can't be this
# simplistic about it, but it is a good idea.
#if from_timer and not ustime.hour_between(9, 16):
# return True
tickers = [ticker.strip().upper() for ticker in tickers.split(' ') if ticker != ""]
# Update Window Title ------------------------------------------------------
win = self.ui.get_object("window")
title = _("Financial Chart - %s")
titletail = ""
for ticker in tickers:
titletail += "%s / " % ticker
title = title % titletail
win.set_title(title[:-3])
# Detect Comparison or simple chart ----------------------------------------
opt = ""
for ticker in tickers[1:]:
opt += "&c=%s" % ticker
# Create the overlay string ------------------------------------------------
p = ""
for name, param in [
("pm5", 5),
("pm10", 10),
("pm20", 20),
("pm50", 50),
("pm100", 100),
("pm200", 200),
("pe5", 5),
("pe10", 10),
("pe20", 20),
("pe50", 50),
("pe100", 100),
("pe200", 200),
("pb", ""),
("pp", ""),
("ps", ""),
("pv", ""),
]:
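            # The checkbox id minus its leading 'p' is the overlay code sent
            # in the chart URL, e.g. "pm5" contributes "m5," (moving average).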
if self.ui.get_object(name).get_active():
p += "%s%s," % (name[1], param)
# Create the indicators string ---------------------------------------------
a = ""
for name, param in [
("ar", 14),
("af", 14),
("ap", 12),
("aw", 14),
("am", "26-12-9"),
("ass", ""),
("afs", ""),
("av", ""),
("avm", ""),
]:
if self.ui.get_object(name).get_active():
a += "%s%s," % (name[1:], param)
# Create the image URL -----------------------------------------------------
chart_base_url = "http://chart.finance.yahoo.com/z?s=%(s)s&t=%(t)s&q=%(q)s&l=%(l)s&z=%(z)s&p=%(p)s&a=%(a)s%(opt)s"
url = chart_base_url % {
"s": tickers[0],
"t": self.time_ranges[self.ui.get_object("t").get_active()],
"q": self.plot_types[self.ui.get_object("q").get_active()],
"l": self.plot_scales[self.ui.get_object("l").get_active()],
"z": "l",
"p": p,
"a": a,
"opt": opt,
}
# Download and display the image -------------------------------------------
progress = self.ui.get_object("progress")
progress.set_text(_("Opening Chart"))
progress.show()
image_retriever = ImageRetriever(url)
image_retriever.connect("completed", self.on_retriever_completed)
image_retriever.start()
# Update timer if needed
self.on_autorefresh_toggled(self.ui.get_object("autorefresh"))
return True
def on_retriever_completed(self, retriever):
self.ui.get_object("plot").set_from_pixbuf(retriever.image.get_pixbuf())
progress = self.ui.get_object("progress")
if retriever.retrieved == True:
progress.set_text(_("Chart downloaded"))
else:
progress.set_text(_("Chart could not be downloaded"))
def on_autorefresh_toggled(self, autorefresh):
if self.autorefresh_id != 0:
GObject.source_remove(self.autorefresh_id)
self.autorefresh_id = 0
if autorefresh.get_active():
self.autorefresh_id = GObject.timeout_add(AUTOREFRESH_TIMEOUT, self.on_refresh_chart, True)
def show_chart(tickers):
ui = Gtk.Builder();
ui.add_from_file(os.path.join(mate_invest.BUILDER_DATA_DIR, "financialchart.ui"))
chart = FinancialChart(ui)
ui.get_object("s").set_text(' '.join(tickers))
chart.on_refresh_chart()
return ui.get_object("window")
|
nmercier/linux-cross-gcc
|
linux/lib/python2.7/dist-packages/mate_invest/chart.py
|
Python
|
bsd-3-clause
| 7,393
|
"""Calxeda: simg.py"""
# Copyright (c) 2012-2013, Calxeda Inc.
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Calxeda Inc. nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
# THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.
import struct
from cxmanage_api.crc32 import get_crc32
HEADER_LENGTH = 60
MIN_HEADER_LENGTH = 28
# pylint: disable=R0913, R0903, R0902
class SIMGHeader(object):
"""Container for an SIMG header.
>>> from cxmanage_api.simg import SIMGHeader
>>> simg = SIMGHeader()
:param header_string: SIMG Header value.
:type header_string: string
"""
def __init__(self, header_string=None):
"""Default constructor for the SIMGHeader class."""
if (header_string == None):
self.magic_string = 'SIMG'
self.hdrfmt = 2
self.priority = 0
self.imgoff = HEADER_LENGTH
self.imglen = 0
self.daddr = 0
self.flags = 0
self.crc32 = 0
self.version = ''
else:
header_string = header_string.ljust(HEADER_LENGTH, chr(0))
tup = struct.unpack('<4sHHIIIII32s', header_string)
self.magic_string = tup[0]
self.hdrfmt = tup[1]
self.priority = tup[2]
self.imgoff = tup[3]
self.imglen = tup[4]
self.daddr = tup[5]
self.flags = tup[6]
self.crc32 = tup[7]
if (self.hdrfmt >= 2):
self.version = tup[8]
else:
self.version = ''
def __str__(self):
return struct.pack('<4sHHIIIII32s', self.magic_string, self.hdrfmt,
self.priority, self.imgoff, self.imglen, self.daddr,
self.flags, self.crc32, self.version)
def create_simg(contents, priority=0, daddr=0, skip_crc32=False, align=False,
version=None):
"""Create an SIMG version of a file.
>>> from cxmanage_api.simg import create_simg
>>> simg = create_simg(contents='foobarbaz')
>>> simg
'SIMG\\x02\\x00\\x00\\x00<\\x00\\x00\\x00\\t\\x00\\x00\\x00\\x00\\x00\\x00
\\x00\\xff\\xff\\xff\\xffK\\xf3\\xea\\x0c\\x00\\x00\\x00\\x00\\x00\\x00
\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00
\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00foobarbaz'
:param contents: Contents of the SIMG file.
:type contents: string
:param priority: SIMG Header priority value.
:type priority: integer
:param daddr: SIMG Header daddr value.
:type daddr: integer
:param skip_crc32: Flag to skip crc32 calculating.
:type skip_crc32: boolean
:param align: Flag used to turn on/off image offset of 4096.
:type align: boolean
:param version: Version string.
:type version: string
:returns: String representation of the SIMG file.
:rtype: string
"""
if (version == None):
version = ''
header = SIMGHeader()
header.priority = priority
header.imglen = len(contents)
header.daddr = daddr
header.version = version
if (align):
header.imgoff = 4096
# Calculate crc value
if (skip_crc32):
crc32 = 0
else:
crc32 = get_crc32(contents, get_crc32(str(header)[:MIN_HEADER_LENGTH]))
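    # The CRC seed is computed over the first MIN_HEADER_LENGTH bytes of the
    # header (with flags and crc32 still zero) and then extended over the
    # contents; valid_simg() recomputes it the same way.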
# Get SIMG header
header.flags = 0xFFFFFFFF
header.crc32 = crc32
return str(header).ljust(header.imgoff, chr(0)) + contents
def has_simg(simg):
"""Returns true if this string has an SIMG header.
>>> from cxmanage_api.simg import create_simg
>>> simg=create_simg(contents='foobarbaz')
>>> from cxmanage_api.simg import has_simg
>>> has_simg(simg=simg)
True
:param simg: SIMG string (representation of a SIMG file).
:type simg: string
:returns: Whether or not the string has a SIMG header.
:rtype: boolean
"""
if (len(simg) < MIN_HEADER_LENGTH):
return False
header = SIMGHeader(simg[:HEADER_LENGTH])
# Check for magic word
return (header.magic_string == 'SIMG')
def valid_simg(simg):
"""Return true if this is a valid SIMG.
>>> from cxmanage_api.simg import create_simg
>>> simg=create_simg(contents='foobarbaz')
>>> from cxmanage_api.simg import valid_simg
>>> valid_simg(simg=simg)
True
:param simg: SIMG string (representation of a SIMG file).
:type simg: string
:returns: Whether or not the SIMG is valid.
:rtype: boolean
"""
if (not has_simg(simg)):
return False
header = SIMGHeader(simg[:HEADER_LENGTH])
# Check offset
if (header.imgoff < MIN_HEADER_LENGTH):
return False
# Check length
start = header.imgoff
end = start + header.imglen
contents = simg[start:end]
if (len(contents) < header.imglen):
return False
# Check crc32
crc32 = header.crc32
if (crc32 != 0):
header.flags = 0
header.crc32 = 0
if (crc32 != get_crc32(contents,
get_crc32(str(header)[:MIN_HEADER_LENGTH]))):
return False
return True
def get_simg_header(simg):
"""Returns the header of this SIMG.
>>> from cxmanage_api.simg import get_simg_header
>>> get_simg_header(x)
<cxmanage_api.simg.SIMGHeader instance at 0x7f4d1ce9aef0>
:param simg: Path to SIMG file.
:type simg: string
:returns: The SIMG header.
    :rtype: SIMGHeader
:raises ValueError: If the SIMG cannot be read.
"""
if (not valid_simg(simg)):
raise ValueError("Failed to read invalid SIMG")
return SIMGHeader(simg[:HEADER_LENGTH])
def get_simg_contents(simg):
"""Returns the contents of this SIMG.
>>> from cxmanage_api.simg import get_simg_contents
>>> get_simg_contents(simg=simg)
'foobarbaz'
:param simg: Path to SIMG file.
:type simg: string
:returns: Contents of this SIMG.
:rtype: string
"""
header = get_simg_header(simg)
start = header.imgoff
end = start + header.imglen
return simg[start:end]
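# Hedged round-trip sketch tying the helpers above together (doctest-style,
# using only functions defined in this module):
#
#     >>> simg = create_simg(contents='foobarbaz')
#     >>> valid_simg(simg)
#     True
#     >>> get_simg_contents(simg)
#     'foobarbaz'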
# End of file: ./simg.py
|
SilverLiningSystems/cxmanage-test2
|
cxmanage_api/simg.py
|
Python
|
bsd-3-clause
| 7,401
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
========================================================================
Gaussian Processes regression: goodness-of-fit on the 'diabetes' dataset
========================================================================
This example consists of fitting a Gaussian Process model onto the diabetes
dataset.
The correlation parameters are determined by means of maximum likelihood
estimation (MLE). An anisotropic absolute-exponential correlation model and a
constant regression model are assumed, matching the estimator configured
below. We also use a nugget of 1e-2 in order to account for the (strong)
noise in the targets.
We then compute a cross-validation estimate of the coefficient of
determination (R2) without re-performing MLE, using the set of correlation
parameters found on the whole dataset.
"""
print(__doc__)
# Author: Vincent Dubourg <vincent.dubourg@gmail.com>
# License: BSD style
from sklearn import datasets
from sklearn.gaussian_process import GaussianProcess
from sklearn.cross_validation import cross_val_score, KFold
# Load the diabetes dataset bundled with scikit-learn
diabetes = datasets.load_diabetes()
X, y = diabetes.data, diabetes.target
# Instantiate a GP model
gp = GaussianProcess(regr='constant', corr='absolute_exponential',
theta0=[1e-4] * 10, thetaL=[1e-12] * 10,
thetaU=[1e-2] * 10, nugget=1e-2, optimizer='Welch')
# Fit the GP model to the data, performing maximum likelihood estimation
gp.fit(X, y)
# Deactivate maximum likelihood estimation for the cross-validation loop
gp.theta0 = gp.theta # Given correlation parameter = MLE
gp.thetaL, gp.thetaU = None, None # None bounds deactivate MLE
# Perform a cross-validation estimate of the coefficient of determination
# using the cross_validation module (set n_jobs=-1 to use all available CPUs)
K = 20 # folds
R2 = cross_val_score(gp, X, y=y, cv=KFold(y.size, K), n_jobs=1).mean()
print("The %d-Folds estimate of the coefficient of determination is R2 = %s"
% (K, R2))
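# Hedged follow-up sketch (not part of the original example): the legacy
# GaussianProcess estimator can also return a predictive variance alongside
# point predictions via eval_MSE. The names y_pred/MSE are illustrative.
#
#     y_pred, MSE = gp.predict(X, eval_MSE=True)
#     sigma = MSE ** 0.5  # pointwise predictive standard deviation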
|
florian-f/sklearn
|
examples/gaussian_process/gp_diabetes_dataset.py
|
Python
|
bsd-3-clause
| 2,017
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import uuid
def gen_uuid(apps, schema_editor):
WooeyJob = apps.get_model('wooey', 'WooeyJob')
for obj in WooeyJob.objects.all():
obj.uuid = uuid.uuid4()
obj.save()
class Migration(migrations.Migration):
dependencies = [
('wooey', '0012_wooeyjob_uuid'),
]
operations = [
# Set the uuids for existing records
migrations.RunPython(gen_uuid),
]
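# Hedged note: as written this data migration is irreversible. A sketch of a
# reversible registration (RunPython.noop is available in Django >= 1.8):
#
#     migrations.RunPython(gen_uuid, migrations.RunPython.noop)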
|
wooey/Wooey
|
wooey/migrations/0013_wooeyjob_uuid_populate.py
|
Python
|
bsd-3-clause
| 517
|
import sys
from json import loads
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
try:
from unittest import mock
except ImportError:
import mock
from django.conf import settings
from django.utils.translation import activate
from django.template.base import Template, TemplateSyntaxError
from django.template.context import Context
from django.test.utils import override_settings
from django.test import TestCase
from django.contrib.auth.models import Permission
from django.contrib.admin.sites import site
from django.core.management import call_command
from django.core.exceptions import ImproperlyConfigured
from django.conf.urls import patterns, url
from sitetree.models import Tree, TreeItem
from sitetree.forms import TreeItemForm
from sitetree.admin import TreeAdmin, TreeItemAdmin, redirects_handler
from sitetree.utils import (
tree, item, get_app_n_model, import_app_sitetree_module, import_project_sitetree_modules, get_model_class
)
from sitetree.sitetreeapp import (
SiteTree, SiteTreeError, register_items_hook, register_i18n_trees, register_dynamic_trees, compose_dynamic_tree
)
urlpatterns = patterns(
'',
url(r'articles/', lambda r: None, name='articles_list'),
url(r'articles/(\d+)/', lambda r: None, name='articles_detailed'),
url(r'articles/(?P<id>\d+)_(?P<slug>[\w-]+)/', lambda r: None, name='url'),
)
class MockRequest(object):
def __init__(self, path=None, user_authorized=None, meta=None):
if path is None:
path = '/'
if user_authorized is None:
user_authorized = True
self.path = path
self.user = MockUser(user_authorized)
self.META = meta
class MockUser(object):
def __init__(self, authorized):
self.authorized = authorized
def is_authenticated(self):
return self.authorized
def get_mock_context(app=None, path=None, user_authorized=False, tree_item=None, put_var=None):
ctx = Context(
{
'request': MockRequest(path, user_authorized),
't2_root2_title': 'my_real_title', 'art_id': 10, 'tree_item': tree_item,
'somevar_str': 'articles_list', 'somevar_list': ['a', 'b'], 'put_var': put_var
},
current_app=app
)
ctx.template = mock.MagicMock()
ctx.template.engine.string_if_invalid = ''
return ctx
def render_string(string, context=None, context_put_var=None, context_path=None):
return Template(string).render(Context(context or get_mock_context(path=context_path, put_var=context_put_var)))
class SitetreeTest(TestCase):
@classmethod
def setUpClass(cls):
cls.init_trees()
@classmethod
def init_trees(cls):
cls.sitetree = SiteTree()
###########################################################
t1 = Tree(alias='tree1')
t1.save()
cls.t1 = t1
t1_root = TreeItem(title='root', tree=t1, url='/')
t1_root.save()
cls.tree_ttags_root = t1_root
t1_root_child1 = TreeItem(title='child1', tree=t1, parent=t1_root, url='/about/')
t1_root_child1.save()
cls.tree_ttags_root_child1 = t1_root_child1
t1_root_child2 = TreeItem(title='child2', tree=t1, parent=t1_root, url='articles_list', urlaspattern=True,
description='items_descr')
t1_root_child2.save()
cls.t1_root_child2 = t1_root_child2
t1_root_child2_sub1 = TreeItem(title='subchild1', tree=t1, parent=t1_root_child2,
url='articles_detailed art_id', urlaspattern=True)
t1_root_child2_sub1.save()
cls.t1_root_child2_sub1 = t1_root_child2_sub1
t1_root_child2_sub2 = TreeItem(title='subchild2', tree=t1, parent=t1_root_child2, url='/not_articles/10/')
t1_root_child2_sub2.save()
cls.t1_root_child2_sub2 = t1_root_child2_sub2
t1_root_child3 = TreeItem(title='child_with_var_str', tree=t1, parent=t1_root, url='somevar_str',
urlaspattern=True)
t1_root_child3.save()
cls.t1_root_child3 = t1_root_child3
t1_root_child4 = TreeItem(title='child_with_var_list', tree=t1, parent=t1_root, url='somevar_list',
urlaspattern=True)
t1_root_child4.save()
t2 = Tree(alias='tree2')
t2.save()
cls.t2 = t2
t2_root1 = TreeItem(title='{{ t2_root1_title }}', tree=t2, url='/')
t2_root1.save()
cls.t2_root1 = t2_root1
t2_root2 = TreeItem(title='put {{ t2_root2_title }} inside', tree=t2, url='/sub/')
t2_root2.save()
cls.t2_root2 = t2_root2
t2_root3 = TreeItem(title='for logged in only', tree=t2, url='/some/', access_loggedin=True)
t2_root3.save()
cls.t2_root3 = t2_root3
t2_root4 = TreeItem(title='url quoting', tree=t2, url='url 2 put_var', urlaspattern=True)
t2_root4.save()
cls.t2_root4 = t2_root4
t2_root5 = TreeItem(title='url quoting 1.5 style', tree=t2, url="'url' 2 put_var", urlaspattern=True)
t2_root5.save()
cls.t2_root5 = t2_root5
t2_root6 = TreeItem(title='url quoting 1.5 style', tree=t2, url='"url" 2 put_var', urlaspattern=True)
t2_root6.save()
cls.t2_root6 = t2_root6
t2_root7 = TreeItem(title='for guests only', tree=t2, url='/some_other/', access_guest=True)
t2_root7.save()
cls.t2_root7 = t2_root7
###########################################################
t3 = Tree(alias='tree3')
t3.save()
cls.t3 = t3
t3_en_root = TreeItem(title='root', tree=t3, url='/', hidden=True)
t3_en_root.save()
cls.t3_root = t3_en_root
t3_root_child1 = TreeItem(title='child1', tree=t3, parent=t3_en_root, url='/0/', access_loggedin=True)
t3_root_child1.save()
cls.t3_root_child1 = t3_root_child1
t3_root_child2 = TreeItem(title='child2', tree=t3, parent=t3_en_root, url='/1/', inmenu=True, hidden=True)
t3_root_child2.save()
cls.t3_root_child2 = t3_root_child2
t3_root_child3 = TreeItem(title='child3', tree=t3, parent=t3_en_root, url='/the_same_url/', inmenu=False)
t3_root_child3.save()
cls.t3_root_child3 = t3_root_child3
t3_root_child4 = TreeItem(title='child4', tree=t3, parent=t3_en_root, url='/3/', hidden=True)
t3_root_child4.save()
cls.t3_root_child4 = t3_root_child4
t3_root_child5 = TreeItem(title='child5', tree=t3, parent=t3_en_root, url='/4/', inmenu=True, hidden=True)
t3_root_child5.save()
cls.t3_root_child5 = t3_root_child5
t3_en = Tree(alias='tree3_en', title='tree3en_title')
t3_en.save()
cls.t3_en = t3_en
t3_en_root = TreeItem(title='root_en', tree=t3_en, url='/')
t3_en_root.save()
cls.t3_en_root = t3_en_root
t3_en_root_child1 = TreeItem(title='child1_en', tree=t3_en, parent=t3_en_root, url='/0_en/')
t3_en_root_child1.save()
t3_en_root_child2 = TreeItem(title='child2_en', tree=t3_en, parent=t3_en_root, url='/the_same_url/')
t3_en_root_child2.save()
###########################################################
tree_main = Tree(alias='main')
tree_main.save()
cls.tree_main = tree_main
tree_main_root = TreeItem(title='root', tree=tree_main, url='/', alias='for_dynamic')
tree_main_root.save()
cls.tree_main_root = tree_main_root
@classmethod
def tearDownClass(cls):
Tree.objects.all().delete()
TreeItem.objects.all().delete()
class TreeModelTest(SitetreeTest):
def test_create_rename_delete(self):
tree = Tree(alias='mytree')
tree.save()
self.assertIsNotNone(tree.id)
self.assertEqual(tree.alias, 'mytree')
tree.alias = 'not_mytree'
tree.save(force_update=True)
self.assertEqual(tree.alias, 'not_mytree')
tree.delete()
self.assertIsNone(tree.id)
def test_unique_aliases(self):
tree1 = Tree(alias='mytree')
tree1.save()
tree2 = Tree(alias='mytree')
self.assertRaises(Exception, tree2.save)
class TreeItemModelTest(SitetreeTest):
def test_url_resolve(self):
self.sitetree.menu('tree1', 'trunk', get_mock_context(path='/', put_var='abrakadabra'))
url = self.sitetree.url(self.t2_root4, get_mock_context(path='/articles/2_slugged/'))
self.assertTrue(url.find('abrakadabra') > -1)
self.sitetree.menu('tree1', 'trunk', get_mock_context(path='/', put_var='booo'))
url = self.sitetree.url(self.t2_root4, get_mock_context(path='/articles/2_slugged-mugged/'))
self.assertTrue(url.find('booo') > -1)
self.sitetree.menu('tree1', 'trunk', get_mock_context(path='/', put_var='rolling'))
url = self.sitetree.url(self.t2_root5, get_mock_context(path='/articles/2_quoted/'))
self.assertTrue(url.find('rolling') > -1)
self.sitetree.menu('tree1', 'trunk', get_mock_context(path='/', put_var='spoon'))
url = self.sitetree.url(self.t2_root6, get_mock_context(path='/articles/2_quoted/'))
self.assertTrue(url.find('spoon') > -1)
def test_no_tree(self):
ti = TreeItem(title='notree_item')
self.assertRaises(Exception, ti.save)
def test_create_rename_delete(self):
ti1 = TreeItem(title='new_root_item', tree=self.t1)
ti1.save()
self.assertIsNotNone(ti1.id)
self.assertEqual(ti1.title, 'new_root_item')
ti1.title = 'not_new_root_item'
ti1.save(force_update=True)
self.assertEqual(ti1.title, 'not_new_root_item')
ti1.delete()
self.assertIsNone(ti1.id)
def test_context_proc_required(self):
context = Context()
old_debug = settings.DEBUG
settings.DEBUG = True
self.assertRaises(SiteTreeError, self.sitetree.menu, 'tree1', 'trunk', context)
settings.DEBUG = old_debug
def test_menu(self):
menu = self.sitetree.menu('tree1', 'trunk', get_mock_context(path='/about/'))
self.assertEqual(len(menu), 1)
self.assertEqual(menu[0].id, self.tree_ttags_root.id)
self.assertEqual(menu[0].is_current, False)
self.assertEqual(menu[0].depth, 0)
self.assertEqual(menu[0].has_children, True)
self.assertEqual(menu[0].in_current_branch, True)
menu = self.sitetree.menu('tree2', 'trunk', get_mock_context(path='/sub/'))
self.assertEqual(len(menu), 6)
self.assertEqual(menu[0].id, self.t2_root1.id)
self.assertEqual(menu[1].id, self.t2_root2.id)
self.assertEqual(menu[0].is_current, False)
self.assertEqual(menu[0].in_current_branch, False)
self.assertEqual(menu[1].is_current, True)
self.assertEqual(menu[1].in_current_branch, True)
self.assertEqual(menu[0].depth, 0)
self.assertEqual(menu[1].depth, 0)
self.assertEqual(menu[0].has_children, False)
self.assertEqual(menu[1].has_children, False)
def test_breadcrumbs(self):
bc1 = self.sitetree.breadcrumbs('tree1', get_mock_context(path='/not_articles/10/'))
self.assertEqual(len(bc1), 3)
self.assertEqual(bc1[0].id, self.tree_ttags_root.id)
self.assertEqual(bc1[1].id, self.t1_root_child2.id)
self.assertEqual(bc1[1].url_resolved, '/articles/')
self.assertEqual(bc1[2].id, self.t1_root_child2_sub2.id)
self.assertEqual(bc1[0].is_current, False)
self.assertEqual(bc1[1].is_current, False)
self.assertEqual(bc1[2].is_current, True)
self.assertEqual(bc1[0].has_children, True)
self.assertEqual(bc1[1].has_children, True)
self.assertEqual(bc1[2].has_children, False)
self.assertEqual(bc1[0].depth, 0)
self.assertEqual(bc1[1].depth, 1)
self.assertEqual(bc1[2].depth, 2)
def test_page_title(self):
title = self.sitetree.get_current_page_title('tree1', get_mock_context(path='/articles/'))
self.assertEqual(title, self.t1_root_child2.title)
title = self.sitetree.get_current_page_title('tree1', get_mock_context(path='/not_articles/'))
self.assertEqual(title, '')
def test_page_attr(self):
attr = self.sitetree.get_current_page_attr('description', 'tree1', get_mock_context(path='/articles/'))
self.assertEqual(attr, self.t1_root_child2.description)
attr = self.sitetree.get_current_page_attr('description', 'tree1', get_mock_context(path='/not_articles/'))
self.assertEqual(attr, '')
def test_sitetree(self):
st1 = self.sitetree.tree('tree1', get_mock_context(path='/articles/'))
self.assertEqual(len(st1), 1)
self.assertEqual(st1[0].id, self.tree_ttags_root.id)
self.assertEqual(st1[0].is_current, False)
self.assertEqual(st1[0].depth, 0)
self.assertEqual(st1[0].has_children, True)
st2 = self.sitetree.tree('tree2', get_mock_context(path='/'))
        self.assertIn(self.t2_root7, st2)  # Guest-only items are visible to anonymous users.
        self.assertNotIn(self.t2_root3, st2)  # Logged-in-only items are not.
self.assertEqual(len(st2), 6)
self.assertEqual(st2[0].id, self.t2_root1.id)
self.assertEqual(st2[1].id, self.t2_root2.id)
self.assertEqual(self.t2_root1.access_loggedin, False)
self.assertEqual(self.t2_root1.access_guest, False)
self.assertEqual(self.t2_root2.access_loggedin, False)
self.assertEqual(self.t2_root2.access_guest, False)
self.assertEqual(self.t2_root3.access_loggedin, True)
self.assertEqual(self.t2_root3.access_guest, False)
self.assertEqual(self.t2_root7.access_loggedin, False)
self.assertEqual(self.t2_root7.access_guest, True)
self.assertEqual(st2[0].title, '{{ t2_root1_title }}')
self.assertEqual(st2[1].title, 'put {{ t2_root2_title }} inside')
self.assertEqual(st2[0].title_resolved, '')
self.assertEqual(st2[1].title_resolved, 'put my_real_title inside')
self.assertEqual(st2[0].is_current, True)
self.assertEqual(st2[1].is_current, False)
self.assertEqual(st2[0].depth, 0)
self.assertEqual(st2[1].depth, 0)
self.assertEqual(st2[0].has_children, False)
self.assertEqual(st2[1].has_children, False)
st2 = self.sitetree.tree('tree2', get_mock_context(path='/', user_authorized=True))
        self.assertNotIn(self.t2_root7, st2)  # Guest-only items are hidden from logged-in users.
        self.assertIn(self.t2_root3, st2)  # Logged-in-only items now show up.
self.assertEqual(len(st2), 6)
def test_items_hook_tree(self):
def my_processor(tree_items, tree_sender):
for item in tree_items:
item.title_resolved = 'FakedTreeItem'
return tree_items
register_items_hook(my_processor)
items = self.sitetree.tree('tree1', get_mock_context(path='/'))
register_items_hook(None)
self.assertEqual(items[0].title_resolved, 'FakedTreeItem')
def test_items_hook_menu(self):
def my_processor(tree_items, tree_sender):
for item in tree_items:
item.title_resolved = 'FakedMenuItem'
return tree_items
register_items_hook(my_processor)
items = self.sitetree.menu('tree1', 'trunk', get_mock_context(path='/'))
register_items_hook(None)
self.assertEqual(items[0].title_resolved, 'FakedMenuItem')
def test_items_hook_breadcrumbs(self):
def my_processor(tree_items, tree_sender):
for item in tree_items:
item.title_resolved = 'FakedBreadcrumbsItem'
return tree_items
register_items_hook(my_processor)
items = self.sitetree.breadcrumbs('tree1', get_mock_context(path='/not_articles/10/'))
register_items_hook(None)
self.assertEqual(items[0].title_resolved, 'FakedBreadcrumbsItem')
class TemplateTagsTest(SitetreeTest):
@classmethod
def setUpClass(cls):
cls.sitetree = SiteTree()
tree_ttags = Tree(alias='ttags')
tree_ttags.save()
cls.tree_ttags = tree_ttags
tree_ttags_root = TreeItem(
title='root', tree=tree_ttags, url='/',
insitetree=True, inbreadcrumbs=True, inmenu=True
)
tree_ttags_root.save()
cls.tree_ttags_root = tree_ttags_root
tree_ttags_root_child1 = TreeItem(
title='sometitle', tree=tree_ttags, parent=tree_ttags_root, url='/child1',
insitetree=True, inbreadcrumbs=True, inmenu=True,
hint='somehint', description='somedescr'
)
tree_ttags_root_child1.save()
cls.tree_ttags_root_child1 = tree_ttags_root_child1
def test_sitetree_tree(self):
tpl = '{% load sitetree %}{% sitetree_tree "mytree" %}'
self.assertRaises(TemplateSyntaxError, render_string, tpl)
tpl = '{% load sitetree %}{% sitetree_tree from "mytree" %}'
result = render_string(tpl)
self.assertEqual(result.strip(), '')
tpl = '{% load sitetree %}{% sitetree_tree from "ttags" %}'
result = render_string(tpl)
self.assertIn('href="/"', result)
def test_sitetree_children(self):
context = get_mock_context(put_var=self.tree_ttags_root)
self.sitetree.set_global_context(context)
tpl = '{% load sitetree %}{% sitetree_children %}'
self.assertRaises(TemplateSyntaxError, render_string, tpl)
tpl = '{% load sitetree %}{% sitetree_children of put_var for sitetree template "sitetree/tree.html" %}'
result = render_string(tpl, context=context)
self.assertIn('href="/child1"', result)
def test_sitetree_breadcrumbs(self):
tpl = '{% load sitetree %}{% sitetree_breadcrumbs %}'
self.assertRaises(TemplateSyntaxError, render_string, tpl)
tpl = '{% load sitetree %}{% sitetree_breadcrumbs from "mytree" %}'
result = render_string(tpl)
self.assertEqual(result.strip(), '<ul>\n\t\n</ul>')
tpl = '{% load sitetree %}{% sitetree_breadcrumbs from "ttags" %}'
result = render_string(tpl, context_path='/child1')
self.assertIn('href="/"', result)
self.assertIn('root', result)
self.assertIn('sometitle', result)
def test_sitetree_menu(self):
tpl = '{% load sitetree %}{% sitetree_menu %}'
self.assertRaises(TemplateSyntaxError, render_string, tpl)
tpl = '{% load sitetree %}{% sitetree_menu from "mytree" include "trunk" %}'
result = render_string(tpl)
self.assertEqual(result.strip(), '<ul>\n\t\n</ul>')
tpl = '{% load sitetree %}{% sitetree_menu from "ttags" include "trunk" %}'
result = render_string(tpl, context_path='/child1')
self.assertIn('current_branch">root', result)
self.assertIn('current_item current_branch">sometitle', result)
def test_sitetree_page_title(self):
tpl = '{% load sitetree %}{% sitetree_page_title %}'
self.assertRaises(TemplateSyntaxError, render_string, tpl)
with override_settings(DEBUG=True):
tpl = '{% load sitetree %}{% sitetree_page_title from "ttags" %}'
self.assertRaises(SiteTreeError, render_string, tpl, context_path='/somewhere')
tpl = '{% load sitetree %}{% sitetree_page_title from "ttags" %}'
result = render_string(tpl, context_path='/child1')
self.assertEqual(result, 'sometitle')
def test_sitetree_page_hint(self):
tpl = '{% load sitetree %}{% sitetree_page_hint %}'
self.assertRaises(TemplateSyntaxError, render_string, tpl)
with override_settings(DEBUG=True):
tpl = '{% load sitetree %}{% sitetree_page_hint from "ttags" %}'
self.assertRaises(SiteTreeError, render_string, tpl, context_path='/somewhere')
tpl = '{% load sitetree %}{% sitetree_page_hint from "ttags" %}'
result = render_string(tpl, context_path='/child1')
self.assertEqual(result, 'somehint')
def test_sitetree_page_description(self):
tpl = '{% load sitetree %}{% sitetree_page_description %}'
self.assertRaises(TemplateSyntaxError, render_string, tpl)
with override_settings(DEBUG=True):
tpl = '{% load sitetree %}{% sitetree_page_description from "ttags" %}'
self.assertRaises(SiteTreeError, render_string, tpl, context_path='/somewhere')
tpl = '{% load sitetree %}{% sitetree_page_description from "ttags" %}'
result = render_string(tpl, context_path='/child1')
self.assertEqual(result, 'somedescr')
def test_sitetree_url(self):
tpl = '{% load sitetree %}{% sitetree_url %}'
self.assertRaises(TemplateSyntaxError, render_string, tpl)
context = get_mock_context(path='/child1', put_var=self.tree_ttags_root_child1)
tpl = '{% load sitetree %}{% sitetree_url for put_var %}'
result = render_string(tpl, context)
self.assertEqual(result, '/child1')
tpl = '{% load sitetree %}{% sitetree_url for put_var as res_var %}'
render_string(tpl, context)
self.assertEqual(context.get('res_var'), '/child1')
class TreeTest(SitetreeTest):
def test_str(self):
self.assertEqual(self.t3.alias, str(self.t3))
def test_get_title(self):
self.assertEqual(self.t3.get_title(), 'tree3')
self.assertEqual(self.t3_en.get_title(), 'tree3en_title')
def test_children_filtering(self):
self.sitetree._global_context = get_mock_context(path='/')
self.sitetree.get_sitetree('tree3')
children = self.sitetree.get_children('tree3', self.t3_root)
filtered = self.sitetree.filter_items(children, 'menu')
self.assertEqual(filtered, [])
def test_tree_filtering(self):
tree = self.sitetree.tree('tree3', get_mock_context(path='/'))
self.assertEqual(len(tree), 0)
def test_register_i18n_trees(self):
register_i18n_trees(['tree3'])
self.sitetree._global_context = get_mock_context(path='/the_same_url/')
activate('en')
self.sitetree.get_sitetree('tree3')
children = self.sitetree.get_children('tree3', self.t3_en_root)
self.assertEqual(len(children), 2)
self.assertFalse(children[0].is_current)
self.assertTrue(children[1].is_current)
activate('ru')
self.sitetree.lang_init()
self.sitetree.get_sitetree('tree3')
children = self.sitetree.get_children('tree3', self.t3_root)
self.assertEqual(len(children), 5)
self.assertFalse(children[1].is_current)
self.assertTrue(children[2].is_current)
self.assertFalse(children[3].is_current)
class DynamicTreeTest(SitetreeTest):
def test_basic_old(self):
self.basic_test()
def test_basic_new(self):
self.basic_test(new_style=True)
def basic_test(self, new_style=False, reset_cache=False):
trees = (
compose_dynamic_tree((
tree('dynamic_main_root', items=(
item('dynamic_main_root_1', 'dynamic_main_root_1_url', url_as_pattern=False),
item('dynamic_main_root_2', 'dynamic_main_root_2_url', url_as_pattern=False),
)),
), target_tree_alias='main'),
compose_dynamic_tree((
tree('dynamic_main_sub', items=(
item('dynamic_main_sub_1', 'dynamic_main_sub_1_url', url_as_pattern=False),
item('dynamic_main_sub_2', 'dynamic_main_sub_2_url', url_as_pattern=False),
)),
), target_tree_alias='main', parent_tree_item_alias='for_dynamic'),
compose_dynamic_tree((
tree('dynamic', items=(
item('dynamic_1', 'dynamic_1_url', children=(
item('dynamic_1_sub_1', 'dynamic_1_sub_1_url', url_as_pattern=False),
), url_as_pattern=False),
item('dynamic_2', 'dynamic_2_url', url_as_pattern=False),
)),
)),
)
kwargs = {
'reset_cache': reset_cache
}
if new_style:
register_dynamic_trees(*trees, **kwargs)
else:
register_dynamic_trees(trees, **kwargs)
self.sitetree._global_context = get_mock_context(path='/the_same_url/')
tree_alias, sitetree_items = self.sitetree.get_sitetree('main')
self.assertEqual(len(sitetree_items), 5)
self.assertEqual(sitetree_items[3].title, 'dynamic_main_root_1')
self.assertEqual(sitetree_items[4].title, 'dynamic_main_root_2')
children = self.sitetree.get_children('main', self.tree_main_root)
self.assertEqual(len(children), 2)
tree_alias, sitetree_items = self.sitetree.get_sitetree('dynamic')
self.assertEqual(len(sitetree_items), 3)
children = self.sitetree.get_children('dynamic', sitetree_items[0])
self.assertEqual(len(children), 1)
class UtilsItemTest(SitetreeTest):
def test_permission_any(self):
i1 = item('root', 'url')
self.assertEqual(i1.access_perm_type, i1.PERM_TYPE_ALL)
i2 = item('root', 'url', perms_mode_all=True)
self.assertEqual(i2.access_perm_type, i1.PERM_TYPE_ALL)
i3 = item('root', 'url', perms_mode_all=False)
self.assertEqual(i3.access_perm_type, i1.PERM_TYPE_ANY)
def test_permissions_none(self):
i1 = item('root', 'url')
self.assertEqual(i1.permissions, [])
def test_int_permissions(self):
i1 = item('root', 'url', access_by_perms=[1, 2, 3])
self.assertEqual(i1.permissions, [1, 2, 3])
    def test_get_model_class(self):
        cls = get_model_class('MODEL_TREE')
        self.assertIs(cls, Tree)
    def test_import_project_sitetree_modules(self):
        import_project_sitetree_modules()
def test_import_app_sitetree_module(self):
self.assertRaises(ImportError, import_app_sitetree_module, 'sitetre')
def test_get_app_n_model(self):
app, model = get_app_n_model('MODEL_TREE')
self.assertEqual(app, 'sitetree')
self.assertEqual(model, 'Tree')
self.assertRaises(ImproperlyConfigured, get_app_n_model, 'ALIAS_TRUNK')
def test_valid_string_permissions(self):
perm = Permission.objects.all()[0]
perm_name = '%s.%s' % (perm.content_type.app_label, perm.codename)
i1 = item('root', 'url', access_by_perms=perm_name)
self.assertEqual(i1.permissions, [perm])
def test_perm_obj_permissions(self):
perm = Permission.objects.all()[0]
i1 = item('root', 'url', access_by_perms=perm)
self.assertEqual(i1.permissions, [perm])
def test_bad_string_permissions(self):
self.assertRaises(ValueError, item, 'root', 'url', access_by_perms='bad name')
self.assertRaises(ValueError, item, 'root', 'url', access_by_perms='unknown.name')
self.assertRaises(ValueError, item, 'root', 'url', access_by_perms=42.2)
def test_access_restricted(self):
        # Supplying permissions implies restricted access
i0 = item('root', 'url', access_by_perms=1)
self.assertEqual(i0.access_restricted, True)
        # Without permissions the default is unrestricted
i1 = item('root', 'url')
self.assertEqual(i1.access_restricted, False)
class TestAdmin(SitetreeTest):
def test_redirects_handler(self):
def get_handler(referer, item_id=None):
req = MockRequest(referer, True, {
'HTTP_REFERER': referer
})
args = [req]
kwargs = {}
if item_id is not None:
kwargs['item_id'] = item_id
return redirects_handler(*args, **kwargs)
handler = get_handler('/')
self.assertEqual(handler._headers['location'][1], '/../')
handler = get_handler('/delete/')
self.assertEqual(handler._headers['location'][1], '/delete/../../')
handler = get_handler('/history/')
self.assertEqual(handler._headers['location'][1], '/history/../../')
handler = get_handler('/history/', 42)
self.assertEqual(handler._headers['location'][1], '/history/../')
def test_tree_item_admin(self):
admin = TreeItemAdmin(TreeItem, site)
admin.tree = Tree.objects.get(alias='main')
form = admin.get_form(MockRequest())
self.assertEqual(len(form.known_url_names), 3)
self.assertIn('articles_list', form.known_url_names)
self.assertIn('articles_detailed', form.known_url_names)
self.assertIn('url', form.known_url_names)
def test_tree_item_admin_get_tree(self):
main_tree = Tree.objects.get(alias='main')
main_tree_item = TreeItem.objects.filter(tree__alias='main')[0]
admin = TreeItemAdmin(TreeItem, site)
tree = admin.get_tree(MockRequest(), main_tree.pk)
self.assertEqual(tree.alias, 'main')
tree = admin.get_tree(MockRequest(), None, main_tree_item.pk)
self.assertEqual(tree.alias, 'main')
def test_tree_item_admin_item_move(self):
main_tree = Tree.objects.get(alias='main')
admin = TreeItemAdmin(TreeItem, site)
new_item_1 = TreeItem(title='title_1', sort_order=1, tree_id=main_tree.pk)
new_item_1.save()
new_item_2 = TreeItem(title='title_2', sort_order=2, tree_id=main_tree.pk)
new_item_2.save()
new_item_3 = TreeItem(title='title_3', sort_order=3, tree_id=main_tree.pk)
new_item_3.save()
admin.item_move(None, None, new_item_2.id, 'up')
self.assertEqual(TreeItem.objects.get(pk=new_item_1.id).sort_order, 2)
self.assertEqual(TreeItem.objects.get(pk=new_item_2.id).sort_order, 1)
self.assertEqual(TreeItem.objects.get(pk=new_item_3.id).sort_order, 3)
admin.item_move(None, None, new_item_1.id, 'down')
self.assertEqual(TreeItem.objects.get(pk=new_item_1.id).sort_order, 3)
self.assertEqual(TreeItem.objects.get(pk=new_item_2.id).sort_order, 1)
self.assertEqual(TreeItem.objects.get(pk=new_item_3.id).sort_order, 2)
def test_tree_item_admin_save_model(self):
main_tree = Tree.objects.get(alias='main')
tree_item = TreeItem.objects.filter(tree__alias='main')[0]
admin = TreeItemAdmin(TreeItem, site)
admin.tree = main_tree
admin.save_model(MockRequest(), tree_item, None, change=True)
self.assertIs(tree_item.tree, admin.tree)
def test_tree_admin(self):
admin = TreeAdmin(Tree, site)
urls = admin.get_urls()
self.assertIn('tree_id', urls[1]._regex)
class TestForms(SitetreeTest):
def test_basic(self):
form = TreeItemForm(tree='main', tree_item='root')
self.assertIn('tree_item', form.fields)
self.assertEqual(form.fields['tree_item'].tree, 'main')
self.assertEqual(form.fields['tree_item'].initial, 'root')
self.assertEqual(form.fields['tree_item'].choices[1][1], 'root')
class TestManagementCommands(SitetreeTest):
def setUp(self):
self.file_contents = (
'[{"pk": 2, "fields": {"alias": "/tree1/", "title": "tree one"}, "model": "sitetree.tree"}, '
'{"pk": 3, "fields": {"alias": "/tree2/", "title": "tree two"}, "model": "sitetree.tree"}, '
'{"pk": 7, "fields": {"access_restricted": false, "inmenu": true, "title": "tree item one",'
' "hidden": false, "description": "", "alias": null, "url": "/tree1/item1/", "access_loggedin": false,'
' "urlaspattern": false, "access_perm_type": 1, "tree": 2, "hint": "", "inbreadcrumbs": true,'
' "access_permissions": [], "sort_order": 7, "access_guest": false, "parent": null, "insitetree": true},'
' "model": "sitetree.treeitem"}]')
def test_sitetreedump(self):
stdout = sys.stdout
sys.stdout = StringIO()
call_command('sitetreedump')
output = loads(sys.stdout.getvalue())
sys.stdout = stdout
self.assertEqual(output[0]['model'], 'sitetree.tree')
self.assertEqual(output[5]['model'], 'sitetree.treeitem')
def test_sitetreeload(self):
try:
import __builtin__
patch_val = '__builtin__.open'
except ImportError:
# python3
patch_val = 'builtins.open'
with mock.patch(patch_val) as mock_file:
mock_file.return_value.__enter__ = lambda s: s
mock_file.return_value.__exit__ = mock.Mock()
mock_file.return_value.read.return_value = self.file_contents
call_command('sitetreeload', 'somefile.json')
self.assertTrue(Tree.objects.filter(title='tree one').exists())
self.assertTrue(Tree.objects.filter(title='tree two').exists())
self.assertTrue(TreeItem.objects.filter(title='tree item one').exists())
|
RamezIssac/django-sitetree
|
sitetree/tests.py
|
Python
|
bsd-3-clause
| 33,020
|
"""SCons.Tool.SCCS.py
Tool-specific initialization for SCCS.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import SCons.Action
import SCons.Builder
import SCons.Util
def generate(env):
"""Add a Builder factory function and construction variables for
SCCS to an Environment."""
def SCCSFactory(env=env):
""" """
import SCons.Warnings as W
W.warn(W.DeprecatedSourceCodeWarning, """The SCCS() factory is deprecated and there is no replacement.""")
act = SCons.Action.Action('$SCCSCOM', '$SCCSCOMSTR')
return SCons.Builder.Builder(action = act, env = env)
#setattr(env, 'SCCS', SCCSFactory)
env.SCCS = SCCSFactory
env['SCCS'] = 'sccs'
env['SCCSFLAGS'] = SCons.Util.CLVar('')
env['SCCSGETFLAGS'] = SCons.Util.CLVar('')
env['SCCSCOM'] = '$SCCS $SCCSFLAGS get $SCCSGETFLAGS $TARGET'
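# Hedged SConstruct usage sketch (illustrative only; both the SCCS() factory
# and SourceCode() are deprecated in modern SCons, per the warning above):
#
#     env = Environment(tools=['default', 'SCCS'])
#     env.SourceCode('.', env.SCCS())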
def exists(env):
return env.Detect('sccs')
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
Distrotech/scons
|
src/engine/SCons/Tool/SCCS.py
|
Python
|
mit
| 2,283
|
from django.contrib import admin
from django.urls import path
urlpatterns = [
path("admin/", admin.site.urls),
]
|
MarkusH/django-osm-field
|
tests/urls.py
|
Python
|
mit
| 118
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2008 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with translate; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from django.db import models
from django.core.cache import cache
from django.conf import settings
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.utils.encoding import iri_to_uri
from pootle_app.lib.util import RelatedManager
def get_permission_contenttype():
content_type = ContentType.objects.filter(name='pootle', app_label='pootle_app', model="directory")[0]
return content_type
def get_pootle_permission(codename):
# The content type of our permission
content_type = get_permission_contenttype()
# Get the pootle view permission
return Permission.objects.get(content_type=content_type, codename=codename)
def get_pootle_permissions(codenames=None):
"""gets the available rights and their localized names"""
content_type = get_permission_contenttype()
if codenames is not None:
permissions = Permission.objects.filter(content_type=content_type, codename__in=codenames)
else:
permissions = Permission.objects.filter(content_type=content_type)
return dict((permission.codename, permission) for permission in permissions)
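# Hedged example of the helper above (the codenames are illustrative):
#
#     perms = get_pootle_permissions(codenames=['suggest', 'review'])
#     # -> {'suggest': <Permission ...>, 'review': <Permission ...>}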
def get_permissions_by_username(username, directory):
pootle_path = directory.pootle_path
path_parts = filter(None, pootle_path.split('/'))
key = iri_to_uri('Permissions:%s' % username)
permissions_cache = cache.get(key, {})
if pootle_path not in permissions_cache:
try:
permissionset = PermissionSet.objects.filter(
directory__in=directory.trail(only_dirs=False),
profile__user__username=username).order_by('-directory__pootle_path')[0]
except IndexError:
permissionset = None
if len(path_parts) > 1 and path_parts[0] != 'projects' and \
(permissionset is None or len(filter(None, permissionset.directory.pootle_path.split('/'))) < 2):
            # Active permission is at language level or higher; fall back to the project-level permission.
try:
project_path = '/projects/%s/' % path_parts[1]
permissionset = PermissionSet.objects.get(directory__pootle_path=project_path, profile__user__username=username)
except PermissionSet.DoesNotExist:
pass
if permissionset:
permissions_cache[pootle_path] = permissionset.to_dict()
else:
permissions_cache[pootle_path] = None
cache.set(key, permissions_cache, settings.OBJECT_CACHE_TIMEOUT)
return permissions_cache[pootle_path]
def get_matching_permissions(profile, directory):
if profile.user.is_authenticated():
permissions = get_permissions_by_username(profile.user.username, directory)
if permissions is not None:
return permissions
permissions = get_permissions_by_username('default', directory)
if permissions is not None:
return permissions
permissions = get_permissions_by_username('nobody', directory)
return permissions
def check_profile_permission(profile, permission_codename, directory):
"""it checks if current user has the permission the perform C{permission_codename}"""
if profile.user.is_superuser:
return True
permissions = get_matching_permissions(profile, directory)
return "administrate" in permissions or permission_codename in permissions
def check_permission(permission_codename, request):
"""it checks if current user has the permission the perform C{permission_codename}"""
if request.user.is_superuser:
return True
return "administrate" in request.permissions or permission_codename in request.permissions
class PermissionSetManager(RelatedManager):
def get_by_natural_key(self, username, pootle_path):
return self.get(profile__user__username=username, directory__pootle_path=pootle_path)
class PermissionSet(models.Model):
objects = PermissionSetManager()
class Meta:
unique_together = ('profile', 'directory')
app_label = "pootle_app"
profile = models.ForeignKey('pootle_profile.PootleProfile', db_index=True)
directory = models.ForeignKey('pootle_app.Directory', db_index=True, related_name='permission_sets')
positive_permissions = models.ManyToManyField(Permission, db_index=True, related_name='permission_sets_positive')
    # Negative permissions are no longer used; kept around for schema
    # compatibility with older versions.
negative_permissions = models.ManyToManyField(Permission, editable=False, related_name='permission_sets_negative')
def natural_key(self):
return (self.profile.user.username, self.directory.pootle_path)
natural_key.dependencies = ['pootle_app.Directory', 'pootle_profile.PootleProfile']
def __unicode__(self):
return "%s : %s" % (self.profile.user.username, self.directory.pootle_path)
def to_dict(self):
return dict((permission.codename, permission) for permission in self.positive_permissions.iterator())
def save(self, *args, **kwargs):
super(PermissionSet, self).save(*args, **kwargs)
key = iri_to_uri('Permissions:%s' % self.profile.user.username)
cache.delete(key)
def delete(self, *args, **kwargs):
super(PermissionSet, self).delete(*args, **kwargs)
key = iri_to_uri('Permissions:%s' % self.profile.user.username)
cache.delete(key)
|
tzabian/fuego-pootle
|
local_apps/pootle_app/models/permissions.py
|
Python
|
gpl-2.0
| 6,275
|
import sys
import os
import re
import imp
from itertools import count
from Tkinter import *
import tkSimpleDialog
import tkMessageBox
import webbrowser
import idlever
import WindowList
import SearchDialog
import GrepDialog
import ReplaceDialog
import PyParse
from configHandler import idleConf
import aboutDialog, textView, configDialog
# The default tab setting for a Text widget, in average-width characters.
TK_TABWIDTH_DEFAULT = 8
def _find_module(fullname, path=None):
"""Version of imp.find_module() that handles hierarchical module names"""
file = None
for tgt in fullname.split('.'):
if file is not None:
file.close() # close intermediate files
(file, filename, descr) = imp.find_module(tgt, path)
if descr[2] == imp.PY_SOURCE:
            break  # found the source file; do not load it
module = imp.load_module(tgt, file, filename, descr)
try:
path = module.__path__
except AttributeError:
raise ImportError, 'No source for module ' + module.__name__
return file, filename, descr
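# Hedged example (Python 2 era): resolving the source of a dotted module name
# with the helper above; 'idlelib.EditorWindow' assumes idlelib is importable
# on this installation.
#
#     f, filename, descr = _find_module('idlelib.EditorWindow')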
class EditorWindow:
from Percolator import Percolator
from ColorDelegator import ColorDelegator
from UndoDelegator import UndoDelegator
from IOBinding import IOBinding
import Bindings
from Tkinter import Toplevel
from MultiStatusBar import MultiStatusBar
help_url = None
def __init__(self, flist=None, filename=None, key=None, root=None):
if EditorWindow.help_url is None:
dochome = os.path.join(sys.prefix, 'Doc', 'index.html')
if sys.platform.count('linux'):
# look for html docs in a couple of standard places
pyver = 'python-docs-' + '%s.%s.%s' % sys.version_info[:3]
if os.path.isdir('/var/www/html/python/'): # "python2" rpm
dochome = '/var/www/html/python/index.html'
else:
basepath = '/usr/share/doc/' # standard location
dochome = os.path.join(basepath, pyver,
'Doc', 'index.html')
elif sys.platform[:3] == 'win':
chmfile = os.path.join(sys.prefix, 'Doc',
'Python%d%d.chm' % sys.version_info[:2])
if os.path.isfile(chmfile):
dochome = chmfile
dochome = os.path.normpath(dochome)
if os.path.isfile(dochome):
EditorWindow.help_url = dochome
else:
EditorWindow.help_url = "http://www.python.org/doc/current"
currentTheme=idleConf.CurrentTheme()
self.flist = flist
root = root or flist.root
self.root = root
self.menubar = Menu(root)
self.top = top = WindowList.ListedToplevel(root, menu=self.menubar)
if flist:
self.tkinter_vars = flist.vars
            # self.top.instance_dict makes flist.inversedict available to
            # configDialog.py so it can access all EditorWindow instances
self.top.instance_dict=flist.inversedict
else:
self.tkinter_vars = {} # keys: Tkinter event names
# values: Tkinter variable instances
self.recent_files_path=os.path.join(idleConf.GetUserCfgDir(),
'recent-files.lst')
self.vbar = vbar = Scrollbar(top, name='vbar')
self.text_frame = text_frame = Frame(top)
self.width = idleConf.GetOption('main','EditorWindow','width')
self.text = text = Text(text_frame, name='text', padx=5, wrap='none',
foreground=idleConf.GetHighlight(currentTheme,
'normal',fgBg='fg'),
background=idleConf.GetHighlight(currentTheme,
'normal',fgBg='bg'),
highlightcolor=idleConf.GetHighlight(currentTheme,
'hilite',fgBg='fg'),
highlightbackground=idleConf.GetHighlight(currentTheme,
'hilite',fgBg='bg'),
insertbackground=idleConf.GetHighlight(currentTheme,
'cursor',fgBg='fg'),
width=self.width,
height=idleConf.GetOption('main','EditorWindow','height') )
self.top.focused_widget = self.text
self.createmenubar()
self.apply_bindings()
self.top.protocol("WM_DELETE_WINDOW", self.close)
self.top.bind("<<close-window>>", self.close_event)
text.bind("<<cut>>", self.cut)
text.bind("<<copy>>", self.copy)
text.bind("<<paste>>", self.paste)
text.bind("<<center-insert>>", self.center_insert_event)
text.bind("<<help>>", self.help_dialog)
text.bind("<<python-docs>>", self.python_docs)
text.bind("<<about-idle>>", self.about_dialog)
text.bind("<<open-config-dialog>>", self.config_dialog)
text.bind("<<open-module>>", self.open_module)
text.bind("<<do-nothing>>", lambda event: "break")
text.bind("<<select-all>>", self.select_all)
text.bind("<<remove-selection>>", self.remove_selection)
text.bind("<<find>>", self.find_event)
text.bind("<<find-again>>", self.find_again_event)
text.bind("<<find-in-files>>", self.find_in_files_event)
text.bind("<<find-selection>>", self.find_selection_event)
text.bind("<<replace>>", self.replace_event)
text.bind("<<goto-line>>", self.goto_line_event)
text.bind("<3>", self.right_menu_event)
text.bind("<<smart-backspace>>",self.smart_backspace_event)
text.bind("<<newline-and-indent>>",self.newline_and_indent_event)
text.bind("<<smart-indent>>",self.smart_indent_event)
text.bind("<<indent-region>>",self.indent_region_event)
text.bind("<<dedent-region>>",self.dedent_region_event)
text.bind("<<comment-region>>",self.comment_region_event)
text.bind("<<uncomment-region>>",self.uncomment_region_event)
text.bind("<<tabify-region>>",self.tabify_region_event)
text.bind("<<untabify-region>>",self.untabify_region_event)
text.bind("<<toggle-tabs>>",self.toggle_tabs_event)
text.bind("<<change-indentwidth>>",self.change_indentwidth_event)
text.bind("<Left>", self.move_at_edge_if_selection(0))
text.bind("<Right>", self.move_at_edge_if_selection(1))
if flist:
flist.inversedict[self] = key
if key:
flist.dict[key] = self
text.bind("<<open-new-window>>", self.new_callback)
text.bind("<<close-all-windows>>", self.flist.close_all_callback)
text.bind("<<open-class-browser>>", self.open_class_browser)
text.bind("<<open-path-browser>>", self.open_path_browser)
self.set_status_bar()
vbar['command'] = text.yview
vbar.pack(side=RIGHT, fill=Y)
text['yscrollcommand'] = vbar.set
fontWeight='normal'
if idleConf.GetOption('main','EditorWindow','font-bold',type='bool'):
fontWeight='bold'
text.config(font=(idleConf.GetOption('main','EditorWindow','font'),
idleConf.GetOption('main','EditorWindow','font-size'),
fontWeight))
text_frame.pack(side=LEFT, fill=BOTH, expand=1)
text.pack(side=TOP, fill=BOTH, expand=1)
text.focus_set()
self.per = per = self.Percolator(text)
if self.ispythonsource(filename):
self.color = color = self.ColorDelegator()
per.insertfilter(color)
else:
self.color = None
self.undo = undo = self.UndoDelegator()
per.insertfilter(undo)
text.undo_block_start = undo.undo_block_start
text.undo_block_stop = undo.undo_block_stop
undo.set_saved_change_hook(self.saved_change_hook)
# IOBinding implements file I/O and printing functionality
self.io = io = self.IOBinding(self)
io.set_filename_change_hook(self.filename_change_hook)
# Create the recent files submenu
self.recent_files_menu = Menu(self.menubar)
self.menudict['file'].insert_cascade(3, label='Recent Files',
underline=0,
menu=self.recent_files_menu)
self.update_recent_files_list()
if filename:
if os.path.exists(filename) and not os.path.isdir(filename):
io.loadfile(filename)
else:
io.set_filename(filename)
self.saved_change_hook()
self.load_extensions()
menu = self.menudict.get('windows')
if menu:
end = menu.index("end")
if end is None:
end = -1
if end >= 0:
menu.add_separator()
end = end + 1
self.wmenu_end = end
WindowList.register_callback(self.postwindowsmenu)
# Some abstractions so IDLE extensions are cross-IDE
self.askyesno = tkMessageBox.askyesno
self.askinteger = tkSimpleDialog.askinteger
self.showerror = tkMessageBox.showerror
if self.extensions.has_key('AutoIndent'):
self.extensions['AutoIndent'].set_indentation_params(
self.ispythonsource(filename))
def new_callback(self, event):
dirname, basename = self.io.defaultfilename()
self.flist.new(dirname)
return "break"
def set_status_bar(self):
self.status_bar = self.MultiStatusBar(self.top)
self.status_bar.set_label('column', 'Col: ?', side=RIGHT)
self.status_bar.set_label('line', 'Ln: ?', side=RIGHT)
self.status_bar.pack(side=BOTTOM, fill=X)
self.text.bind('<KeyRelease>', self.set_line_and_column)
self.text.bind('<ButtonRelease>', self.set_line_and_column)
self.text.after_idle(self.set_line_and_column)
def set_line_and_column(self, event=None):
line, column = self.text.index(INSERT).split('.')
self.status_bar.set_label('column', 'Col: %s' % column)
self.status_bar.set_label('line', 'Ln: %s' % line)
menu_specs = [
("file", "_File"),
("edit", "_Edit"),
("format", "F_ormat"),
("run", "_Run"),
("options", "_Options"),
("windows", "_Windows"),
("help", "_Help"),
]
def createmenubar(self):
mbar = self.menubar
self.menudict = menudict = {}
for name, label in self.menu_specs:
underline, label = prepstr(label)
menudict[name] = menu = Menu(mbar, name=name)
mbar.add_cascade(label=label, menu=menu, underline=underline)
self.fill_menus()
self.base_helpmenu_length = self.menudict['help'].index(END)
self.reset_help_menu_entries()
def postwindowsmenu(self):
# Only called when Windows menu exists
menu = self.menudict['windows']
end = menu.index("end")
if end is None:
end = -1
if end > self.wmenu_end:
menu.delete(self.wmenu_end+1, end)
WindowList.add_windows_to_menu(menu)
rmenu = None
def right_menu_event(self, event):
self.text.tag_remove("sel", "1.0", "end")
self.text.mark_set("insert", "@%d,%d" % (event.x, event.y))
if not self.rmenu:
self.make_rmenu()
rmenu = self.rmenu
self.event = event
iswin = sys.platform[:3] == 'win'
if iswin:
self.text.config(cursor="arrow")
rmenu.tk_popup(event.x_root, event.y_root)
if iswin:
self.text.config(cursor="ibeam")
rmenu_specs = [
# ("Label", "<<virtual-event>>"), ...
("Close", "<<close-window>>"), # Example
]
def make_rmenu(self):
rmenu = Menu(self.text, tearoff=0)
for label, eventname in self.rmenu_specs:
def command(text=self.text, eventname=eventname):
text.event_generate(eventname)
rmenu.add_command(label=label, command=command)
self.rmenu = rmenu
def about_dialog(self, event=None):
aboutDialog.AboutDialog(self.top,'About IDLE')
def config_dialog(self, event=None):
configDialog.ConfigDialog(self.top,'Settings')
def help_dialog(self, event=None):
fn=os.path.join(os.path.abspath(os.path.dirname(__file__)),'help.txt')
textView.TextViewer(self.top,'Help',fn)
def python_docs(self, event=None):
if sys.platform[:3] == 'win':
os.startfile(self.help_url)
else:
webbrowser.open(self.help_url)
return "break"
def cut(self,event):
self.text.event_generate("<<Cut>>")
return "break"
def copy(self,event):
self.text.event_generate("<<Copy>>")
return "break"
def paste(self,event):
self.text.event_generate("<<Paste>>")
return "break"
def select_all(self, event=None):
self.text.tag_add("sel", "1.0", "end-1c")
self.text.mark_set("insert", "1.0")
self.text.see("insert")
return "break"
def remove_selection(self, event=None):
self.text.tag_remove("sel", "1.0", "end")
self.text.see("insert")
def move_at_edge_if_selection(self, edge_index):
"""Cursor move begins at start or end of selection
When a left/right cursor key is pressed create and return to Tkinter a
function which causes a cursor move from the associated edge of the
selection.
"""
self_text_index = self.text.index
self_text_mark_set = self.text.mark_set
edges_table = ("sel.first+1c", "sel.last-1c")
def move_at_edge(event):
if (event.state & 5) == 0: # no shift(==1) or control(==4) pressed
try:
self_text_index("sel.first")
self_text_mark_set("insert", edges_table[edge_index])
except TclError:
pass
return move_at_edge
def find_event(self, event):
SearchDialog.find(self.text)
return "break"
def find_again_event(self, event):
SearchDialog.find_again(self.text)
return "break"
def find_selection_event(self, event):
SearchDialog.find_selection(self.text)
return "break"
def find_in_files_event(self, event):
GrepDialog.grep(self.text, self.io, self.flist)
return "break"
def replace_event(self, event):
ReplaceDialog.replace(self.text)
return "break"
def goto_line_event(self, event):
text = self.text
lineno = tkSimpleDialog.askinteger("Goto",
"Go to line number:",parent=text)
if lineno is None:
return "break"
if lineno <= 0:
text.bell()
return "break"
text.mark_set("insert", "%d.0" % lineno)
text.see("insert")
def open_module(self, event=None):
# XXX Shouldn't this be in IOBinding or in FileList?
try:
name = self.text.get("sel.first", "sel.last")
except TclError:
name = ""
else:
name = name.strip()
name = tkSimpleDialog.askstring("Module",
"Enter the name of a Python module\n"
"to search on sys.path and open:",
parent=self.text, initialvalue=name)
if name:
name = name.strip()
if not name:
return
# XXX Ought to insert current file's directory in front of path
try:
(f, file, (suffix, mode, type)) = _find_module(name)
except (NameError, ImportError), msg:
tkMessageBox.showerror("Import error", str(msg), parent=self.text)
return
if type != imp.PY_SOURCE:
tkMessageBox.showerror("Unsupported type",
"%s is not a source module" % name, parent=self.text)
return
if f:
f.close()
if self.flist:
self.flist.open(file)
else:
self.io.loadfile(file)
def open_class_browser(self, event=None):
filename = self.io.filename
if not filename:
tkMessageBox.showerror(
"No filename",
"This buffer has no associated filename",
master=self.text)
self.text.focus_set()
return None
head, tail = os.path.split(filename)
base, ext = os.path.splitext(tail)
import ClassBrowser
ClassBrowser.ClassBrowser(self.flist, base, [head])
def open_path_browser(self, event=None):
import PathBrowser
PathBrowser.PathBrowser(self.flist)
def gotoline(self, lineno):
if lineno is not None and lineno > 0:
self.text.mark_set("insert", "%d.0" % lineno)
self.text.tag_remove("sel", "1.0", "end")
self.text.tag_add("sel", "insert", "insert +1l")
self.center()
def ispythonsource(self, filename):
if not filename:
return True
base, ext = os.path.splitext(os.path.basename(filename))
if os.path.normcase(ext) in (".py", ".pyw"):
return True
try:
f = open(filename)
line = f.readline()
f.close()
except IOError:
return False
return line.startswith('#!') and line.find('python') >= 0
def close_hook(self):
if self.flist:
self.flist.close_edit(self)
def set_close_hook(self, close_hook):
self.close_hook = close_hook
def filename_change_hook(self):
if self.flist:
self.flist.filename_changed_edit(self)
self.saved_change_hook()
self.top.update_windowlist_registry(self)
if self.ispythonsource(self.io.filename):
self.addcolorizer()
else:
self.rmcolorizer()
def addcolorizer(self):
if self.color:
return
self.per.removefilter(self.undo)
self.color = self.ColorDelegator()
self.per.insertfilter(self.color)
self.per.insertfilter(self.undo)
def rmcolorizer(self):
if not self.color:
return
self.per.removefilter(self.undo)
self.per.removefilter(self.color)
self.color = None
self.per.insertfilter(self.undo)
def ResetColorizer(self):
"Update the colour theme if it is changed"
# Called from configDialog.py
if self.color:
self.color = self.ColorDelegator()
self.per.insertfilter(self.color)
theme = idleConf.GetOption('main','Theme','name')
self.text.config(idleConf.GetHighlight(theme, "normal"))
def ResetFont(self):
"Update the text widgets' font if it is changed"
# Called from configDialog.py
fontWeight='normal'
if idleConf.GetOption('main','EditorWindow','font-bold',type='bool'):
fontWeight='bold'
self.text.config(font=(idleConf.GetOption('main','EditorWindow','font'),
idleConf.GetOption('main','EditorWindow','font-size'),
fontWeight))
def ResetKeybindings(self):
"Update the keybindings if they are changed"
# Called from configDialog.py
self.Bindings.default_keydefs=idleConf.GetCurrentKeySet()
keydefs = self.Bindings.default_keydefs
for event, keylist in keydefs.items():
self.text.event_delete(event)
self.apply_bindings()
#update menu accelerators
menuEventDict={}
for menu in self.Bindings.menudefs:
menuEventDict[menu[0]]={}
for item in menu[1]:
if item:
menuEventDict[menu[0]][prepstr(item[0])[1]]=item[1]
for menubarItem in self.menudict.keys():
menu=self.menudict[menubarItem]
end=menu.index(END)+1
for index in range(0,end):
if menu.type(index)=='command':
accel=menu.entrycget(index,'accelerator')
if accel:
itemName=menu.entrycget(index,'label')
event=''
if menuEventDict.has_key(menubarItem):
if menuEventDict[menubarItem].has_key(itemName):
event=menuEventDict[menubarItem][itemName]
if event:
accel=get_accelerator(keydefs, event)
menu.entryconfig(index,accelerator=accel)
def reset_help_menu_entries(self):
"Update the additional help entries on the Help menu"
help_list = idleConf.GetAllExtraHelpSourcesList()
helpmenu = self.menudict['help']
# first delete the extra help entries, if any
helpmenu_length = helpmenu.index(END)
if helpmenu_length > self.base_helpmenu_length:
helpmenu.delete((self.base_helpmenu_length + 1), helpmenu_length)
# then rebuild them
if help_list:
helpmenu.add_separator()
for entry in help_list:
cmd = self.__extra_help_callback(entry[1])
helpmenu.add_command(label=entry[0], command=cmd)
# and update the menu dictionary
self.menudict['help'] = helpmenu
def __extra_help_callback(self, helpfile):
"Create a callback with the helpfile value frozen at definition time"
def display_extra_help(helpfile=helpfile):
if not (helpfile.startswith('www') or helpfile.startswith('http')):
url = os.path.normpath(helpfile)
if sys.platform[:3] == 'win':
os.startfile(helpfile)
else:
webbrowser.open(helpfile)
return display_extra_help
def update_recent_files_list(self, new_file=None):
"Load and update the recent files list and menus"
rf_list = []
if os.path.exists(self.recent_files_path):
rf_list_file = open(self.recent_files_path,'r')
try:
rf_list = rf_list_file.readlines()
finally:
rf_list_file.close()
if new_file:
new_file = os.path.abspath(new_file) + '\n'
if new_file in rf_list:
rf_list.remove(new_file) # move to top
rf_list.insert(0, new_file)
# clean and save the recent files list
bad_paths = []
for path in rf_list:
if '\0' in path or not os.path.exists(path[0:-1]):
bad_paths.append(path)
rf_list = [path for path in rf_list if path not in bad_paths]
ulchars = "1234567890ABCDEFGHIJK"
rf_list = rf_list[0:len(ulchars)]
rf_file = open(self.recent_files_path, 'w')
try:
rf_file.writelines(rf_list)
finally:
rf_file.close()
# for each edit window instance, construct the recent files menu
for instance in self.top.instance_dict.keys():
menu = instance.recent_files_menu
menu.delete(1, END) # clear, and rebuild:
for i, file in zip(count(), rf_list):
file_name = file[0:-1] # zap \n
callback = instance.__recent_file_callback(file_name)
menu.add_command(label=ulchars[i] + " " + file_name,
command=callback,
underline=0)
def __recent_file_callback(self, file_name):
def open_recent_file(fn_closure=file_name):
self.io.open(editFile=fn_closure)
return open_recent_file
def saved_change_hook(self):
short = self.short_title()
long = self.long_title()
if short and long:
title = short + " - " + long
elif short:
title = short
elif long:
title = long
else:
title = "Untitled"
icon = short or long or title
if not self.get_saved():
title = "*%s*" % title
icon = "*%s" % icon
self.top.wm_title(title)
self.top.wm_iconname(icon)
def get_saved(self):
return self.undo.get_saved()
def set_saved(self, flag):
self.undo.set_saved(flag)
def reset_undo(self):
self.undo.reset_undo()
def short_title(self):
filename = self.io.filename
if filename:
filename = os.path.basename(filename)
return filename
def long_title(self):
return self.io.filename or ""
def center_insert_event(self, event):
self.center()
def center(self, mark="insert"):
text = self.text
top, bot = self.getwindowlines()
lineno = self.getlineno(mark)
height = bot - top
newtop = max(1, lineno - height//2)
text.yview(float(newtop))
def getwindowlines(self):
text = self.text
top = self.getlineno("@0,0")
bot = self.getlineno("@0,65535")
if top == bot and text.winfo_height() == 1:
# Geometry manager hasn't run yet
height = int(text['height'])
bot = top + height - 1
return top, bot
def getlineno(self, mark="insert"):
text = self.text
return int(float(text.index(mark)))
def get_geometry(self):
"Return (width, height, x, y)"
geom = self.top.wm_geometry()
m = re.match(r"(\d+)x(\d+)\+(-?\d+)\+(-?\d+)", geom)
        return tuple(map(int, m.groups()))
def close_event(self, event):
self.close()
def maybesave(self):
if self.io:
if not self.get_saved():
if self.top.state()!='normal':
self.top.deiconify()
self.top.lower()
self.top.lift()
return self.io.maybesave()
def close(self):
reply = self.maybesave()
if reply != "cancel":
self._close()
return reply
def _close(self):
if self.io.filename:
self.update_recent_files_list(new_file=self.io.filename)
WindowList.unregister_callback(self.postwindowsmenu)
if self.close_hook:
self.close_hook()
self.flist = None
colorizing = 0
self.unload_extensions()
self.io.close(); self.io = None
self.undo = None # XXX
if self.color:
colorizing = self.color.colorizing
doh = colorizing and self.top
self.color.close(doh) # Cancel colorization
self.text = None
self.tkinter_vars = None
self.per.close(); self.per = None
if not colorizing:
self.top.destroy()
def load_extensions(self):
self.extensions = {}
self.load_standard_extensions()
def unload_extensions(self):
for ins in self.extensions.values():
if hasattr(ins, "close"):
ins.close()
self.extensions = {}
def load_standard_extensions(self):
for name in self.get_standard_extension_names():
try:
self.load_extension(name)
except:
print "Failed to load extension", repr(name)
import traceback
traceback.print_exc()
def get_standard_extension_names(self):
return idleConf.GetExtensions(editor_only=True)
def load_extension(self, name):
try:
mod = __import__(name, globals(), locals(), [])
except ImportError:
print "\nFailed to import extension: ", name
return None
cls = getattr(mod, name)
keydefs = idleConf.GetExtensionBindings(name)
if hasattr(cls, "menudefs"):
self.fill_menus(cls.menudefs, keydefs)
ins = cls(self)
self.extensions[name] = ins
if keydefs:
self.apply_bindings(keydefs)
for vevent in keydefs.keys():
methodname = vevent.replace("-", "_")
while methodname[:1] == '<':
methodname = methodname[1:]
while methodname[-1:] == '>':
methodname = methodname[:-1]
methodname = methodname + "_event"
if hasattr(ins, methodname):
self.text.bind(vevent, getattr(ins, methodname))
return ins
def apply_bindings(self, keydefs=None):
if keydefs is None:
keydefs = self.Bindings.default_keydefs
text = self.text
text.keydefs = keydefs
for event, keylist in keydefs.items():
if keylist:
text.event_add(event, *keylist)
def fill_menus(self, menudefs=None, keydefs=None):
"""Add appropriate entries to the menus and submenus
Menus that are absent or None in self.menudict are ignored.
"""
if menudefs is None:
menudefs = self.Bindings.menudefs
if keydefs is None:
keydefs = self.Bindings.default_keydefs
menudict = self.menudict
text = self.text
for mname, entrylist in menudefs:
menu = menudict.get(mname)
if not menu:
continue
for entry in entrylist:
if not entry:
menu.add_separator()
else:
label, eventname = entry
checkbutton = (label[:1] == '!')
if checkbutton:
label = label[1:]
underline, label = prepstr(label)
accelerator = get_accelerator(keydefs, eventname)
def command(text=text, eventname=eventname):
text.event_generate(eventname)
if checkbutton:
var = self.get_var_obj(eventname, BooleanVar)
menu.add_checkbutton(label=label, underline=underline,
command=command, accelerator=accelerator,
variable=var)
else:
menu.add_command(label=label, underline=underline,
command=command,
accelerator=accelerator)
def getvar(self, name):
var = self.get_var_obj(name)
if var:
value = var.get()
return value
else:
raise NameError, name
def setvar(self, name, value, vartype=None):
var = self.get_var_obj(name, vartype)
if var:
var.set(value)
else:
raise NameError, name
def get_var_obj(self, name, vartype=None):
var = self.tkinter_vars.get(name)
if not var and vartype:
# create a Tkinter variable object with self.text as master:
self.tkinter_vars[name] = var = vartype(self.text)
return var
# Tk implementations of "virtual text methods" -- each platform
# reusing IDLE's support code needs to define these for its GUI's
# flavor of widget.
# Is character at text_index in a Python string? Return 0 for
# "guaranteed no", true for anything else. This info is expensive
# to compute ab initio, but is probably already known by the
# platform's colorizer.
def is_char_in_string(self, text_index):
if self.color:
# Return true iff colorizer hasn't (re)gotten this far
# yet, or the character is tagged as being in a string
return self.text.tag_prevrange("TODO", text_index) or \
"STRING" in self.text.tag_names(text_index)
else:
# The colorizer is missing: assume the worst
return 1
# If a selection is defined in the text widget, return (start,
# end) as Tkinter text indices, otherwise return (None, None)
def get_selection_indices(self):
try:
first = self.text.index("sel.first")
last = self.text.index("sel.last")
return first, last
except TclError:
return None, None
# Return the text widget's current view of what a tab stop means
# (equivalent width in spaces).
def get_tabwidth(self):
current = self.text['tabs'] or TK_TABWIDTH_DEFAULT
return int(current)
# Set the text widget's current view of what a tab stop means.
def set_tabwidth(self, newtabwidth):
text = self.text
if self.get_tabwidth() != newtabwidth:
pixels = text.tk.call("font", "measure", text["font"],
"-displayof", text.master,
"n" * newtabwidth)
text.configure(tabs=pixels)
### begin autoindent code ###
# usetabs true -> literal tab characters are used by indent and
# dedent cmds, possibly mixed with spaces if
# indentwidth is not a multiple of tabwidth
# false -> tab characters are converted to spaces by indent
# and dedent cmds, and ditto TAB keystrokes
# indentwidth is the number of characters per logical indent level.
# tabwidth is the display width of a literal tab character.
# CAUTION: telling Tk to use anything other than its default
# tab setting causes it to use an entirely different tabbing algorithm,
# treating tab stops as fixed distances from the left margin.
# Nobody expects this, so for now tabwidth should never be changed.
usetabs = 0
indentwidth = 4
tabwidth = 8 # for IDLE use, must remain 8 until Tk is fixed
# If context_use_ps1 is true, parsing searches back for a ps1 line;
# else searches for a popular (if, def, ...) Python stmt.
context_use_ps1 = 0
# When searching backwards for a reliable place to begin parsing,
# first start num_context_lines[0] lines back, then
# num_context_lines[1] lines back if that didn't work, and so on.
# The last value should be huge (larger than the # of lines in a
# conceivable file).
# Making the initial values larger slows things down more often.
num_context_lines = 50, 500, 5000000
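    # Example: with the defaults above (usetabs=0, indentwidth=4,
    # tabwidth=8) each indent level is four literal spaces; with
    # usetabs=1 two levels would display as one tab ('\t') and three
    # levels as a tab plus four spaces ('\t    ').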
def config(self, **options):
for key, value in options.items():
if key == 'usetabs':
self.usetabs = value
elif key == 'indentwidth':
self.indentwidth = value
elif key == 'tabwidth':
self.tabwidth = value
elif key == 'context_use_ps1':
self.context_use_ps1 = value
else:
raise KeyError, "bad option name: %r" % (key,)
# If ispythonsource and guess are true, guess a good value for
# indentwidth based on file content (if possible), and if
# indentwidth != tabwidth set usetabs false.
# In any case, adjust the Text widget's view of what a tab
# character means.
def set_indentation_params(self, ispythonsource, guess=1):
if guess and ispythonsource:
i = self.guess_indent()
if 2 <= i <= 8:
self.indentwidth = i
if self.indentwidth != self.tabwidth:
self.usetabs = 0
self.set_tabwidth(self.tabwidth)
def smart_backspace_event(self, event):
text = self.text
first, last = self.get_selection_indices()
if first and last:
text.delete(first, last)
text.mark_set("insert", first)
return "break"
# Delete whitespace left, until hitting a real char or closest
# preceding virtual tab stop.
chars = text.get("insert linestart", "insert")
if chars == '':
if text.compare("insert", ">", "1.0"):
# easy: delete preceding newline
text.delete("insert-1c")
else:
text.bell() # at start of buffer
return "break"
if chars[-1] not in " \t":
# easy: delete preceding real char
text.delete("insert-1c")
return "break"
# Ick. It may require *inserting* spaces if we back up over a
# tab character! This is written to be clear, not fast.
tabwidth = self.tabwidth
have = len(chars.expandtabs(tabwidth))
assert have > 0
want = ((have - 1) // self.indentwidth) * self.indentwidth
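        # e.g. with indentwidth=4 and have=6, want is ((6-1)//4)*4 == 4,
        # i.e. the closest virtual tab stop strictly left of the cursor.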
# Debug prompt is multilined....
last_line_of_prompt = sys.ps1.split('\n')[-1]
ncharsdeleted = 0
while 1:
if chars == last_line_of_prompt:
break
chars = chars[:-1]
ncharsdeleted = ncharsdeleted + 1
have = len(chars.expandtabs(tabwidth))
if have <= want or chars[-1] not in " \t":
break
text.undo_block_start()
text.delete("insert-%dc" % ncharsdeleted, "insert")
if have < want:
text.insert("insert", ' ' * (want - have))
text.undo_block_stop()
return "break"
def smart_indent_event(self, event):
# if intraline selection:
# delete it
# elif multiline selection:
# do indent-region & return
# indent one level
text = self.text
first, last = self.get_selection_indices()
text.undo_block_start()
try:
if first and last:
if index2line(first) != index2line(last):
return self.indent_region_event(event)
text.delete(first, last)
text.mark_set("insert", first)
prefix = text.get("insert linestart", "insert")
raw, effective = classifyws(prefix, self.tabwidth)
if raw == len(prefix):
# only whitespace to the left
self.reindent_to(effective + self.indentwidth)
else:
if self.usetabs:
pad = '\t'
else:
effective = len(prefix.expandtabs(self.tabwidth))
n = self.indentwidth
pad = ' ' * (n - effective % n)
text.insert("insert", pad)
text.see("insert")
return "break"
finally:
text.undo_block_stop()
def newline_and_indent_event(self, event):
text = self.text
first, last = self.get_selection_indices()
text.undo_block_start()
try:
if first and last:
text.delete(first, last)
text.mark_set("insert", first)
line = text.get("insert linestart", "insert")
i, n = 0, len(line)
while i < n and line[i] in " \t":
i = i+1
if i == n:
# the cursor is in or at leading indentation in a continuation
# line; just inject an empty line at the start
text.insert("insert linestart", '\n')
return "break"
indent = line[:i]
# strip whitespace before insert point unless it's in the prompt
i = 0
last_line_of_prompt = sys.ps1.split('\n')[-1]
while line and line[-1] in " \t" and line != last_line_of_prompt:
line = line[:-1]
i = i+1
if i:
text.delete("insert - %d chars" % i, "insert")
# strip whitespace after insert point
while text.get("insert") in " \t":
text.delete("insert")
# start new line
text.insert("insert", '\n')
# adjust indentation for continuations and block
# open/close first need to find the last stmt
lno = index2line(text.index('insert'))
y = PyParse.Parser(self.indentwidth, self.tabwidth)
for context in self.num_context_lines:
startat = max(lno - context, 1)
startatindex = repr(startat) + ".0"
rawtext = text.get(startatindex, "insert")
y.set_str(rawtext)
bod = y.find_good_parse_start(
self.context_use_ps1,
self._build_char_in_string_func(startatindex))
if bod is not None or startat == 1:
break
y.set_lo(bod or 0)
c = y.get_continuation_type()
if c != PyParse.C_NONE:
# The current stmt hasn't ended yet.
if c == PyParse.C_STRING:
# inside a string; just mimic the current indent
text.insert("insert", indent)
elif c == PyParse.C_BRACKET:
# line up with the first (if any) element of the
# last open bracket structure; else indent one
# level beyond the indent of the line with the
# last open bracket
self.reindent_to(y.compute_bracket_indent())
elif c == PyParse.C_BACKSLASH:
# if more than one line in this stmt already, just
# mimic the current indent; else if initial line
# has a start on an assignment stmt, indent to
# beyond leftmost =; else to beyond first chunk of
# non-whitespace on initial line
if y.get_num_lines_in_stmt() > 1:
text.insert("insert", indent)
else:
self.reindent_to(y.compute_backslash_indent())
else:
assert 0, "bogus continuation type %r" % (c,)
return "break"
# This line starts a brand new stmt; indent relative to
# indentation of initial line of closest preceding
# interesting stmt.
indent = y.get_base_indent_string()
text.insert("insert", indent)
if y.is_block_opener():
self.smart_indent_event(event)
elif indent and y.is_block_closer():
self.smart_backspace_event(event)
return "break"
finally:
text.see("insert")
text.undo_block_stop()
# Our editwin provides a is_char_in_string function that works
# with a Tk text index, but PyParse only knows about offsets into
# a string. This builds a function for PyParse that accepts an
# offset.
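    # For example, with startindex "10.0" the returned function maps
    # offset 5 to is_char_in_string("10.0+5c"), five chars into line 10.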
def _build_char_in_string_func(self, startindex):
def inner(offset, _startindex=startindex,
_icis=self.is_char_in_string):
return _icis(_startindex + "+%dc" % offset)
return inner
def indent_region_event(self, event):
head, tail, chars, lines = self.get_region()
for pos in range(len(lines)):
line = lines[pos]
if line:
raw, effective = classifyws(line, self.tabwidth)
effective = effective + self.indentwidth
lines[pos] = self._make_blanks(effective) + line[raw:]
self.set_region(head, tail, chars, lines)
return "break"
def dedent_region_event(self, event):
head, tail, chars, lines = self.get_region()
for pos in range(len(lines)):
line = lines[pos]
if line:
raw, effective = classifyws(line, self.tabwidth)
effective = max(effective - self.indentwidth, 0)
lines[pos] = self._make_blanks(effective) + line[raw:]
self.set_region(head, tail, chars, lines)
return "break"
def comment_region_event(self, event):
head, tail, chars, lines = self.get_region()
for pos in range(len(lines) - 1):
line = lines[pos]
lines[pos] = '##' + line
self.set_region(head, tail, chars, lines)
def uncomment_region_event(self, event):
head, tail, chars, lines = self.get_region()
for pos in range(len(lines)):
line = lines[pos]
if not line:
continue
if line[:2] == '##':
line = line[2:]
elif line[:1] == '#':
line = line[1:]
lines[pos] = line
self.set_region(head, tail, chars, lines)
def tabify_region_event(self, event):
head, tail, chars, lines = self.get_region()
tabwidth = self._asktabwidth()
for pos in range(len(lines)):
line = lines[pos]
if line:
raw, effective = classifyws(line, tabwidth)
ntabs, nspaces = divmod(effective, tabwidth)
lines[pos] = '\t' * ntabs + ' ' * nspaces + line[raw:]
self.set_region(head, tail, chars, lines)
def untabify_region_event(self, event):
head, tail, chars, lines = self.get_region()
tabwidth = self._asktabwidth()
for pos in range(len(lines)):
lines[pos] = lines[pos].expandtabs(tabwidth)
self.set_region(head, tail, chars, lines)
def toggle_tabs_event(self, event):
if self.askyesno(
"Toggle tabs",
"Turn tabs " + ("on", "off")[self.usetabs] + "?",
parent=self.text):
self.usetabs = not self.usetabs
return "break"
# XXX this isn't bound to anything -- see class tabwidth comments
def change_tabwidth_event(self, event):
new = self._asktabwidth()
if new != self.tabwidth:
self.tabwidth = new
self.set_indentation_params(0, guess=0)
return "break"
def change_indentwidth_event(self, event):
new = self.askinteger(
"Indent width",
"New indent width (2-16)",
parent=self.text,
initialvalue=self.indentwidth,
minvalue=2,
maxvalue=16)
if new and new != self.indentwidth:
self.indentwidth = new
return "break"
def get_region(self):
text = self.text
first, last = self.get_selection_indices()
if first and last:
head = text.index(first + " linestart")
tail = text.index(last + "-1c lineend +1c")
else:
head = text.index("insert linestart")
tail = text.index("insert lineend +1c")
chars = text.get(head, tail)
lines = chars.split("\n")
return head, tail, chars, lines
def set_region(self, head, tail, chars, lines):
text = self.text
newchars = "\n".join(lines)
if newchars == chars:
text.bell()
return
text.tag_remove("sel", "1.0", "end")
text.mark_set("insert", head)
text.undo_block_start()
text.delete(head, tail)
text.insert(head, newchars)
text.undo_block_stop()
text.tag_add("sel", head, "insert")
# Make string that displays as n leading blanks.
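    # e.g. with usetabs true and tabwidth=8, _make_blanks(10) returns
    # '\t  ' (one tab plus two spaces); with usetabs false, ten spaces.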
def _make_blanks(self, n):
if self.usetabs:
ntabs, nspaces = divmod(n, self.tabwidth)
return '\t' * ntabs + ' ' * nspaces
else:
return ' ' * n
# Delete from beginning of line to insert point, then reinsert
# column logical (meaning use tabs if appropriate) spaces.
def reindent_to(self, column):
text = self.text
text.undo_block_start()
if text.compare("insert linestart", "!=", "insert"):
text.delete("insert linestart", "insert")
if column:
text.insert("insert", self._make_blanks(column))
text.undo_block_stop()
def _asktabwidth(self):
return self.askinteger(
"Tab width",
"Spaces per tab? (2-16)",
parent=self.text,
initialvalue=self.indentwidth,
minvalue=2,
maxvalue=16) or self.tabwidth
# Guess indentwidth from text content.
# Return guessed indentwidth. This should not be believed unless
# it's in a reasonable range (e.g., it will be 0 if no indented
# blocks are found).
def guess_indent(self):
opener, indented = IndentSearcher(self.text, self.tabwidth).run()
if opener and indented:
raw, indentsmall = classifyws(opener, self.tabwidth)
raw, indentlarge = classifyws(indented, self.tabwidth)
else:
indentsmall = indentlarge = 0
return indentlarge - indentsmall
# "line.col" -> line, as an int
def index2line(index):
return int(float(index))
# Look at the leading whitespace in s.
# Return pair (# of leading ws characters,
# effective # of leading blanks after expanding
# tabs to width tabwidth)
def classifyws(s, tabwidth):
raw = effective = 0
for ch in s:
if ch == ' ':
raw = raw + 1
effective = effective + 1
elif ch == '\t':
raw = raw + 1
effective = (effective // tabwidth + 1) * tabwidth
else:
break
return raw, effective
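# For example, classifyws("\t  x", 8) returns (3, 10): one tab and two
# spaces precede 'x', and they expand to ten columns at tabwidth 8.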
import tokenize
_tokenize = tokenize
del tokenize
class IndentSearcher:
# .run() chews over the Text widget, looking for a block opener
# and the stmt following it. Returns a pair,
# (line containing block opener, line containing stmt)
# Either or both may be None.
def __init__(self, text, tabwidth):
self.text = text
self.tabwidth = tabwidth
self.i = self.finished = 0
self.blkopenline = self.indentedline = None
def readline(self):
if self.finished:
return ""
i = self.i = self.i + 1
mark = repr(i) + ".0"
if self.text.compare(mark, ">=", "end"):
return ""
return self.text.get(mark, mark + " lineend+1c")
def tokeneater(self, type, token, start, end, line,
INDENT=_tokenize.INDENT,
NAME=_tokenize.NAME,
OPENERS=('class', 'def', 'for', 'if', 'try', 'while')):
if self.finished:
pass
elif type == NAME and token in OPENERS:
self.blkopenline = line
elif type == INDENT and self.blkopenline:
self.indentedline = line
self.finished = 1
def run(self):
save_tabsize = _tokenize.tabsize
_tokenize.tabsize = self.tabwidth
try:
try:
_tokenize.tokenize(self.readline, self.tokeneater)
except _tokenize.TokenError:
# since we cut off the tokenizer early, we can trigger
# spurious errors
pass
finally:
_tokenize.tabsize = save_tabsize
return self.blkopenline, self.indentedline
### end autoindent code ###
def prepstr(s):
    # Helper to remove the underscore from a string and report its
    # position, e.g. prepstr("Co_py") returns (2, "Copy").
i = s.find('_')
if i >= 0:
s = s[:i] + s[i+1:]
return i, s
keynames = {
'bracketleft': '[',
'bracketright': ']',
'slash': '/',
}
def get_accelerator(keydefs, eventname):
keylist = keydefs.get(eventname)
if not keylist:
return ""
s = keylist[0]
s = re.sub(r"-[a-z]\b", lambda m: m.group().upper(), s)
s = re.sub(r"\b\w+\b", lambda m: keynames.get(m.group(), m.group()), s)
s = re.sub("Key-", "", s)
s = re.sub("Cancel","Ctrl-Break",s) # dscherer@cmu.edu
s = re.sub("Control-", "Ctrl-", s)
s = re.sub("-", "+", s)
s = re.sub("><", " ", s)
s = re.sub("<", "", s)
s = re.sub(">", "", s)
return s
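# For example, get_accelerator({'<<copy>>': ['<Control-c>']}, '<<copy>>')
# returns 'Ctrl+C' (the binding shown is illustrative, not necessarily
# one of IDLE's defaults).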
def fixwordbreaks(root):
# Make sure that Tk's double-click and next/previous word
# operations use our definition of a word (i.e. an identifier)
tk = root.tk
tk.call('tcl_wordBreakAfter', 'a b', 0) # make sure word.tcl is loaded
tk.call('set', 'tcl_wordchars', '[a-zA-Z0-9_]')
tk.call('set', 'tcl_nonwordchars', '[^a-zA-Z0-9_]')
def test():
root = Tk()
fixwordbreaks(root)
root.withdraw()
if sys.argv[1:]:
filename = sys.argv[1]
else:
filename = None
edit = EditorWindow(root=root, filename=filename)
edit.set_close_hook(root.quit)
root.mainloop()
root.destroy()
if __name__ == '__main__':
test()
|
trivoldus28/pulsarch-verilog
|
tools/local/bas-release/bas,3.9-SunOS-i386/lib/python/lib/python2.4/idlelib/EditorWindow.py
|
Python
|
gpl-2.0
| 52,368
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-22 23:35
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fleetactivitytracking', '0003_auto_20160906_2354'),
]
operations = [
migrations.AlterField(
model_name='fatlink',
name='fleet',
field=models.CharField(default='', max_length=254),
),
]
|
Adarnof/allianceauth
|
allianceauth/fleetactivitytracking/migrations/0004_make_strings_more_stringy.py
|
Python
|
gpl-2.0
| 478
|
# -*- coding: utf-8 -*-
import json
import re
from time import sleep
from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
from module.network.RequestFactory import getURL
class OpenloadIo(SimpleHoster):
__name__ = "OpenloadIo"
__type__ = "hoster"
__version__ = "0.06"
__status__ = "testing"
    _FILE_ID_PATTERN = r'/f/([\w\-_]+)/?'
__pattern__ = r'https?://(?:www\.)?openload\.(?:co|io)' + _FILE_ID_PATTERN
__description__ = """Openload.co hoster plugin"""
__license__ = "GPLv3"
__authors__ = [(None, None)]
    # The API reference that this implementation uses is available at https://openload.co/api
_API_BASE_URL = 'https://api.openload.co/1'
_DOWNLOAD_TICKET_URI_PATTERN = '/file/dlticket?file={0}'
_DOWNLOAD_FILE_URI_PATTERN = '/file/dl?file={0}&ticket={1}'
_FILE_INFO_URI_PATTERN = '/file/info?file={0}'
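    # e.g. _API_BASE_URL + _FILE_INFO_URI_PATTERN.format('abc123') yields
    # 'https://api.openload.co/1/file/info?file=abc123' ('abc123' is a
    # made-up file id for illustration).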
def setup(self):
self.multiDL = True
self.chunk_limit = 1
@classmethod
def get_info(cls, url="", html=""):
file_id = re.findall(cls._FILE_ID_PATTERN, url, re.I)
if not file_id:
return super(OpenloadIo, cls).get_info(url)
file_id = file_id[0]
info_json = cls._load_json(cls._FILE_INFO_URI_PATTERN.format(file_id))
file_info = info_json['result'][file_id]
return {'name': file_info['name'],
'size': file_info['size'],
'status': 3 if url.strip() else 8,
'url': url}
def handle_free(self, pyfile):
        # If the link is being handled here, it matched _FILE_ID_PATTERN,
        # so indexing [0] below is safe.
file_id = re.findall(self._FILE_ID_PATTERN, pyfile.url, re.I)[0]
ticket_json = self._load_json(self._DOWNLOAD_TICKET_URI_PATTERN.format(file_id))
wait_time = ticket_json['result']['wait_time']
sleep(wait_time + 0.1)
ticket = ticket_json['result']['ticket']
download_json = self._load_json(self._DOWNLOAD_FILE_URI_PATTERN.format(file_id, ticket))
self.link = download_json['result']['url']
@classmethod
def _load_json(cls, uri):
return json.loads(
getURL(cls._API_BASE_URL + uri))
getInfo = create_getInfo(OpenloadIo)
|
mationic/pyload
|
module/plugins/hoster/OpenloadIo.py
|
Python
|
gpl-3.0
| 2,286
|
from __future__ import absolute_import # Use 3.x import model.
#-------------------------------------------------------------------------------
__all__ = ()
from numpy.linalg import *
from numpy.fft import *
from numpy.random import *
from numpy.ma import *
from numpy import *
from . import gui
|
jpbarraca/dRonin
|
python/logviewer/__init__.py
|
Python
|
gpl-3.0
| 300
|
""" Views related to Account Settings. """
import logging
from datetime import datetime
import six
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.shortcuts import redirect
from django.urls import reverse
from django.utils.translation import ugettext as _
from django.views.decorators.http import require_http_methods
from django_countries import countries
import third_party_auth
from edxmako.shortcuts import render_to_response
from lms.djangoapps.commerce.models import CommerceConfiguration
from lms.djangoapps.commerce.utils import EcommerceService
from openedx.core.djangoapps.commerce.utils import ecommerce_api_client
from openedx.core.djangoapps.dark_lang.models import DarkLangConfig
from openedx.core.djangoapps.lang_pref.api import all_languages, released_languages
from openedx.core.djangoapps.programs.models import ProgramsApiConfig
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
from openedx.core.djangoapps.user_api.accounts.toggles import (
should_redirect_to_account_microfrontend,
should_redirect_to_order_history_microfrontend
)
from openedx.core.djangoapps.user_api.preferences.api import get_user_preferences
from openedx.core.lib.edx_api_utils import get_edx_api_data
from openedx.core.lib.time_zone_utils import TIME_ZONE_CHOICES
from openedx.features.enterprise_support.api import enterprise_customer_for_request
from openedx.features.enterprise_support.utils import update_account_settings_context_for_enterprise
from student.models import UserProfile
from third_party_auth import pipeline
from util.date_utils import strftime_localized
log = logging.getLogger(__name__)
@login_required
@require_http_methods(['GET'])
def account_settings(request):
"""Render the current user's account settings page.
Args:
request (HttpRequest)
Returns:
HttpResponse: 200 if the page was sent successfully
HttpResponse: 302 if not logged in (redirect to login page)
HttpResponse: 405 if using an unsupported HTTP method
Example usage:
GET /account/settings
"""
if should_redirect_to_account_microfrontend():
url = settings.ACCOUNT_MICROFRONTEND_URL
duplicate_provider = pipeline.get_duplicate_provider(messages.get_messages(request))
if duplicate_provider:
url = '{url}?{params}'.format(
url=url,
params=six.moves.urllib.parse.urlencode({
'duplicate_provider': duplicate_provider,
}),
)
return redirect(url)
context = account_settings_context(request)
return render_to_response('student_account/account_settings.html', context)
def account_settings_context(request):
""" Context for the account settings page.
Args:
request: The request object.
Returns:
dict
"""
user = request.user
year_of_birth_options = [(six.text_type(year), six.text_type(year)) for year in UserProfile.VALID_YEARS]
try:
user_orders = get_user_orders(user)
except: # pylint: disable=bare-except
log.exception('Error fetching order history from Otto.')
        # Return an empty order list: the account settings page expects a
        # list and would break if the exception propagated.
user_orders = []
beta_language = {}
dark_lang_config = DarkLangConfig.current()
if dark_lang_config.enable_beta_languages:
user_preferences = get_user_preferences(user)
pref_language = user_preferences.get('pref-lang')
if pref_language in dark_lang_config.beta_languages_list:
beta_language['code'] = pref_language
beta_language['name'] = settings.LANGUAGE_DICT.get(pref_language)
context = {
'auth': {},
'duplicate_provider': None,
'nav_hidden': True,
'fields': {
'country': {
'options': list(countries),
}, 'gender': {
'options': [(choice[0], _(choice[1])) for choice in UserProfile.GENDER_CHOICES],
}, 'language': {
'options': released_languages(),
}, 'level_of_education': {
'options': [(choice[0], _(choice[1])) for choice in UserProfile.LEVEL_OF_EDUCATION_CHOICES],
}, 'password': {
'url': reverse('password_reset'),
}, 'year_of_birth': {
'options': year_of_birth_options,
}, 'preferred_language': {
'options': all_languages(),
}, 'time_zone': {
'options': TIME_ZONE_CHOICES,
}
},
'platform_name': configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME),
'password_reset_support_link': configuration_helpers.get_value(
'PASSWORD_RESET_SUPPORT_LINK', settings.PASSWORD_RESET_SUPPORT_LINK
) or settings.SUPPORT_SITE_LINK,
'user_accounts_api_url': reverse("accounts_api", kwargs={'username': user.username}),
'user_preferences_api_url': reverse('preferences_api', kwargs={'username': user.username}),
'disable_courseware_js': True,
'show_program_listing': ProgramsApiConfig.is_enabled(),
'show_dashboard_tabs': True,
'order_history': user_orders,
'disable_order_history_tab': should_redirect_to_order_history_microfrontend(),
'enable_account_deletion': configuration_helpers.get_value(
'ENABLE_ACCOUNT_DELETION', settings.FEATURES.get('ENABLE_ACCOUNT_DELETION', False)
),
'extended_profile_fields': _get_extended_profile_fields(),
'beta_language': beta_language,
}
enterprise_customer = enterprise_customer_for_request(request)
update_account_settings_context_for_enterprise(context, enterprise_customer, user)
if third_party_auth.is_enabled():
# If the account on the third party provider is already connected with another edX account,
# we display a message to the user.
context['duplicate_provider'] = pipeline.get_duplicate_provider(messages.get_messages(request))
auth_states = pipeline.get_provider_user_states(user)
context['auth']['providers'] = [{
'id': state.provider.provider_id,
'name': state.provider.name, # The name of the provider e.g. Facebook
'connected': state.has_account, # Whether the user's edX account is connected with the provider.
# If the user is not connected, they should be directed to this page to authenticate
# with the particular provider, as long as the provider supports initiating a login.
'connect_url': pipeline.get_login_url(
state.provider.provider_id,
pipeline.AUTH_ENTRY_ACCOUNT_SETTINGS,
# The url the user should be directed to after the auth process has completed.
redirect_url=reverse('account_settings'),
),
'accepts_logins': state.provider.accepts_logins,
# If the user is connected, sending a POST request to this url removes the connection
# information for this provider from their edX account.
'disconnect_url': pipeline.get_disconnect_url(state.provider.provider_id, state.association_id),
# We only want to include providers if they are either currently available to be logged
# in with, or if the user is already authenticated with them.
} for state in auth_states if state.provider.display_for_login or state.has_account]
return context
def get_user_orders(user):
"""Given a user, get the detail of all the orders from the Ecommerce service.
Args:
user (User): The user to authenticate as when requesting ecommerce.
Returns:
list of dict, representing orders returned by the Ecommerce service.
"""
user_orders = []
commerce_configuration = CommerceConfiguration.current()
user_query = {'username': user.username}
use_cache = commerce_configuration.is_cache_enabled
cache_key = commerce_configuration.CACHE_KEY + '.' + str(user.id) if use_cache else None
api = ecommerce_api_client(user)
commerce_user_orders = get_edx_api_data(
commerce_configuration, 'orders', api=api, querystring=user_query, cache_key=cache_key
)
for order in commerce_user_orders:
if order['status'].lower() == 'complete':
date_placed = datetime.strptime(order['date_placed'], "%Y-%m-%dT%H:%M:%SZ")
order_data = {
'number': order['number'],
'price': order['total_excl_tax'],
'order_date': strftime_localized(date_placed, 'SHORT_DATE'),
'receipt_url': EcommerceService().get_receipt_page_url(order['number']),
'lines': order['lines'],
}
user_orders.append(order_data)
return user_orders
def _get_extended_profile_fields():
"""Retrieve the extended profile fields from site configuration to be shown on the
Account Settings page
Returns:
A list of dicts. Each dict corresponds to a single field. The keys per field are:
"field_name" : name of the field stored in user_profile.meta
"field_label" : The label of the field.
"field_type" : TextField or ListField
"field_options": a list of tuples for options in the dropdown in case of ListField
"""
extended_profile_fields = []
fields_already_showing = ['username', 'name', 'email', 'pref-lang', 'country', 'time_zone', 'level_of_education',
'gender', 'year_of_birth', 'language_proficiencies', 'social_links']
field_labels_map = {
"first_name": _(u"First Name"),
"last_name": _(u"Last Name"),
"city": _(u"City"),
"state": _(u"State/Province/Region"),
"company": _(u"Company"),
"title": _(u"Title"),
"job_title": _(u"Job Title"),
"mailing_address": _(u"Mailing address"),
"goals": _(u"Tell us why you're interested in {platform_name}").format(
platform_name=configuration_helpers.get_value("PLATFORM_NAME", settings.PLATFORM_NAME)
),
"profession": _(u"Profession"),
"specialty": _(u"Specialty")
}
extended_profile_field_names = configuration_helpers.get_value('extended_profile_fields', [])
for field_to_exclude in fields_already_showing:
if field_to_exclude in extended_profile_field_names:
extended_profile_field_names.remove(field_to_exclude)
    extended_profile_field_options = configuration_helpers.get_value('EXTRA_FIELD_OPTIONS', {})
extended_profile_field_option_tuples = {}
for field in extended_profile_field_options.keys():
field_options = extended_profile_field_options[field]
extended_profile_field_option_tuples[field] = [(option.lower(), option) for option in field_options]
for field in extended_profile_field_names:
field_dict = {
"field_name": field,
"field_label": field_labels_map.get(field, field),
}
field_options = extended_profile_field_option_tuples.get(field)
if field_options:
field_dict["field_type"] = "ListField"
field_dict["field_options"] = field_options
else:
field_dict["field_type"] = "TextField"
extended_profile_fields.append(field_dict)
return extended_profile_fields
|
edx-solutions/edx-platform
|
openedx/core/djangoapps/user_api/accounts/settings_views.py
|
Python
|
agpl-3.0
| 11,596
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2015 Smile (<http://www.smile.fr>). All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Smile Subscription Access Rights',
'version': '1.0',
'depends': [
'base',
'purchase',
'subscription',
],
'author': 'Smile',
'description': """
# Subscription Access Rights (Groups)
""",
'summary': '',
'website': 'http://www.smile.fr',
'category': 'Purchase Management',
'sequence': 10,
'data': [
'views/subscription_view.xml',
'security/subscription_security.xml',
'security/ir.model.access.csv',
],
'demo_xml': [],
'test': [],
'auto_install': False,
'installable': True,
'application': False,
}
|
tiexinliu/odoo_addons
|
smile_subscription_access_rights/__openerp__.py
|
Python
|
agpl-3.0
| 1,615
|
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RRlang(RPackage):
"""A toolbox for working with base types, core R features like the
condition system, and core 'Tidyverse' features like tidy evaluation."""
homepage = "https://cran.r-project.org/web/packages/rlang/index.html"
url = "https://cran.r-project.org/src/contrib/rlang_0.1.4.tar.gz"
list_url = "https://cran.r-project.org/src/contrib/Archive/rlang"
version('0.1.4', 'daed5104d557c0cbfb4a654ec8ffb579')
version('0.1.2', '170f8cf7b61898040643515a1746a53a')
version('0.1.1', '38a51a0b8f8487eb52b4f3d986313682')
|
EmreAtes/spack
|
var/spack/repos/builtin/packages/r-rlang/package.py
|
Python
|
lgpl-2.1
| 1,824
|
from temboo.Library.Amazon.CloudDrive.Folders.CreateFolder import CreateFolder, CreateFolderInputSet, CreateFolderResultSet, CreateFolderChoreographyExecution
from temboo.Library.Amazon.CloudDrive.Folders.GetFolderMetadata import GetFolderMetadata, GetFolderMetadataInputSet, GetFolderMetadataResultSet, GetFolderMetadataChoreographyExecution
from temboo.Library.Amazon.CloudDrive.Folders.ListFolders import ListFolders, ListFoldersInputSet, ListFoldersResultSet, ListFoldersChoreographyExecution
|
jordanemedlock/psychtruths
|
temboo/core/Library/Amazon/CloudDrive/Folders/__init__.py
|
Python
|
apache-2.0
| 497
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Prints a header file to be used with SELECTIVE_REGISTRATION.
An example of command-line usage is:
bazel build tensorflow/python/tools:print_selective_registration_header && \
bazel-bin/tensorflow/python/tools/print_selective_registration_header \
--graphs=path/to/graph.pb > ops_to_register.h
Then when compiling tensorflow, include ops_to_register.h in the include search
path and pass -DSELECTIVE_REGISTRATION and -DSUPPORT_SELECTIVE_REGISTRATION
- see core/framework/selective_registration.h for more details.
When compiling for Android:
bazel build -c opt --copt="-DSELECTIVE_REGISTRATION" \
--copt="-DSUPPORT_SELECTIVE_REGISTRATION" \
//tensorflow/tools/android/inference_interface:libtensorflow_inference.so \
--host_crosstool_top=@bazel_tools//tools/cpp:toolchain \
--crosstool_top=//external:android/crosstool --cpu=armeabi-v7a
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import sys
from absl import app
from tensorflow.python.tools import selective_registration_header_lib
FLAGS = None
def main(unused_argv):
graphs = FLAGS.graphs.split(',')
print(
selective_registration_header_lib.get_header(graphs,
FLAGS.proto_fileformat,
FLAGS.default_ops))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.register('type', 'bool', lambda v: v.lower() == 'true')
parser.add_argument(
'--graphs',
type=str,
default='',
help='Comma-separated list of paths to model files to be analyzed.',
required=True)
parser.add_argument(
'--proto_fileformat',
type=str,
default='rawproto',
        help='Format of proto file: either textproto, rawproto or ops_list. '
        'ops_list is a file containing the list of ops in JSON format, e.g. '
        '"[["Add", "BinaryOp<CPUDevice, functor::add<float>>"]]".')
parser.add_argument(
'--default_ops',
type=str,
default='NoOp:NoOp,_Recv:RecvOp,_Send:SendOp',
        help='Default operator:kernel pairs to always include implementation for. '
        'Pass "all" to have all operators and kernels included; note that this '
        'should be used only when it is useful compared with simply not using '
        'selective registration, as it can in some cases limit the effect of '
        'compilation caches.')
FLAGS, unparsed = parser.parse_known_args()
app.run(main=main, argv=[sys.argv[0]] + unparsed)
|
frreiss/tensorflow-fred
|
tensorflow/python/tools/print_selective_registration_header.py
|
Python
|
apache-2.0
| 3,254
|
# This file is part of the MapProxy project.
# Copyright (C) 2010-2012 Omniscale <http://omniscale.de>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import with_statement
import os
try:
import json; json
except ImportError:
# test skipped later
json = None
from mapproxy.test.image import img_from_buf
from mapproxy.test.http import mock_single_req_httpd
from mapproxy.test.system import module_setup, module_teardown, SystemTest, make_base_config
from mapproxy.request.wms import WMS111MapRequest, WMS111FeatureInfoRequest, WMS111CapabilitiesRequest
from mapproxy.test.helper import validate_with_dtd
from mapproxy.test.http import mock_httpd
from mapproxy.test.image import create_tmp_image
from mapproxy.test.system.test_wms import is_111_exception
from mapproxy.util.fs import ensure_directory
from mapproxy.cache.renderd import has_renderd_support
from nose.tools import eq_
from nose.plugins.skip import SkipTest
test_config = {}
base_config = make_base_config(test_config)
def setup_module():
if not has_renderd_support():
raise SkipTest("requests required")
module_setup(test_config, 'renderd_client.yaml', with_cache_data=True)
def teardown_module():
module_teardown(test_config)
try:
from http.server import BaseHTTPRequestHandler
except ImportError:
from BaseHTTPServer import BaseHTTPRequestHandler
class TestWMS111(SystemTest):
config = test_config
def setup(self):
SystemTest.setup(self)
self.common_req = WMS111MapRequest(url='/service?', param=dict(service='WMS',
version='1.1.1'))
self.common_map_req = WMS111MapRequest(url='/service?', param=dict(service='WMS',
version='1.1.1', bbox='-180,0,0,80', width='200', height='200',
layers='wms_cache', srs='EPSG:4326', format='image/png',
exceptions='xml',
styles='', request='GetMap'))
self.common_fi_req = WMS111FeatureInfoRequest(url='/service?',
param=dict(x='10', y='20', width='200', height='200', layers='wms_cache',
format='image/png', query_layers='wms_cache', styles='',
bbox='1000,400,2000,1400', srs='EPSG:900913'))
def test_wms_capabilities(self):
req = WMS111CapabilitiesRequest(url='/service?').copy_with_request_params(self.common_req)
resp = self.app.get(req)
eq_(resp.content_type, 'application/vnd.ogc.wms_xml')
xml = resp.lxml
eq_(xml.xpath('//GetMap//OnlineResource/@xlink:href',
namespaces=dict(xlink="http://www.w3.org/1999/xlink"))[0],
'http://localhost/service?')
layer_names = set(xml.xpath('//Layer/Layer/Name/text()'))
expected_names = set(['direct', 'wms_cache',
'tms_cache'])
eq_(layer_names, expected_names)
assert validate_with_dtd(xml, dtd_name='wms/1.1.1/WMS_MS_Capabilities.dtd')
def test_get_map(self):
test_self = self
class req_handler(BaseHTTPRequestHandler):
def do_POST(self):
length = int(self.headers['content-length'])
json_data = self.rfile.read(length)
task = json.loads(json_data.decode('utf-8'))
eq_(task['command'], 'tile')
# request main tile of metatile
eq_(task['tiles'], [[15, 17, 5]])
eq_(task['cache_identifier'], 'wms_cache_GLOBAL_MERCATOR')
eq_(task['priority'], 100)
# this id should not change for the same tile/cache_identifier combination
eq_(task['id'], 'aeb52b506e4e82d0a1edf649d56e0451cfd5862c')
# manually create tile renderd should create
tile_filename = os.path.join(test_self.config['cache_dir'],
'wms_cache_EPSG900913/05/000/000/016/000/000/016.jpeg')
ensure_directory(tile_filename)
with open(tile_filename, 'wb') as f:
f.write(create_tmp_image((256, 256), format='jpeg', color=(255, 0, 100)))
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
self.wfile.write(b'{"status": "ok"}')
def log_request(self, code, size=None):
pass
with mock_single_req_httpd(('localhost', 42423), req_handler):
self.common_map_req.params['bbox'] = '0,0,9,9'
resp = self.app.get(self.common_map_req)
img = img_from_buf(resp.body)
main_color = sorted(img.convert('RGBA').getcolors())[-1]
            # check for red color (jpeg/png conversion requires fuzzy comparison)
assert main_color[0] == 40000
assert main_color[1][0] > 250
assert main_color[1][1] < 5
assert 95 < main_color[1][2] < 105
assert main_color[1][3] == 255
eq_(resp.content_type, 'image/png')
self.created_tiles.append('wms_cache_EPSG900913/05/000/000/016/000/000/016.jpeg')
def test_get_map_error(self):
class req_handler(BaseHTTPRequestHandler):
def do_POST(self):
length = int(self.headers['content-length'])
json_data = self.rfile.read(length)
task = json.loads(json_data.decode('utf-8'))
eq_(task['command'], 'tile')
# request main tile of metatile
eq_(task['tiles'], [[15, 17, 5]])
eq_(task['cache_identifier'], 'wms_cache_GLOBAL_MERCATOR')
eq_(task['priority'], 100)
# this id should not change for the same tile/cache_identifier combination
eq_(task['id'], 'aeb52b506e4e82d0a1edf649d56e0451cfd5862c')
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
self.wfile.write(b'{"status": "error", "error_message": "barf"}')
def log_request(self, code, size=None):
pass
with mock_single_req_httpd(('localhost', 42423), req_handler):
self.common_map_req.params['bbox'] = '0,0,9,9'
resp = self.app.get(self.common_map_req)
eq_(resp.content_type, 'application/vnd.ogc.se_xml')
is_111_exception(resp.lxml, re_msg='Error from renderd: barf')
def test_get_map_connection_error(self):
self.common_map_req.params['bbox'] = '0,0,9,9'
resp = self.app.get(self.common_map_req)
eq_(resp.content_type, 'application/vnd.ogc.se_xml')
is_111_exception(resp.lxml, re_msg='Error while communicating with renderd:')
def test_get_map_non_json_response(self):
class req_handler(BaseHTTPRequestHandler):
def do_POST(self):
length = int(self.headers['content-length'])
json_data = self.rfile.read(length)
json.loads(json_data.decode('utf-8'))
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
self.wfile.write(b'{"invalid')
def log_request(self, code, size=None):
pass
with mock_single_req_httpd(('localhost', 42423), req_handler):
self.common_map_req.params['bbox'] = '0,0,9,9'
resp = self.app.get(self.common_map_req)
eq_(resp.content_type, 'application/vnd.ogc.se_xml')
is_111_exception(resp.lxml, re_msg='Error while communicating with renderd: invalid JSON')
def test_get_featureinfo(self):
expected_req = ({'path': r'/service?LAYERs=foo,bar&SERVICE=WMS&FORMAT=image%2Fpng'
'&REQUEST=GetFeatureInfo&HEIGHT=200&SRS=EPSG%3A900913'
'&VERSION=1.1.1&BBOX=1000.0,400.0,2000.0,1400.0&styles='
'&WIDTH=200&QUERY_LAYERS=foo,bar&X=10&Y=20&feature_count=100'},
{'body': b'info', 'headers': {'content-type': 'text/plain'}})
with mock_httpd(('localhost', 42423), [expected_req]):
self.common_fi_req.params['feature_count'] = 100
resp = self.app.get(self.common_fi_req)
eq_(resp.content_type, 'text/plain')
eq_(resp.body, b'info')
class TestTiles(SystemTest):
config = test_config
def test_get_tile(self):
test_self = self
class req_handler(BaseHTTPRequestHandler):
def do_POST(self):
length = int(self.headers['content-length'])
json_data = self.rfile.read(length)
task = json.loads(json_data.decode('utf-8'))
eq_(task['command'], 'tile')
eq_(task['tiles'], [[10, 20, 6]])
eq_(task['cache_identifier'], 'tms_cache_GLOBAL_MERCATOR')
eq_(task['priority'], 100)
# this id should not change for the same tile/cache_identifier combination
eq_(task['id'], 'cf35c1c927158e188d8fbe0db380c1772b536da9')
# manually create tile renderd should create
tile_filename = os.path.join(test_self.config['cache_dir'],
'tms_cache_EPSG900913/06/000/000/010/000/000/020.png')
ensure_directory(tile_filename)
with open(tile_filename, 'wb') as f:
f.write(b"foobaz")
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
self.wfile.write(b'{"status": "ok"}')
def log_request(self, code, size=None):
pass
with mock_single_req_httpd(('localhost', 42423), req_handler):
resp = self.app.get('/tiles/tms_cache/EPSG900913/6/10/20.png')
eq_(resp.content_type, 'image/png')
eq_(resp.body, b'foobaz')
self.created_tiles.append('tms_cache_EPSG900913/06/000/000/010/000/000/020.png')
def test_get_tile_error(self):
class req_handler(BaseHTTPRequestHandler):
def do_POST(self):
length = int(self.headers['content-length'])
json_data = self.rfile.read(length)
task = json.loads(json_data.decode('utf-8'))
eq_(task['command'], 'tile')
eq_(task['tiles'], [[10, 20, 7]])
eq_(task['cache_identifier'], 'tms_cache_GLOBAL_MERCATOR')
eq_(task['priority'], 100)
# this id should not change for the same tile/cache_identifier combination
eq_(task['id'], 'c24b8c3247afec34fd0a53e5d3706e977877ef47')
self.send_response(200)
self.send_header('Content-type', 'application/json')
self.end_headers()
self.wfile.write(b'{"status": "error", "error_message": "you told me to fail"}')
def log_request(self, code, size=None):
pass
with mock_single_req_httpd(('localhost', 42423), req_handler):
resp = self.app.get('/tiles/tms_cache/EPSG900913/7/10/20.png', status=500)
eq_(resp.content_type, 'text/plain')
eq_(resp.body, b'Error from renderd: you told me to fail')
|
geoadmin/mapproxy
|
mapproxy/test/system/test_renderd_client.py
|
Python
|
apache-2.0
| 11,857
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Cloud Controller: Implementation of EC2 REST API calls, which are
dispatched to other nodes via AMQP RPC. State is kept in a distributed
datastore.
"""
import base64
import time
from oslo.config import cfg
from nova.api.ec2 import ec2utils
from nova.api.ec2 import inst_state
from nova.api.metadata import password
from nova.api import validator
from nova import availability_zones
from nova import block_device
from nova.cloudpipe import pipelib
from nova import compute
from nova.compute import api as compute_api
from nova.compute import flavors
from nova.compute import vm_states
from nova import db
from nova import exception
from nova.image import s3
from nova import network
from nova.network.security_group import neutron_driver
from nova.objects import instance as instance_obj
from nova.openstack.common.gettextutils import _
from nova.openstack.common import log as logging
from nova.openstack.common import timeutils
from nova import quota
from nova import servicegroup
from nova import utils
from nova import volume
ec2_opts = [
cfg.StrOpt('ec2_host',
default='$my_ip',
help='the ip of the ec2 api server'),
cfg.StrOpt('ec2_dmz_host',
default='$my_ip',
help='the internal ip of the ec2 api server'),
cfg.IntOpt('ec2_port',
default=8773,
help='the port of the ec2 api server'),
cfg.StrOpt('ec2_scheme',
default='http',
help='the protocol to use when connecting to the ec2 api '
'server (http, https)'),
cfg.StrOpt('ec2_path',
default='/services/Cloud',
help='the path prefix used to call the ec2 api server'),
cfg.ListOpt('region_list',
default=[],
help='list of region=fqdn pairs separated by commas'),
]
CONF = cfg.CONF
CONF.register_opts(ec2_opts)
CONF.import_opt('my_ip', 'nova.netconf')
CONF.import_opt('vpn_key_suffix', 'nova.cloudpipe.pipelib')
CONF.import_opt('internal_service_availability_zone',
'nova.availability_zones')
LOG = logging.getLogger(__name__)
QUOTAS = quota.QUOTAS
def validate_ec2_id(val):
if not validator.validate_str()(val):
raise exception.InvalidInstanceIDMalformed(val=val)
try:
ec2utils.ec2_id_to_id(val)
except exception.InvalidEc2Id:
raise exception.InvalidInstanceIDMalformed(val=val)
# EC2 API can return the following values as documented in the EC2 API
# http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/
# ApiReference-ItemType-InstanceStateType.html
# pending 0 | running 16 | shutting-down 32 | terminated 48 | stopping 64 |
# stopped 80
_STATE_DESCRIPTION_MAP = {
None: inst_state.PENDING,
vm_states.ACTIVE: inst_state.RUNNING,
vm_states.BUILDING: inst_state.PENDING,
vm_states.DELETED: inst_state.TERMINATED,
vm_states.SOFT_DELETED: inst_state.TERMINATED,
vm_states.STOPPED: inst_state.STOPPED,
vm_states.PAUSED: inst_state.PAUSE,
vm_states.SUSPENDED: inst_state.SUSPEND,
vm_states.RESCUED: inst_state.RESCUE,
vm_states.RESIZED: inst_state.RESIZE,
}
def _state_description(vm_state, _shutdown_terminate):
"""Map the vm state to the server status string."""
# Note(maoy): We do not provide EC2 compatibility
# in shutdown_terminate flag behavior. So we ignore
# it here.
name = _STATE_DESCRIPTION_MAP.get(vm_state, vm_state)
return {'code': inst_state.name_to_code(name),
'name': name}
def _parse_block_device_mapping(bdm):
"""Parse BlockDeviceMappingItemType into flat hash
    BlockDeviceMapping.<N>.DeviceName
    BlockDeviceMapping.<N>.Ebs.SnapshotId
    BlockDeviceMapping.<N>.Ebs.VolumeSize
    BlockDeviceMapping.<N>.Ebs.DeleteOnTermination
    BlockDeviceMapping.<N>.Ebs.NoDevice
    BlockDeviceMapping.<N>.VirtualName
=> remove .Ebs and allow volume id in SnapshotId
"""
ebs = bdm.pop('ebs', None)
if ebs:
ec2_id = ebs.pop('snapshot_id', None)
if ec2_id:
if ec2_id.startswith('snap-'):
bdm['snapshot_id'] = ec2utils.ec2_snap_id_to_uuid(ec2_id)
elif ec2_id.startswith('vol-'):
bdm['volume_id'] = ec2utils.ec2_vol_id_to_uuid(ec2_id)
ebs.setdefault('delete_on_termination', True)
bdm.update(ebs)
return bdm
def _properties_get_mappings(properties):
return block_device.mappings_prepend_dev(properties.get('mappings', []))
def _format_block_device_mapping(bdm):
"""Construct BlockDeviceMappingItemType
{'device_name': '...', 'snapshot_id': , ...}
=> BlockDeviceMappingItemType
"""
keys = (('deviceName', 'device_name'),
('virtualName', 'virtual_name'))
item = {}
for name, k in keys:
if k in bdm:
item[name] = bdm[k]
if bdm.get('no_device'):
item['noDevice'] = True
if ('snapshot_id' in bdm) or ('volume_id' in bdm):
ebs_keys = (('snapshotId', 'snapshot_id'),
('snapshotId', 'volume_id'), # snapshotId is abused
('volumeSize', 'volume_size'),
('deleteOnTermination', 'delete_on_termination'))
ebs = {}
for name, k in ebs_keys:
if k in bdm:
if k == 'snapshot_id':
ebs[name] = ec2utils.id_to_ec2_snap_id(bdm[k])
elif k == 'volume_id':
ebs[name] = ec2utils.id_to_ec2_vol_id(bdm[k])
else:
ebs[name] = bdm[k]
assert 'snapshotId' in ebs
item['ebs'] = ebs
return item
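# Illustrative sketch of the inverse direction (ids are made up): a bdm
#     {'device_name': '/dev/sdb', 'volume_id': '<uuid>',
#      'delete_on_termination': False}
# becomes a BlockDeviceMappingItemType such as
#     {'deviceName': '/dev/sdb',
#      'ebs': {'snapshotId': 'vol-12345678',  # snapshotId is abused
#              'deleteOnTermination': False}}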
def _format_mappings(properties, result):
"""Format multiple BlockDeviceMappingItemType."""
mappings = [{'virtualName': m['virtual'], 'deviceName': m['device']}
for m in _properties_get_mappings(properties)
if block_device.is_swap_or_ephemeral(m['virtual'])]
block_device_mapping = [_format_block_device_mapping(bdm) for bdm in
properties.get('block_device_mapping', [])]
# NOTE(yamahata): overwrite mappings with block_device_mapping
for bdm in block_device_mapping:
for i in range(len(mappings)):
if bdm['deviceName'] == mappings[i]['deviceName']:
del mappings[i]
break
mappings.append(bdm)
# NOTE(yamahata): trim ebs.no_device == true. Is this necessary?
mappings = [bdm for bdm in mappings if not (bdm.get('noDevice', False))]
if mappings:
result['blockDeviceMapping'] = mappings
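# Illustrative sketch of the overwrite rule noted above: a swap mapping
#     {'virtualName': 'swap', 'deviceName': '/dev/sdb'}
# derived from the image properties is deleted when an explicit
# block_device_mapping entry for '/dev/sdb' is present, so the explicit
# entry wins and is what lands in result['blockDeviceMapping'].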
def db_to_inst_obj(context, db_instance):
# NOTE(danms): This is a temporary helper method for converting
# Instance DB objects to NovaObjects without needing to re-query.
inst_obj = instance_obj.Instance._from_db_object(
context, instance_obj.Instance(), db_instance,
expected_attrs=['system_metadata', 'metadata'])
return inst_obj
class CloudController(object):
"""CloudController provides the critical dispatch between
inbound API calls through the endpoint and messages
sent to the other nodes.
"""
def __init__(self):
self.image_service = s3.S3ImageService()
self.network_api = network.API()
self.volume_api = volume.API()
self.security_group_api = get_cloud_security_group_api()
self.compute_api = compute.API(network_api=self.network_api,
volume_api=self.volume_api,
security_group_api=self.security_group_api)
self.keypair_api = compute_api.KeypairAPI()
self.servicegroup_api = servicegroup.API()
def __str__(self):
return 'CloudController'
def _enforce_valid_instance_ids(self, context, instance_ids):
# NOTE(mikal): Amazon's implementation of the EC2 API requires that
# _all_ instance ids passed in be valid.
instances = {}
if instance_ids:
for ec2_id in instance_ids:
instance_uuid = ec2utils.ec2_inst_id_to_uuid(context, ec2_id)
instance = self.compute_api.get(context, instance_uuid)
instances[ec2_id] = instance
return instances
def _get_image_state(self, image):
# NOTE(vish): fallback status if image_state isn't set
state = image.get('status')
if state == 'active':
state = 'available'
return image['properties'].get('image_state', state)
def describe_availability_zones(self, context, **kwargs):
if ('zone_name' in kwargs and
'verbose' in kwargs['zone_name'] and
context.is_admin):
return self._describe_availability_zones_verbose(context,
**kwargs)
else:
return self._describe_availability_zones(context, **kwargs)
def _describe_availability_zones(self, context, **kwargs):
ctxt = context.elevated()
available_zones, not_available_zones = \
availability_zones.get_availability_zones(ctxt)
result = []
for zone in available_zones:
# Hide internal_service_availability_zone
if zone == CONF.internal_service_availability_zone:
continue
result.append({'zoneName': zone,
'zoneState': "available"})
for zone in not_available_zones:
result.append({'zoneName': zone,
'zoneState': "not available"})
return {'availabilityZoneInfo': result}
def _describe_availability_zones_verbose(self, context, **kwargs):
ctxt = context.elevated()
available_zones, not_available_zones = \
availability_zones.get_availability_zones(ctxt)
# Available services
enabled_services = db.service_get_all(context, False)
enabled_services = availability_zones.set_availability_zones(context,
enabled_services)
zone_hosts = {}
host_services = {}
for service in enabled_services:
zone_hosts.setdefault(service['availability_zone'], [])
if service['host'] not in zone_hosts[service['availability_zone']]:
zone_hosts[service['availability_zone']].append(
service['host'])
host_services.setdefault(service['availability_zone'] +
service['host'], [])
host_services[service['availability_zone'] + service['host']].\
append(service)
result = []
for zone in available_zones:
result.append({'zoneName': zone,
'zoneState': "available"})
for host in zone_hosts[zone]:
result.append({'zoneName': '|- %s' % host,
'zoneState': ''})
for service in host_services[zone + host]:
alive = self.servicegroup_api.service_is_up(service)
art = (alive and ":-)") or "XXX"
active = 'enabled'
if service['disabled']:
active = 'disabled'
result.append({'zoneName': '| |- %s' % service['binary'],
'zoneState': ('%s %s %s'
% (active, art,
service['updated_at']))})
for zone in not_available_zones:
result.append({'zoneName': zone,
'zoneState': "not available"})
return {'availabilityZoneInfo': result}
def describe_regions(self, context, region_name=None, **kwargs):
if CONF.region_list:
regions = []
for region in CONF.region_list:
name, _sep, host = region.partition('=')
endpoint = '%s://%s:%s%s' % (CONF.ec2_scheme,
host,
CONF.ec2_port,
CONF.ec2_path)
regions.append({'regionName': name,
'regionEndpoint': endpoint})
else:
regions = [{'regionName': 'nova',
'regionEndpoint': '%s://%s:%s%s' % (CONF.ec2_scheme,
CONF.ec2_host,
CONF.ec2_port,
CONF.ec2_path)}]
return {'regionInfo': regions}
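# Illustrative sketch (uses the ec2_* defaults registered above): with
#     CONF.region_list = ['us-east=ec2.example.com']
# describe_regions would report
#     {'regionName': 'us-east',
#      'regionEndpoint': 'http://ec2.example.com:8773/services/Cloud'}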
def describe_snapshots(self,
context,
snapshot_id=None,
owner=None,
restorable_by=None,
**kwargs):
if snapshot_id:
snapshots = []
for ec2_id in snapshot_id:
internal_id = ec2utils.ec2_snap_id_to_uuid(ec2_id)
snapshot = self.volume_api.get_snapshot(
context,
snapshot_id=internal_id)
snapshots.append(snapshot)
else:
snapshots = self.volume_api.get_all_snapshots(context)
formatted_snapshots = []
for s in snapshots:
formatted = self._format_snapshot(context, s)
if formatted:
formatted_snapshots.append(formatted)
return {'snapshotSet': formatted_snapshots}
def _format_snapshot(self, context, snapshot):
# NOTE(mikal): this is just a set of strings in cinder. If they
# implement an enum, then we should move this code to use it. The
# valid ec2 statuses are "pending", "completed", and "error".
status_map = {'new': 'pending',
'creating': 'pending',
'available': 'completed',
'active': 'completed',
'deleting': 'pending',
'deleted': None,
'error': 'error'}
mapped_status = status_map.get(snapshot['status'], snapshot['status'])
if not mapped_status:
return None
s = {}
s['snapshotId'] = ec2utils.id_to_ec2_snap_id(snapshot['id'])
s['volumeId'] = ec2utils.id_to_ec2_vol_id(snapshot['volume_id'])
s['status'] = mapped_status
s['startTime'] = snapshot['created_at']
s['progress'] = snapshot['progress']
s['ownerId'] = snapshot['project_id']
s['volumeSize'] = snapshot['volume_size']
s['description'] = snapshot['display_description']
return s
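# Illustrative sketch of the status mapping above: a cinder snapshot in
# state 'creating' is reported as 'pending' and 'available' as
# 'completed', while a 'deleted' snapshot maps to None, so
# describe_snapshots silently drops it from the returned snapshotSet.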
def create_snapshot(self, context, volume_id, **kwargs):
validate_ec2_id(volume_id)
LOG.audit(_("Create snapshot of volume %s"), volume_id,
context=context)
volume_id = ec2utils.ec2_vol_id_to_uuid(volume_id)
args = (context, volume_id, kwargs.get('name'),
kwargs.get('description'))
if kwargs.get('force', False):
snapshot = self.volume_api.create_snapshot_force(*args)
else:
snapshot = self.volume_api.create_snapshot(*args)
db.ec2_snapshot_create(context, snapshot['id'])
return self._format_snapshot(context, snapshot)
def delete_snapshot(self, context, snapshot_id, **kwargs):
snapshot_id = ec2utils.ec2_snap_id_to_uuid(snapshot_id)
self.volume_api.delete_snapshot(context, snapshot_id)
return True
def describe_key_pairs(self, context, key_name=None, **kwargs):
key_pairs = self.keypair_api.get_key_pairs(context, context.user_id)
if key_name is not None:
key_pairs = [x for x in key_pairs if x['name'] in key_name]
# If looking for a non-existent key pair
if key_name is not None and not key_pairs:
msg = _('Could not find key pair(s): %s') % ','.join(key_name)
raise exception.KeypairNotFound(message=msg)
result = []
for key_pair in key_pairs:
# filter out the vpn keys
suffix = CONF.vpn_key_suffix
if context.is_admin or not key_pair['name'].endswith(suffix):
result.append({
'keyName': key_pair['name'],
'keyFingerprint': key_pair['fingerprint'],
})
return {'keySet': result}
def create_key_pair(self, context, key_name, **kwargs):
LOG.audit(_("Create key pair %s"), key_name, context=context)
keypair, private_key = self.keypair_api.create_key_pair(
context, context.user_id, key_name)
return {'keyName': key_name,
'keyFingerprint': keypair['fingerprint'],
'keyMaterial': private_key}
# TODO(vish): when context is no longer an object, pass it here
def import_key_pair(self, context, key_name, public_key_material,
**kwargs):
LOG.audit(_("Import key %s"), key_name, context=context)
public_key = base64.b64decode(public_key_material)
keypair = self.keypair_api.import_key_pair(context,
context.user_id,
key_name,
public_key)
return {'keyName': key_name,
'keyFingerprint': keypair['fingerprint']}
def delete_key_pair(self, context, key_name, **kwargs):
LOG.audit(_("Delete key pair %s"), key_name, context=context)
try:
self.keypair_api.delete_key_pair(context, context.user_id,
key_name)
except exception.NotFound:
# aws returns true even if the key doesn't exist
pass
return True
def describe_security_groups(self, context, group_name=None, group_id=None,
**kwargs):
search_opts = ec2utils.search_opts_from_filters(kwargs.get('filter'))
raw_groups = self.security_group_api.list(context,
group_name,
group_id,
context.project_id,
search_opts=search_opts)
groups = [self._format_security_group(context, g) for g in raw_groups]
return {'securityGroupInfo':
list(sorted(groups,
key=lambda k: (k['ownerId'], k['groupName'])))}
def _format_security_group(self, context, group):
g = {}
g['groupDescription'] = group['description']
g['groupName'] = group['name']
g['ownerId'] = group['project_id']
g['ipPermissions'] = []
for rule in group['rules']:
r = {}
r['groups'] = []
r['ipRanges'] = []
if rule['group_id']:
if rule.get('grantee_group'):
source_group = rule['grantee_group']
r['groups'] += [{'groupName': source_group['name'],
'userId': source_group['project_id']}]
else:
# rule is not always joined with grantee_group
# for example when using neutron driver.
source_group = self.security_group_api.get(
context, id=rule['group_id'])
r['groups'] += [{'groupName': source_group.get('name'),
'userId': source_group.get('project_id')}]
if rule['protocol']:
r['ipProtocol'] = rule['protocol'].lower()
r['fromPort'] = rule['from_port']
r['toPort'] = rule['to_port']
g['ipPermissions'] += [dict(r)]
else:
for protocol, min_port, max_port in (('icmp', -1, -1),
('tcp', 1, 65535),
('udp', 1, 65535)):
r['ipProtocol'] = protocol
r['fromPort'] = min_port
r['toPort'] = max_port
g['ipPermissions'] += [dict(r)]
else:
r['ipProtocol'] = rule['protocol']
r['fromPort'] = rule['from_port']
r['toPort'] = rule['to_port']
r['ipRanges'] += [{'cidrIp': rule['cidr']}]
g['ipPermissions'] += [r]
return g
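# Illustrative sketch: a cidr rule such as
#     {'protocol': 'tcp', 'from_port': 22, 'to_port': 22,
#      'cidr': '10.0.0.0/8', 'group_id': None}
# is rendered as
#     {'ipProtocol': 'tcp', 'fromPort': 22, 'toPort': 22,
#      'groups': [], 'ipRanges': [{'cidrIp': '10.0.0.0/8'}]}
# while a source-group rule with no protocol set is expanded into one
# entry each for icmp, tcp and udp over the full port range.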
def _rule_args_to_dict(self, context, kwargs):
rules = []
if 'groups' not in kwargs and 'ip_ranges' not in kwargs:
rule = self._rule_dict_last_step(context, **kwargs)
if rule:
rules.append(rule)
return rules
if 'ip_ranges' in kwargs:
rules = self._cidr_args_split(kwargs)
else:
rules = [kwargs]
finalset = []
for rule in rules:
if 'groups' in rule:
groups_values = self._groups_args_split(rule)
for groups_value in groups_values:
final = self._rule_dict_last_step(context, **groups_value)
finalset.append(final)
else:
final = self._rule_dict_last_step(context, **rule)
finalset.append(final)
return finalset
def _cidr_args_split(self, kwargs):
cidr_args_split = []
cidrs = kwargs['ip_ranges']
for key, cidr in cidrs.iteritems():
mykwargs = kwargs.copy()
del mykwargs['ip_ranges']
mykwargs['cidr_ip'] = cidr['cidr_ip']
cidr_args_split.append(mykwargs)
return cidr_args_split
def _groups_args_split(self, kwargs):
groups_args_split = []
groups = kwargs['groups']
for key, group in groups.iteritems():
mykwargs = kwargs.copy()
del mykwargs['groups']
if 'group_name' in group:
mykwargs['source_security_group_name'] = group['group_name']
if 'user_id' in group:
mykwargs['source_security_group_owner_id'] = group['user_id']
if 'group_id' in group:
mykwargs['source_security_group_id'] = group['group_id']
groups_args_split.append(mykwargs)
return groups_args_split
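# Illustrative sketch: EC2 encodes multiple sources as numbered dicts, so
#     {'ip_protocol': 'tcp', 'groups': {'1': {'group_name': 'web'},
#                                       '2': {'group_name': 'db'}}}
# is split by _groups_args_split into two kwargs dicts, one carrying
# source_security_group_name='web' and one carrying 'db', which
# _rule_args_to_dict then turns into individual ingress rules.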
def _rule_dict_last_step(self, context, to_port=None, from_port=None,
ip_protocol=None, cidr_ip=None, user_id=None,
source_security_group_name=None,
source_security_group_owner_id=None):
if source_security_group_name:
source_project_id = self._get_source_project_id(context,
source_security_group_owner_id)
source_security_group = db.security_group_get_by_name(
context.elevated(),
source_project_id,
source_security_group_name)
notfound = exception.SecurityGroupNotFound
if not source_security_group:
raise notfound(security_group_id=source_security_group_name)
group_id = source_security_group['id']
return self.security_group_api.new_group_ingress_rule(
group_id, ip_protocol, from_port, to_port)
else:
cidr = self.security_group_api.parse_cidr(cidr_ip)
return self.security_group_api.new_cidr_ingress_rule(
cidr, ip_protocol, from_port, to_port)
def _validate_group_identifier(self, group_name, group_id):
if not group_name and not group_id:
err = _("need group_name or group_id")
raise exception.MissingParameter(reason=err)
def _validate_rulevalues(self, rulesvalues):
if not rulesvalues:
err = _("can't build a valid rule")
raise exception.MissingParameter(reason=err)
def _validate_security_group_protocol(self, values):
validprotocols = ['tcp', 'udp', 'icmp', '6', '17', '1']
if 'ip_protocol' in values and \
values['ip_protocol'] not in validprotocols:
protocol = values['ip_protocol']
err = _("Invalid IP protocol %(protocol)s") % \
{'protocol': protocol}
raise exception.InvalidParameterValue(message=err)
def revoke_security_group_ingress(self, context, group_name=None,
group_id=None, **kwargs):
self._validate_group_identifier(group_name, group_id)
security_group = self.security_group_api.get(context, group_name,
group_id)
prevalues = kwargs.get('ip_permissions', [kwargs])
rule_ids = []
for values in prevalues:
rulesvalues = self._rule_args_to_dict(context, values)
self._validate_rulevalues(rulesvalues)
for values_for_rule in rulesvalues:
values_for_rule['parent_group_id'] = security_group['id']
rule_ids.append(self.security_group_api.rule_exists(
security_group, values_for_rule))
rule_ids = [id for id in rule_ids if id]
if rule_ids:
self.security_group_api.remove_rules(context, security_group,
rule_ids)
return True
msg = _("No rule for the specified parameters.")
raise exception.InvalidParameterValue(message=msg)
# TODO(soren): This has only been tested with Boto as the client.
# Unfortunately, it seems Boto is using an old API
# for these operations, so support for newer API versions
# is sketchy.
def authorize_security_group_ingress(self, context, group_name=None,
group_id=None, **kwargs):
self._validate_group_identifier(group_name, group_id)
security_group = self.security_group_api.get(context, group_name,
group_id)
prevalues = kwargs.get('ip_permissions', [kwargs])
postvalues = []
for values in prevalues:
self._validate_security_group_protocol(values)
rulesvalues = self._rule_args_to_dict(context, values)
self._validate_rulevalues(rulesvalues)
for values_for_rule in rulesvalues:
values_for_rule['parent_group_id'] = security_group['id']
if self.security_group_api.rule_exists(security_group,
values_for_rule):
raise exception.SecurityGroupRuleExists(
rule=values_for_rule)
postvalues.append(values_for_rule)
if postvalues:
self.security_group_api.add_rules(context, security_group['id'],
security_group['name'], postvalues)
return True
msg = _("No rule for the specified parameters.")
raise exception.InvalidParameterValue(message=msg)
def _get_source_project_id(self, context, source_security_group_owner_id):
if source_security_group_owner_id:
# Parse user:project for source group.
source_parts = source_security_group_owner_id.split(':')
# If no project name specified, assume it's same as user name.
# Since we're looking up by project name, the user name is not
# used here. It's only read for EC2 API compatibility.
if len(source_parts) == 2:
source_project_id = source_parts[1]
else:
source_project_id = source_parts[0]
else:
source_project_id = context.project_id
return source_project_id
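# Illustrative sketch of the owner-id parsing above:
#     _get_source_project_id(context, 'alice:project1')  => 'project1'
#     _get_source_project_id(context, 'alice')           => 'alice'
#     _get_source_project_id(context, None)              => context.project_id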
def create_security_group(self, context, group_name, group_description):
if isinstance(group_name, unicode):
group_name = group_name.encode('utf-8')
if CONF.ec2_strict_validation:
# EC2 specification gives constraints for name and description:
# Accepts alphanumeric characters, spaces, dashes, and underscores
allowed = r'^[a-zA-Z0-9_\- ]+$'
self.security_group_api.validate_property(group_name, 'name',
allowed)
self.security_group_api.validate_property(group_description,
'description', allowed)
else:
# Amazon accepts more symbols.
# So, allow POSIX [:print:] characters.
allowed = r'^[\x20-\x7E]+$'
self.security_group_api.validate_property(group_name, 'name',
allowed)
group_ref = self.security_group_api.create_security_group(
context, group_name, group_description)
return {'securityGroupSet': [self._format_security_group(context,
group_ref)]}
def delete_security_group(self, context, group_name=None, group_id=None,
**kwargs):
if not group_name and not group_id:
err = _("need group_name or group_id")
raise exception.MissingParameter(reason=err)
security_group = self.security_group_api.get(context, group_name,
group_id)
self.security_group_api.destroy(context, security_group)
return True
def get_password_data(self, context, instance_id, **kwargs):
# instance_id may be passed in as a list of instances
if isinstance(instance_id, list):
ec2_id = instance_id[0]
else:
ec2_id = instance_id
validate_ec2_id(ec2_id)
instance_uuid = ec2utils.ec2_inst_id_to_uuid(context, ec2_id)
instance = self.compute_api.get(context, instance_uuid)
output = password.extract_password(instance)
# NOTE(vish): this should be timestamp from the metadata fields
# but it isn't important enough to implement properly
now = timeutils.utcnow()
return {"InstanceId": ec2_id,
"Timestamp": now,
"passwordData": output}
def get_console_output(self, context, instance_id, **kwargs):
LOG.audit(_("Get console output for instance %s"), instance_id,
context=context)
# instance_id may be passed in as a list of instances
if isinstance(instance_id, list):
ec2_id = instance_id[0]
else:
ec2_id = instance_id
validate_ec2_id(ec2_id)
instance_uuid = ec2utils.ec2_inst_id_to_uuid(context, ec2_id)
instance = self.compute_api.get(context, instance_uuid)
output = self.compute_api.get_console_output(context, instance)
now = timeutils.utcnow()
return {"InstanceId": ec2_id,
"Timestamp": now,
"output": base64.b64encode(output)}
def describe_volumes(self, context, volume_id=None, **kwargs):
if volume_id:
volumes = []
for ec2_id in volume_id:
validate_ec2_id(ec2_id)
internal_id = ec2utils.ec2_vol_id_to_uuid(ec2_id)
volume = self.volume_api.get(context, internal_id)
volumes.append(volume)
else:
volumes = self.volume_api.get_all(context)
volumes = [self._format_volume(context, v) for v in volumes]
return {'volumeSet': volumes}
def _format_volume(self, context, volume):
valid_ec2_api_volume_status_map = {
'attaching': 'in-use',
'detaching': 'in-use'}
instance_ec2_id = None
if volume.get('instance_uuid', None):
instance_uuid = volume['instance_uuid']
instance = db.instance_get_by_uuid(context.elevated(),
instance_uuid)
instance_ec2_id = ec2utils.id_to_ec2_inst_id(instance_uuid)
v = {}
v['volumeId'] = ec2utils.id_to_ec2_vol_id(volume['id'])
v['status'] = valid_ec2_api_volume_status_map.get(volume['status'],
volume['status'])
v['size'] = volume['size']
v['availabilityZone'] = volume['availability_zone']
v['createTime'] = volume['created_at']
if volume['attach_status'] == 'attached':
v['attachmentSet'] = [{'attachTime': volume['attach_time'],
'deleteOnTermination': False,
'device': volume['mountpoint'],
'instanceId': instance_ec2_id,
'status': 'attached',
'volumeId': v['volumeId']}]
else:
v['attachmentSet'] = [{}]
if volume.get('snapshot_id') is not None:
v['snapshotId'] = ec2utils.id_to_ec2_snap_id(volume['snapshot_id'])
else:
v['snapshotId'] = None
return v
def create_volume(self, context, **kwargs):
snapshot_ec2id = kwargs.get('snapshot_id', None)
if snapshot_ec2id is not None:
snapshot_id = ec2utils.ec2_snap_id_to_uuid(kwargs['snapshot_id'])
snapshot = self.volume_api.get_snapshot(context, snapshot_id)
LOG.audit(_("Create volume from snapshot %s"), snapshot_ec2id,
context=context)
else:
snapshot = None
LOG.audit(_("Create volume of %s GB"),
kwargs.get('size'),
context=context)
create_kwargs = dict(snapshot=snapshot,
volume_type=kwargs.get('volume_type'),
metadata=kwargs.get('metadata'),
availability_zone=kwargs.get('availability_zone'))
volume = self.volume_api.create(context,
kwargs.get('size'),
kwargs.get('name'),
kwargs.get('description'),
**create_kwargs)
db.ec2_volume_create(context, volume['id'])
# TODO(vish): Instance should be None at db layer instead of
# trying to lazy load, but for now we turn it into
# a dict to avoid an error.
return self._format_volume(context, dict(volume))
def delete_volume(self, context, volume_id, **kwargs):
validate_ec2_id(volume_id)
volume_id = ec2utils.ec2_vol_id_to_uuid(volume_id)
self.volume_api.delete(context, volume_id)
return True
def attach_volume(self, context,
volume_id,
instance_id,
device, **kwargs):
validate_ec2_id(instance_id)
validate_ec2_id(volume_id)
volume_id = ec2utils.ec2_vol_id_to_uuid(volume_id)
instance_uuid = ec2utils.ec2_inst_id_to_uuid(context, instance_id)
instance = self.compute_api.get(context, instance_uuid)
LOG.audit(_('Attach volume %(volume_id)s to instance %(instance_id)s '
'at %(device)s'),
{'volume_id': volume_id,
'instance_id': instance_id,
'device': device},
context=context)
self.compute_api.attach_volume(context, instance, volume_id, device)
volume = self.volume_api.get(context, volume_id)
return {'attachTime': volume['attach_time'],
'device': volume['mountpoint'],
'instanceId': ec2utils.id_to_ec2_inst_id(instance_uuid),
'requestId': context.request_id,
'status': volume['attach_status'],
'volumeId': ec2utils.id_to_ec2_vol_id(volume_id)}
def _get_instance_from_volume(self, context, volume):
if volume['instance_uuid']:
try:
return db.instance_get_by_uuid(context,
volume['instance_uuid'])
except exception.InstanceNotFound:
pass
raise exception.VolumeUnattached(volume_id=volume['id'])
def detach_volume(self, context, volume_id, **kwargs):
validate_ec2_id(volume_id)
volume_id = ec2utils.ec2_vol_id_to_uuid(volume_id)
LOG.audit(_("Detach volume %s"), volume_id, context=context)
volume = self.volume_api.get(context, volume_id)
instance = self._get_instance_from_volume(context, volume)
self.compute_api.detach_volume(context, instance, volume)
return {'attachTime': volume['attach_time'],
'device': volume['mountpoint'],
'instanceId': ec2utils.id_to_ec2_inst_id(
volume['instance_uuid']),
'requestId': context.request_id,
'status': volume['attach_status'],
'volumeId': ec2utils.id_to_ec2_vol_id(volume_id)}
def _format_kernel_id(self, context, instance_ref, result, key):
kernel_uuid = instance_ref['kernel_id']
if kernel_uuid is None or kernel_uuid == '':
return
result[key] = ec2utils.glance_id_to_ec2_id(context, kernel_uuid, 'aki')
def _format_ramdisk_id(self, context, instance_ref, result, key):
ramdisk_uuid = instance_ref['ramdisk_id']
if ramdisk_uuid is None or ramdisk_uuid == '':
return
result[key] = ec2utils.glance_id_to_ec2_id(context, ramdisk_uuid,
'ari')
def describe_instance_attribute(self, context, instance_id, attribute,
**kwargs):
def _unsupported_attribute(instance, result):
raise exception.InvalidAttribute(attr=attribute)
def _format_attr_block_device_mapping(instance, result):
tmp = {}
self._format_instance_root_device_name(instance, tmp)
self._format_instance_bdm(context, instance['uuid'],
tmp['rootDeviceName'], result)
def _format_attr_disable_api_termination(instance, result):
result['disableApiTermination'] = instance['disable_terminate']
def _format_attr_group_set(instance, result):
CloudController._format_group_set(instance, result)
def _format_attr_instance_initiated_shutdown_behavior(instance,
result):
if instance['shutdown_terminate']:
result['instanceInitiatedShutdownBehavior'] = 'terminate'
else:
result['instanceInitiatedShutdownBehavior'] = 'stop'
def _format_attr_instance_type(instance, result):
self._format_instance_type(instance, result)
def _format_attr_kernel(instance, result):
self._format_kernel_id(context, instance, result, 'kernel')
def _format_attr_ramdisk(instance, result):
self._format_ramdisk_id(context, instance, result, 'ramdisk')
def _format_attr_root_device_name(instance, result):
self._format_instance_root_device_name(instance, result)
def _format_attr_source_dest_check(instance, result):
_unsupported_attribute(instance, result)
def _format_attr_user_data(instance, result):
result['userData'] = base64.b64decode(instance['user_data'])
attribute_formatter = {
'blockDeviceMapping': _format_attr_block_device_mapping,
'disableApiTermination': _format_attr_disable_api_termination,
'groupSet': _format_attr_group_set,
'instanceInitiatedShutdownBehavior':
_format_attr_instance_initiated_shutdown_behavior,
'instanceType': _format_attr_instance_type,
'kernel': _format_attr_kernel,
'ramdisk': _format_attr_ramdisk,
'rootDeviceName': _format_attr_root_device_name,
'sourceDestCheck': _format_attr_source_dest_check,
'userData': _format_attr_user_data,
}
fn = attribute_formatter.get(attribute)
if fn is None:
raise exception.InvalidAttribute(attr=attribute)
validate_ec2_id(instance_id)
instance_uuid = ec2utils.ec2_inst_id_to_uuid(context, instance_id)
instance = self.compute_api.get(context, instance_uuid)
result = {'instance_id': instance_id}
fn(instance, result)
return result
def describe_instances(self, context, **kwargs):
# Optional DescribeInstances argument
instance_id = kwargs.get('instance_id', None)
filters = kwargs.get('filter', None)
instances = self._enforce_valid_instance_ids(context, instance_id)
return self._format_describe_instances(context,
instance_id=instance_id,
instances_cache=instances,
filter=filters)
def describe_instances_v6(self, context, **kwargs):
# Optional DescribeInstancesV6 argument
instance_id = kwargs.get('instance_id', None)
filters = kwargs.get('filter', None)
instances = self._enforce_valid_instance_ids(context, instance_id)
return self._format_describe_instances(context,
instance_id=instance_id,
instances_cache=instances,
filter=filters,
use_v6=True)
def _format_describe_instances(self, context, **kwargs):
return {'reservationSet': self._format_instances(context, **kwargs)}
def _format_run_instances(self, context, reservation_id):
i = self._format_instances(context, reservation_id=reservation_id)
assert len(i) == 1
return i[0]
def _format_terminate_instances(self, context, instance_id,
previous_states):
instances_set = []
for (ec2_id, previous_state) in zip(instance_id, previous_states):
i = {}
i['instanceId'] = ec2_id
i['previousState'] = _state_description(previous_state['vm_state'],
previous_state['shutdown_terminate'])
try:
instance_uuid = ec2utils.ec2_inst_id_to_uuid(context, ec2_id)
instance = self.compute_api.get(context, instance_uuid)
i['currentState'] = _state_description(instance['vm_state'],
instance['shutdown_terminate'])
except exception.NotFound:
i['currentState'] = _state_description(vm_states.DELETED,
True)
instances_set.append(i)
return {'instancesSet': instances_set}
def _format_instance_bdm(self, context, instance_uuid, root_device_name,
result):
"""Format InstanceBlockDeviceMappingResponseItemType."""
root_device_type = 'instance-store'
mapping = []
for bdm in block_device.legacy_mapping(
db.block_device_mapping_get_all_by_instance(context,
instance_uuid)):
volume_id = bdm['volume_id']
if (volume_id is None or bdm['no_device']):
continue
if (bdm['device_name'] == root_device_name and
(bdm['snapshot_id'] or bdm['volume_id'])):
assert not bdm['virtual_name']
root_device_type = 'ebs'
vol = self.volume_api.get(context, volume_id)
LOG.debug(_("vol = %s\n"), vol)
# TODO(yamahata): volume attach time
ebs = {'volumeId': ec2utils.id_to_ec2_vol_id(volume_id),
'deleteOnTermination': bdm['delete_on_termination'],
'attachTime': vol['attach_time'] or '',
'status': vol['attach_status'], }
res = {'deviceName': bdm['device_name'],
'ebs': ebs, }
mapping.append(res)
if mapping:
result['blockDeviceMapping'] = mapping
result['rootDeviceType'] = root_device_type
@staticmethod
def _format_instance_root_device_name(instance, result):
result['rootDeviceName'] = (instance.get('root_device_name') or
block_device.DEFAULT_ROOT_DEV_NAME)
@staticmethod
def _format_instance_type(instance, result):
instance_type = flavors.extract_flavor(instance)
result['instanceType'] = instance_type['name']
@staticmethod
def _format_group_set(instance, result):
security_group_names = []
if instance.get('security_groups'):
for security_group in instance['security_groups']:
security_group_names.append(security_group['name'])
result['groupSet'] = utils.convert_to_list_dict(
security_group_names, 'groupId')
def _format_instances(self, context, instance_id=None, use_v6=False,
instances_cache=None, **search_opts):
# TODO(termie): this method is poorly named as its name does not imply
# that it will be making a variety of database calls
# rather than simply formatting a bunch of instances that
# were handed to it
reservations = {}
if not instances_cache:
instances_cache = {}
# NOTE(vish): instance_id is an optional list of ids to filter by
if instance_id:
instances = []
for ec2_id in instance_id:
if ec2_id in instances_cache:
instances.append(instances_cache[ec2_id])
else:
try:
instance_uuid = ec2utils.ec2_inst_id_to_uuid(context,
ec2_id)
instance = self.compute_api.get(context, instance_uuid)
except exception.NotFound:
continue
instances.append(instance)
else:
try:
# always filter out deleted instances
search_opts['deleted'] = False
instances = self.compute_api.get_all(context,
search_opts=search_opts,
sort_dir='asc')
except exception.NotFound:
instances = []
for instance in instances:
if not context.is_admin:
if pipelib.is_vpn_image(instance['image_ref']):
continue
i = {}
instance_uuid = instance['uuid']
ec2_id = ec2utils.id_to_ec2_inst_id(instance_uuid)
i['instanceId'] = ec2_id
image_uuid = instance['image_ref']
i['imageId'] = ec2utils.glance_id_to_ec2_id(context, image_uuid)
self._format_kernel_id(context, instance, i, 'kernelId')
self._format_ramdisk_id(context, instance, i, 'ramdiskId')
i['instanceState'] = _state_description(
instance['vm_state'], instance['shutdown_terminate'])
fixed_ip = None
floating_ip = None
ip_info = ec2utils.get_ip_info_for_instance(context, instance)
if ip_info['fixed_ips']:
fixed_ip = ip_info['fixed_ips'][0]
if ip_info['floating_ips']:
floating_ip = ip_info['floating_ips'][0]
if ip_info['fixed_ip6s']:
i['dnsNameV6'] = ip_info['fixed_ip6s'][0]
if CONF.ec2_private_dns_show_ip:
i['privateDnsName'] = fixed_ip
else:
i['privateDnsName'] = instance['hostname']
i['privateIpAddress'] = fixed_ip
if floating_ip is not None:
i['ipAddress'] = floating_ip
i['dnsName'] = floating_ip
i['keyName'] = instance['key_name']
i['tagSet'] = []
for k, v in utils.instance_meta(instance).iteritems():
i['tagSet'].append({'key': k, 'value': v})
if context.is_admin:
i['keyName'] = '%s (%s, %s)' % (i['keyName'],
instance['project_id'],
instance['host'])
i['productCodesSet'] = utils.convert_to_list_dict([],
'product_codes')
self._format_instance_type(instance, i)
i['launchTime'] = instance['created_at']
i['amiLaunchIndex'] = instance['launch_index']
self._format_instance_root_device_name(instance, i)
self._format_instance_bdm(context, instance['uuid'],
i['rootDeviceName'], i)
host = instance['host']
zone = ec2utils.get_availability_zone_by_host(host)
i['placement'] = {'availabilityZone': zone}
if instance['reservation_id'] not in reservations:
r = {}
r['reservationId'] = instance['reservation_id']
r['ownerId'] = instance['project_id']
self._format_group_set(instance, r)
r['instancesSet'] = []
reservations[instance['reservation_id']] = r
reservations[instance['reservation_id']]['instancesSet'].append(i)
return list(reservations.values())
def describe_addresses(self, context, public_ip=None, **kwargs):
if public_ip:
floatings = []
for address in public_ip:
floating = self.network_api.get_floating_ip_by_address(context,
address)
floatings.append(floating)
else:
floatings = self.network_api.get_floating_ips_by_project(context)
addresses = [self._format_address(context, f) for f in floatings]
return {'addressesSet': addresses}
def _format_address(self, context, floating_ip):
ec2_id = None
if floating_ip['fixed_ip_id']:
fixed_id = floating_ip['fixed_ip_id']
fixed = self.network_api.get_fixed_ip(context, fixed_id)
if fixed['instance_uuid'] is not None:
ec2_id = ec2utils.id_to_ec2_inst_id(fixed['instance_uuid'])
address = {'public_ip': floating_ip['address'],
'instance_id': ec2_id}
if context.is_admin:
details = "%s (%s)" % (address['instance_id'],
floating_ip['project_id'])
address['instance_id'] = details
return address
def allocate_address(self, context, **kwargs):
LOG.audit(_("Allocate address"), context=context)
public_ip = self.network_api.allocate_floating_ip(context)
return {'publicIp': public_ip}
def release_address(self, context, public_ip, **kwargs):
LOG.audit(_('Release address %s'), public_ip, context=context)
self.network_api.release_floating_ip(context, address=public_ip)
return {'return': "true"}
def associate_address(self, context, instance_id, public_ip, **kwargs):
LOG.audit(_("Associate address %(public_ip)s to instance "
"%(instance_id)s"),
{'public_ip': public_ip, 'instance_id': instance_id},
context=context)
instance_uuid = ec2utils.ec2_inst_id_to_uuid(context, instance_id)
instance = self.compute_api.get(context, instance_uuid)
cached_ipinfo = ec2utils.get_ip_info_for_instance(context, instance)
fixed_ips = cached_ipinfo['fixed_ips'] + cached_ipinfo['fixed_ip6s']
if not fixed_ips:
msg = _('Unable to associate IP Address, no fixed_ips.')
raise exception.NoMoreFixedIps(message=msg)
# TODO(tr3buchet): this will associate the floating IP with the
# first fixed_ip an instance has. This should be
# changed to support specifying a particular fixed_ip if
# multiple exist but this may not apply to ec2..
if len(fixed_ips) > 1:
msg = _('multiple fixed_ips exist, using the first: %s')
LOG.warning(msg, fixed_ips[0])
self.network_api.associate_floating_ip(context, instance,
floating_address=public_ip,
fixed_address=fixed_ips[0])
return {'return': 'true'}
def disassociate_address(self, context, public_ip, **kwargs):
instance_id = self.network_api.get_instance_id_by_floating_address(
context, public_ip)
if instance_id:
instance = self.compute_api.get(context, instance_id)
LOG.audit(_("Disassociate address %s"), public_ip, context=context)
self.network_api.disassociate_floating_ip(context, instance,
address=public_ip)
return {'return': "true"}
def run_instances(self, context, **kwargs):
min_count = int(kwargs.get('min_count', 1))
client_token = kwargs.get('client_token')
if client_token:
resv_id = self._resv_id_from_token(context, client_token)
if resv_id:
# since this client_token already corresponds to a reservation
# id, this returns a proper response without creating a new
# instance
return self._format_run_instances(context, resv_id)
if kwargs.get('kernel_id'):
kernel = self._get_image(context, kwargs['kernel_id'])
kwargs['kernel_id'] = ec2utils.id_to_glance_id(context,
kernel['id'])
if kwargs.get('ramdisk_id'):
ramdisk = self._get_image(context, kwargs['ramdisk_id'])
kwargs['ramdisk_id'] = ec2utils.id_to_glance_id(context,
ramdisk['id'])
for bdm in kwargs.get('block_device_mapping', []):
_parse_block_device_mapping(bdm)
image = self._get_image(context, kwargs['image_id'])
image_uuid = ec2utils.id_to_glance_id(context, image['id'])
if image:
image_state = self._get_image_state(image)
else:
raise exception.ImageNotFoundEC2(image_id=kwargs['image_id'])
if image_state != 'available':
msg = _('Image must be available')
raise exception.ImageNotActive(message=msg)
(instances, resv_id) = self.compute_api.create(context,
instance_type=flavors.get_flavor_by_name(
kwargs.get('instance_type', None)),
image_href=image_uuid,
max_count=int(kwargs.get('max_count', min_count)),
min_count=min_count,
kernel_id=kwargs.get('kernel_id'),
ramdisk_id=kwargs.get('ramdisk_id'),
key_name=kwargs.get('key_name'),
user_data=kwargs.get('user_data'),
security_group=kwargs.get('security_group'),
availability_zone=kwargs.get('placement', {}).get(
'availability_zone'),
block_device_mapping=kwargs.get('block_device_mapping', {}))
instances = self._format_run_instances(context, resv_id)
if instances:
instance_ids = [i['instanceId'] for i in instances['instancesSet']]
self._add_client_token(context, client_token, instance_ids)
return instances
def _add_client_token(self, context, client_token, instance_ids):
"""Add client token to reservation ID mapping."""
if client_token:
for ec2_id in instance_ids:
instance_uuid = ec2utils.ec2_inst_id_to_uuid(context, ec2_id)
db.instance_system_metadata_update(
context, instance_uuid, {'EC2_client_token': client_token},
delete=False)
def _remove_client_token(self, context, instance_ids):
"""Remove client token to reservation ID mapping."""
for ec2_id in instance_ids:
instance_uuid = ec2utils.ec2_inst_id_to_uuid(context, ec2_id)
sys_meta = db.instance_system_metadata_get(context, instance_uuid)
if 'EC2_client_token' in sys_meta:
del sys_meta['EC2_client_token']
db.instance_system_metadata_update(context, instance_uuid,
sys_meta, delete=True)
def _resv_id_from_token(self, context, client_token):
"""Get reservation ID from db."""
resv_id = None
sys_metas = self.compute_api.get_all_system_metadata(
context, search_filts=[{'key': ['EC2_client_token']},
{'value': [client_token]}])
for sys_meta in sys_metas:
if sys_meta and sys_meta.get('value') == client_token:
instance = instance_obj.Instance.get_by_uuid(
context, sys_meta['instance_id'], expected_attrs=None)
resv_id = instance.get('reservation_id')
break
return resv_id
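# Illustrative sketch of the client-token idempotency flow implemented by
# the helpers above: run_instances stores the caller-supplied token as
# 'EC2_client_token' in each instance's system metadata; a retried
# request with the same token is found by _resv_id_from_token and gets
# back the original reservation instead of booting new instances.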
def _ec2_ids_to_instances(self, context, instance_id, objects=False):
"""Get all instances first, to prevent partial executions."""
instances = []
extra = ['system_metadata', 'metadata']
for ec2_id in instance_id:
validate_ec2_id(ec2_id)
instance_uuid = ec2utils.ec2_inst_id_to_uuid(context, ec2_id)
if objects:
instance = instance_obj.Instance.get_by_uuid(
context, instance_uuid, expected_attrs=extra)
else:
instance = self.compute_api.get(context, instance_uuid)
instances.append(instance)
return instances
def terminate_instances(self, context, instance_id, **kwargs):
"""Terminate each instance in instance_id, which is a list of ec2 ids.
instance_id is a kwarg so its name cannot be modified.
"""
previous_states = self._ec2_ids_to_instances(context, instance_id,
objects=True)
self._remove_client_token(context, instance_id)
LOG.debug(_("Going to start terminating instances"))
for instance in previous_states:
self.compute_api.delete(context, instance)
return self._format_terminate_instances(context,
instance_id,
previous_states)
def reboot_instances(self, context, instance_id, **kwargs):
"""instance_id is a list of instance ids."""
instances = self._ec2_ids_to_instances(context, instance_id,
objects=True)
LOG.audit(_("Reboot instance %r"), instance_id, context=context)
for instance in instances:
self.compute_api.reboot(context, instance, 'HARD')
return True
def stop_instances(self, context, instance_id, **kwargs):
"""Stop each instance in instance_id.
Here instance_id is a list of instance ids.
"""
instances = self._ec2_ids_to_instances(context, instance_id, True)
LOG.debug(_("Going to stop instances"))
for instance in instances:
self.compute_api.stop(context, instance)
return True
def start_instances(self, context, instance_id, **kwargs):
"""Start each instance in instance_id.
Here instance_id is a list of instance ids.
"""
instances = self._ec2_ids_to_instances(context, instance_id, True)
LOG.debug(_("Going to start instances"))
for instance in instances:
self.compute_api.start(context, instance)
return True
def _get_image(self, context, ec2_id):
try:
internal_id = ec2utils.ec2_id_to_id(ec2_id)
image = self.image_service.show(context, internal_id)
except (exception.InvalidEc2Id, exception.ImageNotFound):
filters = {'name': ec2_id}
images = self.image_service.detail(context, filters=filters)
try:
return images[0]
except IndexError:
raise exception.ImageNotFound(image_id=ec2_id)
image_type = ec2_id.split('-')[0]
if ec2utils.image_type(image.get('container_format')) != image_type:
raise exception.ImageNotFound(image_id=ec2_id)
return image
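# Illustrative sketch (ids are made up): _get_image first tries the EC2
# id path, so 'ami-00000001' is resolved through ec2_id_to_id; a value
# like 'my-image' fails that parse and falls back to a name filter. The
# final check rejects mismatches such as asking for an 'aki-...' id when
# the image's container format says it is a machine image.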
def _format_image(self, image):
"""Convert from format defined by GlanceImageService to S3 format."""
i = {}
image_type = ec2utils.image_type(image.get('container_format'))
ec2_id = ec2utils.image_ec2_id(image.get('id'), image_type)
name = image.get('name')
i['imageId'] = ec2_id
kernel_id = image['properties'].get('kernel_id')
if kernel_id:
i['kernelId'] = ec2utils.image_ec2_id(kernel_id, 'aki')
ramdisk_id = image['properties'].get('ramdisk_id')
if ramdisk_id:
i['ramdiskId'] = ec2utils.image_ec2_id(ramdisk_id, 'ari')
i['imageOwnerId'] = image.get('owner')
img_loc = image['properties'].get('image_location')
if img_loc:
i['imageLocation'] = img_loc
else:
i['imageLocation'] = "%s (%s)" % (img_loc, name)
i['name'] = name
if not name and img_loc:
# This should only occur for images registered with ec2 api
# prior to that api populating the glance name
i['name'] = img_loc
i['imageState'] = self._get_image_state(image)
i['description'] = image.get('description')
display_mapping = {'aki': 'kernel',
'ari': 'ramdisk',
'ami': 'machine'}
i['imageType'] = display_mapping.get(image_type)
i['isPublic'] = bool(image.get('is_public'))
i['architecture'] = image['properties'].get('architecture')
properties = image['properties']
root_device_name = block_device.properties_root_device_name(properties)
root_device_type = 'instance-store'
for bdm in properties.get('block_device_mapping', []):
if (block_device.strip_dev(bdm.get('device_name')) ==
block_device.strip_dev(root_device_name) and
('snapshot_id' in bdm or 'volume_id' in bdm) and
not bdm.get('no_device')):
root_device_type = 'ebs'
i['rootDeviceName'] = (root_device_name or
block_device.DEFAULT_ROOT_DEV_NAME)
i['rootDeviceType'] = root_device_type
_format_mappings(properties, i)
return i
def describe_images(self, context, image_id=None, **kwargs):
# NOTE: image_id is a list!
if image_id:
images = []
for ec2_id in image_id:
try:
image = self._get_image(context, ec2_id)
except exception.NotFound:
raise exception.ImageNotFound(image_id=ec2_id)
images.append(image)
else:
images = self.image_service.detail(context)
images = [self._format_image(i) for i in images]
return {'imagesSet': images}
def deregister_image(self, context, image_id, **kwargs):
LOG.audit(_("De-registering image %s"), image_id, context=context)
image = self._get_image(context, image_id)
internal_id = image['id']
self.image_service.delete(context, internal_id)
return True
def _register_image(self, context, metadata):
image = self.image_service.create(context, metadata)
image_type = ec2utils.image_type(image.get('container_format'))
image_id = ec2utils.image_ec2_id(image['id'], image_type)
return image_id
def register_image(self, context, image_location=None, **kwargs):
if image_location is None and kwargs.get('name'):
image_location = kwargs['name']
if image_location is None:
msg = _('imageLocation is required')
raise exception.MissingParameter(reason=msg)
metadata = {'properties': {'image_location': image_location}}
if kwargs.get('name'):
metadata['name'] = kwargs['name']
else:
metadata['name'] = image_location
if 'root_device_name' in kwargs:
metadata['properties']['root_device_name'] = kwargs.get(
'root_device_name')
mappings = [_parse_block_device_mapping(bdm) for bdm in
kwargs.get('block_device_mapping', [])]
if mappings:
metadata['properties']['block_device_mapping'] = mappings
image_id = self._register_image(context, metadata)
LOG.audit(_('Registered image %(image_location)s with id '
'%(image_id)s'),
{'image_location': image_location, 'image_id': image_id},
context=context)
return {'imageId': image_id}
def describe_image_attribute(self, context, image_id, attribute, **kwargs):
def _block_device_mapping_attribute(image, result):
_format_mappings(image['properties'], result)
def _launch_permission_attribute(image, result):
result['launchPermission'] = []
if image['is_public']:
result['launchPermission'].append({'group': 'all'})
def _root_device_name_attribute(image, result):
_prop_root_dev_name = block_device.properties_root_device_name
result['rootDeviceName'] = _prop_root_dev_name(image['properties'])
if result['rootDeviceName'] is None:
result['rootDeviceName'] = block_device.DEFAULT_ROOT_DEV_NAME
def _kernel_attribute(image, result):
kernel_id = image['properties'].get('kernel_id')
if kernel_id:
result['kernel'] = {
'value': ec2utils.image_ec2_id(kernel_id, 'aki')
}
def _ramdisk_attribute(image, result):
ramdisk_id = image['properties'].get('ramdisk_id')
if ramdisk_id:
result['ramdisk'] = {
'value': ec2utils.image_ec2_id(ramdisk_id, 'ari')
}
supported_attributes = {
'blockDeviceMapping': _block_device_mapping_attribute,
'launchPermission': _launch_permission_attribute,
'rootDeviceName': _root_device_name_attribute,
'kernel': _kernel_attribute,
'ramdisk': _ramdisk_attribute,
}
fn = supported_attributes.get(attribute)
if fn is None:
raise exception.InvalidAttribute(attr=attribute)
try:
image = self._get_image(context, image_id)
except exception.NotFound:
raise exception.ImageNotFound(image_id=image_id)
result = {'imageId': image_id}
fn(image, result)
return result
def modify_image_attribute(self, context, image_id, attribute,
operation_type, **kwargs):
# TODO(devcamcar): Support users and groups other than 'all'.
if attribute != 'launchPermission':
raise exception.InvalidAttribute(attr=attribute)
if 'user_group' not in kwargs:
msg = _('user or group not specified')
raise exception.MissingParameter(reason=msg)
if len(kwargs['user_group']) != 1 or kwargs['user_group'][0] != 'all':
msg = _('only group "all" is supported')
raise exception.InvalidParameterValue(message=msg)
if operation_type not in ['add', 'remove']:
msg = _('operation_type must be add or remove')
raise exception.InvalidParameterValue(message=msg)
LOG.audit(_("Updating image %s publicity"), image_id, context=context)
try:
image = self._get_image(context, image_id)
except exception.NotFound:
raise exception.ImageNotFound(image_id=image_id)
internal_id = image['id']
del image['id']
image['is_public'] = (operation_type == 'add')
try:
return self.image_service.update(context, internal_id, image)
except exception.ImageNotAuthorized:
msg = _('Not allowed to modify attributes for image %s') % image_id
raise exception.NotAuthorized(message=msg)
def update_image(self, context, image_id, **kwargs):
internal_id = ec2utils.ec2_id_to_id(image_id)
result = self.image_service.update(context, internal_id, dict(kwargs))
return result
# TODO(yamahata): race condition
# At the moment there is no way to prevent others from
# manipulating instances/volumes/snapshots.
# As other code doesn't take it into consideration, here we don't
# care of it for now. Ostrich algorithm
def create_image(self, context, instance_id, **kwargs):
# NOTE(yamahata): name/description are ignored by register_image(),
# so handle them here
no_reboot = kwargs.get('no_reboot', False)
name = kwargs.get('name')
validate_ec2_id(instance_id)
ec2_instance_id = instance_id
instance_uuid = ec2utils.ec2_inst_id_to_uuid(context, ec2_instance_id)
instance = self.compute_api.get(context, instance_uuid,
want_objects=True)
bdms = self.compute_api.get_instance_bdms(context, instance)
# CreateImage only supported for the analogue of EBS-backed instances
if not self.compute_api.is_volume_backed_instance(context, instance,
bdms):
msg = _("Invalid value '%(ec2_instance_id)s' for instanceId. "
"Instance does not have a volume attached at root "
"(%(root)s)") % {'root': instance['root_device_name'],
'ec2_instance_id': ec2_instance_id}
raise exception.InvalidParameterValue(err=msg)
# stop the instance if necessary
restart_instance = False
if not no_reboot:
vm_state = instance['vm_state']
# if the instance is in subtle state, refuse to proceed.
if vm_state not in (vm_states.ACTIVE, vm_states.STOPPED):
raise exception.InstanceNotRunning(instance_id=ec2_instance_id)
if vm_state == vm_states.ACTIVE:
restart_instance = True
self.compute_api.stop(context, instance)
# wait until the instance has really stopped
start_time = time.time()
while vm_state != vm_states.STOPPED:
time.sleep(1)
instance = self.compute_api.get(context, instance_uuid,
want_objects=True)
vm_state = instance['vm_state']
# NOTE(yamahata): timeout and error. 1 hour for now for safety.
# Is it too short/long?
# Or is there any better way?
timeout = 1 * 60 * 60
if time.time() > start_time + timeout:
err = _("Couldn't stop instance within %d sec") % timeout
raise exception.InternalError(message=err)
glance_uuid = instance['image_ref']
ec2_image_id = ec2utils.glance_id_to_ec2_id(context, glance_uuid)
src_image = self._get_image(context, ec2_image_id)
image_meta = dict(src_image)
def _unmap_id_property(properties, name):
if properties[name]:
properties[name] = ec2utils.id_to_glance_id(context,
properties[name])
# ensure the ID properties are unmapped back to the glance UUID
_unmap_id_property(image_meta['properties'], 'kernel_id')
_unmap_id_property(image_meta['properties'], 'ramdisk_id')
# meaningful image name
name_map = dict(instance=instance['uuid'], now=timeutils.isotime())
name = name or _('image of %(instance)s at %(now)s') % name_map
new_image = self.compute_api.snapshot_volume_backed(context,
instance,
image_meta,
name)
ec2_id = ec2utils.glance_id_to_ec2_id(context, new_image['id'])
if restart_instance:
self.compute_api.start(context, instance)
return {'imageId': ec2_id}
def create_tags(self, context, **kwargs):
"""Add tags to a resource
Returns True on success, error on failure.
:param context: context under which the method is called
"""
resources = kwargs.get('resource_id', None)
tags = kwargs.get('tag', None)
if resources is None or tags is None:
msg = _('resource_id and tag are required')
raise exception.MissingParameter(reason=msg)
if not isinstance(resources, (tuple, list, set)):
msg = _('Expecting a list of resources')
raise exception.InvalidParameterValue(message=msg)
for r in resources:
if ec2utils.resource_type_from_id(context, r) != 'instance':
msg = _('Only instances implemented')
raise exception.InvalidParameterValue(message=msg)
if not isinstance(tags, (tuple, list, set)):
msg = _('Expecting a list of tagSets')
raise exception.InvalidParameterValue(message=msg)
metadata = {}
for tag in tags:
if not isinstance(tag, dict):
err = _('Expecting tagSet to be key/value pairs')
raise exception.InvalidParameterValue(message=err)
key = tag.get('key', None)
val = tag.get('value', None)
if key is None or val is None:
err = _('Expecting both key and value to be set')
raise exception.InvalidParameterValue(message=err)
metadata[key] = val
for ec2_id in resources:
instance_uuid = ec2utils.ec2_inst_id_to_uuid(context, ec2_id)
instance = self.compute_api.get(context, instance_uuid)
self.compute_api.update_instance_metadata(context,
instance, metadata)
return True
def delete_tags(self, context, **kwargs):
"""Delete tags
Returns True on success, error on failure.
:param context: context under which the method is called
"""
resources = kwargs.get('resource_id', None)
tags = kwargs.get('tag', None)
if resources is None or tags is None:
msg = _('resource_id and tag are required')
raise exception.MissingParameter(reason=msg)
if not isinstance(resources, (tuple, list, set)):
msg = _('Expecting a list of resources')
raise exception.InvalidParameterValue(message=msg)
for r in resources:
if ec2utils.resource_type_from_id(context, r) != 'instance':
msg = _('Only instances implemented')
raise exception.InvalidParameterValue(message=msg)
if not isinstance(tags, (tuple, list, set)):
msg = _('Expecting a list of tagSets')
raise exception.InvalidParameterValue(message=msg)
for ec2_id in resources:
instance_uuid = ec2utils.ec2_inst_id_to_uuid(context, ec2_id)
instance = self.compute_api.get(context, instance_uuid)
for tag in tags:
if not isinstance(tag, dict):
msg = _('Expecting tagSet to be key/value pairs')
raise exception.InvalidParameterValue(message=msg)
key = tag.get('key', None)
if key is None:
msg = _('Expecting key to be set')
raise exception.InvalidParameterValue(message=msg)
self.compute_api.delete_instance_metadata(context,
instance, key)
return True
def describe_tags(self, context, **kwargs):
"""List tags
Returns a dict with a single key 'tagSet' on success, error on failure.
:param context: context under which the method is called
"""
filters = kwargs.get('filter', None)
search_filts = []
if filters:
for filter_block in filters:
key_name = filter_block.get('name', None)
val = filter_block.get('value', None)
if val:
if isinstance(val, dict):
val = val.values()
if not isinstance(val, (tuple, list, set)):
val = (val,)
if key_name:
search_block = {}
if key_name in ('resource_id', 'resource-id'):
search_block['resource_id'] = []
for res_id in val:
search_block['resource_id'].append(
ec2utils.ec2_inst_id_to_uuid(context, res_id))
elif key_name in ['key', 'value']:
search_block[key_name] = \
[ec2utils.regex_from_ec2_regex(v) for v in val]
elif key_name in ('resource_type', 'resource-type'):
for res_type in val:
if res_type != 'instance':
raise exception.InvalidParameterValue(
message=_('Only instances implemented'))
search_block[key_name] = 'instance'
if search_block:
search_filts.append(search_block)
ts = []
for tag in self.compute_api.get_all_instance_metadata(context,
search_filts):
ts.append({
'resource_id': ec2utils.id_to_ec2_inst_id(tag['instance_id']),
'resource_type': 'instance',
'key': tag['key'],
'value': tag['value']
})
return {"tagSet": ts}
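# Illustrative sketch: a DescribeTags filter block such as
#     {'name': 'resource-id', 'value': {'1': 'i-00000001'}}
# is normalised above into
#     {'resource_id': ['<instance uuid>']}
# before being handed to compute_api.get_all_instance_metadata.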
class EC2SecurityGroupExceptions(object):
@staticmethod
def raise_invalid_property(msg):
raise exception.InvalidParameterValue(message=msg)
@staticmethod
def raise_group_already_exists(msg):
raise exception.SecurityGroupExists(message=msg)
@staticmethod
def raise_invalid_group(msg):
raise exception.InvalidGroup(reason=msg)
@staticmethod
def raise_invalid_cidr(cidr, decoding_exception=None):
if decoding_exception:
raise decoding_exception
else:
raise exception.InvalidParameterValue(message=_("Invalid CIDR"))
@staticmethod
def raise_over_quota(msg):
raise exception.SecurityGroupLimitExceeded(msg)
@staticmethod
def raise_not_found(msg):
pass
class CloudSecurityGroupNovaAPI(EC2SecurityGroupExceptions,
compute_api.SecurityGroupAPI):
pass
class CloudSecurityGroupNeutronAPI(EC2SecurityGroupExceptions,
neutron_driver.SecurityGroupAPI):
pass
def get_cloud_security_group_api():
if cfg.CONF.security_group_api.lower() == 'nova':
return CloudSecurityGroupNovaAPI()
elif cfg.CONF.security_group_api.lower() in ('neutron', 'quantum'):
return CloudSecurityGroupNeutronAPI()
else:
raise NotImplementedError()
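# Illustrative sketch: with security_group_api = 'neutron' (or 'quantum')
# in nova.conf this factory returns CloudSecurityGroupNeutronAPI, with
# 'nova' it returns CloudSecurityGroupNovaAPI; either way the EC2 layer
# raises the EC2-flavoured exceptions mixed in above. Any other value
# falls through to NotImplementedError.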
|
TieWei/nova
|
nova/api/ec2/cloud.py
|
Python
|
apache-2.0
| 81,753
|
from temboo.Library.PayPal.AdaptivePayments.ConvertCurrency import ConvertCurrency, ConvertCurrencyInputSet, ConvertCurrencyResultSet, ConvertCurrencyChoreographyExecution
from temboo.Library.PayPal.AdaptivePayments.PaymentDetails import PaymentDetails, PaymentDetailsInputSet, PaymentDetailsResultSet, PaymentDetailsChoreographyExecution
|
jordanemedlock/psychtruths
|
temboo/core/Library/PayPal/AdaptivePayments/__init__.py
|
Python
|
apache-2.0
| 339
|
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tank.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
zxl200406/minos
|
tank/manage.py
|
Python
|
apache-2.0
| 225
|
# -*- coding: utf-8 -*-
###############################################################################
#
# GetRelatedWords
# Retrieves the related words of a given word.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class GetRelatedWords(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the GetRelatedWords Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(GetRelatedWords, self).__init__(temboo_session, '/Library/Wordnik/Word/GetRelatedWords')
def new_input_set(self):
return GetRelatedWordsInputSet()
def _make_result_set(self, result, path):
return GetRelatedWordsResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return GetRelatedWordsChoreographyExecution(session, exec_id, path)
class GetRelatedWordsInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the GetRelatedWords
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_APIKey(self, value):
"""
Set the value of the APIKey input for this Choreo. ((required, string) The API Key from Wordnik.)
"""
super(GetRelatedWordsInputSet, self)._set_input('APIKey', value)
def set_Cannonical(self, value):
"""
Set the value of the Cannonical input for this Choreo. ((optional, string) Deprecated (retained for backward compatibility only).)
"""
super(GetRelatedWordsInputSet, self)._set_input('Cannonical', value)
def set_LimitPerType(self, value):
"""
        Set the value of the LimitPerType input for this Choreo. ((optional, integer) Limits the number of results returned for each relationship type included in the output. Defaults to 10.)
"""
super(GetRelatedWordsInputSet, self)._set_input('LimitPerType', value)
def set_RelationshipType(self, value):
"""
        Set the value of the RelationshipType input for this Choreo. ((optional, string) Limits the total results per type of relationship. Acceptable values include adjective, noun, etc. See docs for complete list.)
"""
super(GetRelatedWordsInputSet, self)._set_input('RelationshipType', value)
def set_ResponseType(self, value):
"""
Set the value of the ResponseType input for this Choreo. ((optional, string) Response can be either JSON or XML. Defaults to JSON.)
"""
super(GetRelatedWordsInputSet, self)._set_input('ResponseType', value)
def set_UseCanonical(self, value):
"""
Set the value of the UseCanonical input for this Choreo. ((optional, boolean) If true will try to return the correct word root ('cats' -> 'cat'). If false returns exactly what was requested. Defaults to false.)
"""
super(GetRelatedWordsInputSet, self)._set_input('UseCanonical', value)
def set_Word(self, value):
"""
Set the value of the Word input for this Choreo. ((required, string) The word you want to look up on Wordnik.)
"""
super(GetRelatedWordsInputSet, self)._set_input('Word', value)
class GetRelatedWordsResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the GetRelatedWords Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. (The response from Wordnik.)
"""
return self._output.get('Response', None)
class GetRelatedWordsChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return GetRelatedWordsResultSet(response, path)
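# A hedged usage sketch (assumes a valid Temboo account and Wordnik API key;
# the credential strings below are placeholders, and the helper name is
# hypothetical rather than part of the generated SDK file).
def _example_get_related_words():
    from temboo.core.session import TembooSession
    session = TembooSession('ACCOUNT_NAME', 'APP_KEY_NAME', 'APP_KEY_VALUE')
    choreo = GetRelatedWords(session)
    inputs = choreo.new_input_set()
    inputs.set_APIKey('WORDNIK_API_KEY')  # placeholder key
    inputs.set_Word('cat')
    inputs.set_UseCanonical('true')
    results = choreo.execute_with_results(inputs)
    # Response defaults to JSON, so it can be parsed with the helper above.
    return results.getJSONFromString(results.get_Response())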
|
jordanemedlock/psychtruths
|
temboo/core/Library/Wordnik/Word/GetRelatedWords.py
|
Python
|
apache-2.0
| 4,782
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
from airflow.contrib.hooks.qubole_hook import QuboleHook
class QuboleOperator(BaseOperator):
"""
Execute tasks (commands) on QDS (https://qubole.com).
:param qubole_conn_id: Connection id which consists of qds auth_token
:type qubole_conn_id: str
kwargs:
:command_type: type of command to be executed, e.g. hivecmd, shellcmd, hadoopcmd
:tags: array of tags to be assigned with the command
:cluster_label: cluster label on which the command will be executed
:name: name to be given to command
**Arguments specific to command types**
hivecmd:
:query: inline query statement
:script_location: s3 location containing query statement
:sample_size: size of sample in bytes on which to run query
:macros: macro values which were used in query
prestocmd:
:query: inline query statement
:script_location: s3 location containing query statement
:macros: macro values which were used in query
hadoopcmd:
        :sub_command: must be one of these ["jar", "s3distcp", "streaming"] followed by
            1 or more args
shellcmd:
:script: inline command with args
:script_location: s3 location containing query statement
:files: list of files in s3 bucket as file1,file2 format. These files will be
copied into the working directory where the qubole command is being
executed.
        :archives: list of archives in s3 bucket as archive1,archive2 format. These
            will be unarchived into the working directory where the qubole command is
            being executed.
:parameters: any extra args which need to be passed to script (only when
script_location is supplied)
pigcmd:
:script: inline query statement (latin_statements)
:script_location: s3 location containing pig query
        :parameters: any extra args which need to be passed to script (only when
            script_location is supplied)
sparkcmd:
:program: the complete Spark Program in Scala, SQL, Command, R, or Python
        :cmdline: spark-submit command line, all required information must be specified
            in cmdline itself.
:sql: inline sql query
:script_location: s3 location containing query statement
:language: language of the program, Scala, SQL, Command, R, or Python
        :app_id: ID of a Spark job server app
:arguments: spark-submit command line arguments
:user_program_arguments: arguments that the user program takes in
:macros: macro values which were used in query
dbtapquerycmd:
:db_tap_id: data store ID of the target database, in Qubole.
:query: inline query statement
:macros: macro values which were used in query
dbexportcmd:
        :mode: 1 (simple), 2 (advanced)
:hive_table: Name of the hive table
:partition_spec: partition specification for Hive table.
:dbtap_id: data store ID of the target database, in Qubole.
:db_table: name of the db table
:db_update_mode: allowinsert or updateonly
:db_update_keys: columns used to determine the uniqueness of rows
:export_dir: HDFS/S3 location from which data will be exported.
:fields_terminated_by: hex of the char used as column separator in the dataset
dbimportcmd:
        :mode: 1 (simple), 2 (advanced)
:hive_table: Name of the hive table
:dbtap_id: data store ID of the target database, in Qubole.
:db_table: name of the db table
:where_clause: where clause, if any
:parallelism: number of parallel db connections to use for extracting data
:extract_query: SQL query to extract data from db. $CONDITIONS must be part
of the where clause.
        :boundary_query: Query to be used to get the range of row IDs to be extracted
:split_column: Column used as row ID to split data into ranges (mode 2)
    .. note:: The following fields are template-supported: ``query``, ``script_location``,
``sub_command``, ``script``, ``files``, ``archives``, ``program``, ``cmdline``,
``sql``, ``where_clause``, ``extract_query``, ``boundary_query``, ``macros``,
``tags``, ``name``, ``parameters``. You can also use ``.txt`` files for template
driven use cases.
.. note:: In QuboleOperator there is a default handler for task failures and retries,
which generally kills the command running at QDS for the corresponding task
instance. You can override this behavior by providing your own failure and retry
handler in task definition.
"""
template_fields = ('query', 'script_location', 'sub_command', 'script', 'files',
'archives', 'program', 'cmdline', 'sql', 'where_clause', 'tags',
'extract_query', 'boundary_query', 'macros', 'name', 'parameters')
template_ext = ('.txt',)
ui_color = '#3064A1'
ui_fgcolor = '#fff'
@apply_defaults
def __init__(self, qubole_conn_id="qubole_default", *args, **kwargs):
self.args = args
self.kwargs = kwargs
self.kwargs['qubole_conn_id'] = qubole_conn_id
self.hook = QuboleHook(*self.args, **self.kwargs)
super(QuboleOperator, self).__init__(*args, **kwargs)
if self.on_failure_callback is None:
self.on_failure_callback = QuboleHook.handle_failure_retry
if self.on_retry_callback is None:
self.on_retry_callback = QuboleHook.handle_failure_retry
def execute(self, context):
# Reinitiating the hook, as some template fields might have changed
self.hook = QuboleHook(*self.args, **self.kwargs)
return self.hook.execute(context)
def on_kill(self, ti):
self.hook.kill(ti)
def get_results(self, ti=None, fp=None, inline=True, delim=None, fetch=True):
return self.hook.get_results(ti, fp, inline, delim, fetch)
def get_log(self, ti):
return self.hook.get_log(ti)
def get_jobs_id(self, ti):
return self.hook.get_jobs_id(ti)
def __getattribute__(self, name):
if name in QuboleOperator.template_fields:
if name in self.kwargs:
return self.kwargs[name]
else:
return ''
else:
return object.__getattribute__(self, name)
def __setattr__(self, name, value):
if name in QuboleOperator.template_fields:
self.kwargs[name] = value
else:
object.__setattr__(self, name, value)
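# A hedged wiring sketch (assumes a working Airflow install with a
# "qubole_default" connection; the task parameters below are illustrative
# values, not taken from the Airflow docs).
def _example_qubole_task(dag):
    # Builds a hivecmd task; extra kwargs such as query and cluster_label are
    # forwarded to QuboleHook, while task_id and dag go to BaseOperator.
    return QuboleOperator(
        task_id='hive_show_tables',
        command_type='hivecmd',
        query='show tables',
        cluster_label='default',
        dag=dag)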
|
mtdewulf/incubator-airflow
|
airflow/contrib/operators/qubole_operator.py
|
Python
|
apache-2.0
| 7,509
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test RangeDataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from tensorflow.contrib.data.python.ops import dataset_ops
from tensorflow.contrib.data.python.ops import enumerate_ops
from tensorflow.contrib.data.python.ops import iterator_ops as contrib_iterator_ops
from tensorflow.python.data.ops import iterator_ops
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_dataset_ops
from tensorflow.python.ops import io_ops
from tensorflow.python.ops import parsing_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import gfile
from tensorflow.python.platform import test
from tensorflow.python.training import saver as saver_lib
class RangeDatasetTest(test.TestCase):
def tearDown(self):
# Remove all checkpoint files.
prefix = self._iterator_checkpoint_prefix()
pattern = prefix + "*"
files = gfile.Glob(pattern)
map(gfile.Remove, files)
def testStop(self):
stop = array_ops.placeholder(dtypes.int64, shape=[])
iterator = dataset_ops.Dataset.range(stop).make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
sess.run(init_op, feed_dict={stop: 5})
for i in range(5):
self.assertEqual(i, sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testStartStop(self):
start = array_ops.placeholder(dtypes.int64, shape=[])
stop = array_ops.placeholder(dtypes.int64, shape=[])
iterator = dataset_ops.Dataset.range(start,
stop).make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
sess.run(init_op, feed_dict={start: 2, stop: 5})
for i in range(2, 5):
self.assertEqual(i, sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testStartStopStep(self):
start = array_ops.placeholder(dtypes.int64, shape=[])
stop = array_ops.placeholder(dtypes.int64, shape=[])
step = array_ops.placeholder(dtypes.int64, shape=[])
iterator = dataset_ops.Dataset.range(start, stop,
step).make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
sess.run(init_op, feed_dict={start: 2, stop: 10, step: 2})
for i in range(2, 10, 2):
self.assertEqual(i, sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testZeroStep(self):
start = array_ops.placeholder(dtypes.int64, shape=[])
stop = array_ops.placeholder(dtypes.int64, shape=[])
step = array_ops.placeholder(dtypes.int64, shape=[])
iterator = dataset_ops.Dataset.range(start, stop,
step).make_initializable_iterator()
init_op = iterator.initializer
with self.test_session() as sess:
with self.assertRaises(errors.InvalidArgumentError):
sess.run(init_op, feed_dict={start: 2, stop: 10, step: 0})
def testNegativeStep(self):
start = array_ops.placeholder(dtypes.int64, shape=[])
stop = array_ops.placeholder(dtypes.int64, shape=[])
step = array_ops.placeholder(dtypes.int64, shape=[])
iterator = dataset_ops.Dataset.range(start, stop,
step).make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
sess.run(init_op, feed_dict={start: 2, stop: 10, step: -1})
# This for loop is a no-op but will ensure that the implementation is
# consistent with range if it ever changes.
for i in range(2, 10, -1):
self.assertEqual(i, sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testStopLessThanStart(self):
start = array_ops.placeholder(dtypes.int64, shape=[])
stop = array_ops.placeholder(dtypes.int64, shape=[])
iterator = dataset_ops.Dataset.range(start,
stop).make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
sess.run(init_op, feed_dict={start: 10, stop: 2})
# This for loop is a no-op but will ensure that the implementation is
# consistent with range if it ever changes.
for i in range(10, 2):
self.assertEqual(i, sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testStopLessThanStartWithPositiveStep(self):
start = array_ops.placeholder(dtypes.int64, shape=[])
stop = array_ops.placeholder(dtypes.int64, shape=[])
step = array_ops.placeholder(dtypes.int64, shape=[])
iterator = dataset_ops.Dataset.range(start, stop,
step).make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
sess.run(init_op, feed_dict={start: 10, stop: 2, step: 2})
# This for loop is a no-op but will ensure that the implementation is
# consistent with range if it ever changes.
for i in range(10, 2, 2):
self.assertEqual(i, sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testStopLessThanStartWithNegativeStep(self):
start = array_ops.placeholder(dtypes.int64, shape=[])
stop = array_ops.placeholder(dtypes.int64, shape=[])
step = array_ops.placeholder(dtypes.int64, shape=[])
iterator = dataset_ops.Dataset.range(start, stop,
step).make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
sess.run(init_op, feed_dict={start: 10, stop: 2, step: -1})
for i in range(10, 2, -1):
self.assertEqual(i, sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testEnumerateDataset(self):
components = (["a", "b"], [1, 2], [37.0, 38])
start = constant_op.constant(20, dtype=dtypes.int64)
iterator = (dataset_ops.Dataset.from_tensor_slices(components).apply(
enumerate_ops.enumerate_dataset(start)).make_initializable_iterator())
init_op = iterator.initializer
get_next = iterator.get_next()
self.assertEqual(dtypes.int64, get_next[0].dtype)
self.assertEqual((), get_next[0].shape)
self.assertEqual([tensor_shape.TensorShape([])] * 3,
[t.shape for t in get_next[1]])
with self.test_session() as sess:
sess.run(init_op)
self.assertEqual((20, (b"a", 1, 37.0)), sess.run(get_next))
self.assertEqual((21, (b"b", 2, 38.0)), sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def _iterator_checkpoint_prefix(self):
return os.path.join(self.get_temp_dir(), "iterator")
def _save_op(self, iterator_resource):
iterator_state_variant = gen_dataset_ops.serialize_iterator(
iterator_resource)
save_op = io_ops.write_file(
self._iterator_checkpoint_prefix(),
parsing_ops.serialize_tensor(iterator_state_variant))
return save_op
def _restore_op(self, iterator_resource):
iterator_state_variant = parsing_ops.parse_tensor(
io_ops.read_file(self._iterator_checkpoint_prefix()), dtypes.variant)
restore_op = gen_dataset_ops.deserialize_iterator(iterator_resource,
iterator_state_variant)
return restore_op
def testSaveRestore(self):
def _build_graph(start, stop):
iterator = dataset_ops.Dataset.range(start,
stop).make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
save_op = self._save_op(iterator._iterator_resource)
restore_op = self._restore_op(iterator._iterator_resource)
return init_op, get_next, save_op, restore_op
# Saving and restoring in different sessions.
start = 2
stop = 10
break_point = 5
with ops.Graph().as_default() as g:
init_op, get_next, save_op, _ = _build_graph(start, stop)
with self.test_session(graph=g) as sess:
sess.run(variables.global_variables_initializer())
sess.run(init_op)
for i in range(start, break_point):
self.assertEqual(i, sess.run(get_next))
sess.run(save_op)
with ops.Graph().as_default() as g:
init_op, get_next, _, restore_op = _build_graph(start, stop)
with self.test_session(graph=g) as sess:
sess.run(init_op)
sess.run(restore_op)
for i in range(break_point, stop):
self.assertEqual(i, sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
# Saving and restoring in same session.
with ops.Graph().as_default() as g:
init_op, get_next, save_op, restore_op = _build_graph(start, stop)
with self.test_session(graph=g) as sess:
sess.run(variables.global_variables_initializer())
sess.run(init_op)
for i in range(start, break_point):
self.assertEqual(i, sess.run(get_next))
sess.run(save_op)
sess.run(restore_op)
for i in range(break_point, stop):
self.assertEqual(i, sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testSaveRestoreUsingSaverFromMetaGraph(self):
def _build_graph(start, stop):
iterator = dataset_ops.Dataset.range(start,
stop).make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
ops.add_to_collection("iterator_ops", init_op)
ops.add_to_collection("iterator_ops", get_next)
saveable_obj = contrib_iterator_ops.make_saveable_from_iterator(iterator)
# Add the SaveableObject to the `SAVEABLE_OBJECTS` collection
# so that it can be automatically picked up by the Saver.
ops.add_to_collection(ops.GraphKeys.SAVEABLE_OBJECTS, saveable_obj)
saver = saver_lib.Saver()
return init_op, get_next, saver
start = 2
stop = 10
break_point = 5
path = self._iterator_checkpoint_prefix()
meta_filename = path + ".meta"
# Execute input pipeline for a few steps and save iterator state.
with ops.Graph().as_default() as g:
init_op, get_next, saver = _build_graph(start, stop)
with self.test_session(graph=g) as sess:
sess.run(variables.global_variables_initializer())
sess.run(init_op)
for i in range(start, break_point):
self.assertEqual(i, sess.run(get_next))
saver.save(sess, path)
# Build the saver from the MetaGraph using import_meta_graph and
# check that the iterator state is restored.
with ops.Graph().as_default() as g:
saver = saver_lib.import_meta_graph(meta_filename)
init_op, get_next = ops.get_collection("iterator_ops")
with self.test_session(graph=g) as sess:
saver.restore(sess, saver_lib.latest_checkpoint(self.get_temp_dir()))
for i in range(break_point, stop):
self.assertEqual(i, sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testSaveRestoreUsingBuiltSaver(self):
def _build_graph(start, stop):
iterator = dataset_ops.Dataset.range(start,
stop).make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
ops.add_to_collection("iterator_ops", init_op)
ops.add_to_collection("iterator_ops", get_next)
# Add the SaveableObject to the `SAVEABLE_OBJECTS` collection
# so that it can be automatically picked up by the Saver.
saveable_obj = contrib_iterator_ops.make_saveable_from_iterator(iterator)
ops.add_to_collection(ops.GraphKeys.SAVEABLE_OBJECTS, saveable_obj)
saver = saver_lib.Saver()
return init_op, get_next, saver
start = 2
stop = 10
stop_new = 15
break_point = 5
path = self._iterator_checkpoint_prefix()
# Execute input pipeline for a few steps and save iterator state.
with ops.Graph().as_default() as g:
init_op, get_next, saver = _build_graph(start, stop)
with self.test_session(graph=g) as sess:
sess.run(variables.global_variables_initializer())
sess.run(init_op)
for i in range(start, break_point):
self.assertEqual(i, sess.run(get_next))
saver.save(sess, path)
# Manually build a modified Graph and Saver instead of importing
# MetaGraph and verify that original iterator state gets restored.
with ops.Graph().as_default() as g:
init_op, get_next, saver = _build_graph(start, stop_new)
with self.test_session(graph=g) as sess:
saver.restore(sess, saver_lib.latest_checkpoint(self.get_temp_dir()))
for i in range(break_point, stop):
self.assertEqual(i, sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testSaveRestoreUsingSaverThenInit(self):
def _build_graph(start, stop):
iterator = dataset_ops.Dataset.range(start,
stop).make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
ops.add_to_collection("iterator_ops", init_op)
ops.add_to_collection("iterator_ops", get_next)
# Add the SaveableObject to the `SAVEABLE_OBJECTS` collection
# so that it can be automatically picked up by the Saver.
saveable_obj = contrib_iterator_ops.make_saveable_from_iterator(iterator)
ops.add_to_collection(ops.GraphKeys.SAVEABLE_OBJECTS, saveable_obj)
saver = saver_lib.Saver()
return init_op, get_next, saver
start = 2
stop = 10
stop_new = 15
break_point = 5
path = self._iterator_checkpoint_prefix()
# Execute input pipeline for a few steps and save iterator state.
with ops.Graph().as_default() as g:
init_op, get_next, saver = _build_graph(start, stop)
with self.test_session(graph=g) as sess:
sess.run(variables.global_variables_initializer())
sess.run(init_op)
for i in range(start, break_point):
self.assertEqual(i, sess.run(get_next))
saver.save(sess, path)
    # Restore the iterator state and then call init_op for the iterator, and
    # verify that re-initialization overrides the restored state.
with ops.Graph().as_default() as g:
init_op, get_next, saver = _build_graph(start, stop_new)
with self.test_session(graph=g) as sess:
saver.restore(sess, saver_lib.latest_checkpoint(self.get_temp_dir()))
sess.run(init_op)
for i in range(start, stop_new):
self.assertEqual(i, sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testRestoreWithoutBuildingDatasetGraph(self):
def _build_graph(start, stop, num_epochs):
dataset = dataset_ops.Dataset.range(start, stop).repeat(num_epochs)
iterator = dataset.make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
save_op = self._save_op(iterator._iterator_resource)
restore_op = self._restore_op(iterator._iterator_resource)
return init_op, get_next, save_op, restore_op
# Saving and restoring in different sessions.
start = 2
stop = 10
num_epochs = 5
break_point = 5
break_epoch = 3
with ops.Graph().as_default() as g:
init_op, get_next, save_op, _ = _build_graph(start, stop, num_epochs)
with self.test_session(graph=g) as sess:
sess.run(variables.global_variables_initializer())
sess.run(init_op)
for _ in range(break_epoch):
for i in range(start, stop):
self.assertEqual(i, sess.run(get_next))
for i in range(start, break_point):
self.assertEqual(i, sess.run(get_next))
sess.run(save_op)
with ops.Graph().as_default() as g:
# Create an empty IteratorResource and restore the Iterator into it.
output_types = dtypes.int64
output_shapes = tensor_shape.scalar()
iterator = iterator_ops.Iterator.from_structure(output_types,
output_shapes)
restore_op = self._restore_op(iterator._iterator_resource)
get_next = iterator.get_next()
with self.test_session(graph=g) as sess:
sess.run(restore_op)
for i in range(break_point, stop):
self.assertEqual(i, sess.run(get_next))
for _ in range(break_epoch + 1, num_epochs):
for i in range(start, stop):
self.assertEqual(i, sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testRestoreInModifiedGraph(self):
def _build_graph(start, stop):
dataset = dataset_ops.Dataset.range(start, stop)
iterator = dataset.make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
save_op = self._save_op(iterator._iterator_resource)
restore_op = self._restore_op(iterator._iterator_resource)
return init_op, get_next, save_op, restore_op
# Saving and restoring in different sessions.
start = 2
stop = 10
stop_1 = 8
break_point = 5
with ops.Graph().as_default() as g:
init_op, get_next, save_op, _ = _build_graph(start, stop)
with self.test_session(graph=g) as sess:
sess.run(variables.global_variables_initializer())
sess.run(init_op)
for i in range(start, break_point):
self.assertEqual(i, sess.run(get_next))
sess.run(save_op)
with ops.Graph().as_default() as g:
# Intentionally build a graph with a different value for stop to make sure
# the original dataset graph is actually getting loaded.
init_op, get_next, _, restore_op = _build_graph(start, stop_1)
with self.test_session(graph=g) as sess:
sess.run(restore_op)
for i in range(break_point, stop):
self.assertEqual(i, sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testInitThenRestore(self):
# Note: Calling init_op before restore_op is redundant. This test just makes
# sure we do not fail if restore is called on an already initialized
# iterator resource.
def _build_graph(start, stop):
dataset = dataset_ops.Dataset.range(start, stop)
iterator = dataset.make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
save_op = self._save_op(iterator._iterator_resource)
restore_op = self._restore_op(iterator._iterator_resource)
return init_op, get_next, save_op, restore_op
# Saving and restoring in different sessions.
start = 2
stop = 10
break_point = 5
with ops.Graph().as_default() as g:
init_op, get_next, save_op, _ = _build_graph(start, stop)
with self.test_session(graph=g) as sess:
sess.run(variables.global_variables_initializer())
sess.run(init_op)
for i in range(start, break_point):
self.assertEqual(i, sess.run(get_next))
sess.run(save_op)
with ops.Graph().as_default() as g:
init_op, get_next, _, restore_op = _build_graph(start, stop)
with self.test_session(graph=g) as sess:
sess.run(init_op)
sess.run(restore_op)
for i in range(break_point, stop):
self.assertEqual(i, sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testMultipleSaves(self):
def _build_graph(start, stop):
iterator = dataset_ops.Dataset.range(start,
stop).make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
save_op = self._save_op(iterator._iterator_resource)
restore_op = self._restore_op(iterator._iterator_resource)
return init_op, get_next, save_op, restore_op
start = 2
stop = 10
break_point1 = 5
break_point2 = 7
with ops.Graph().as_default() as g:
init_op, get_next, save_op, _ = _build_graph(start, stop)
with self.test_session(graph=g) as sess:
sess.run(variables.global_variables_initializer())
sess.run(init_op)
for i in range(start, break_point1):
self.assertEqual(i, sess.run(get_next))
sess.run(save_op)
with ops.Graph().as_default() as g:
init_op, get_next, save_op, restore_op = _build_graph(start, stop)
with self.test_session(graph=g) as sess:
sess.run(restore_op)
for i in range(break_point1, break_point2):
self.assertEqual(i, sess.run(get_next))
sess.run(save_op)
with ops.Graph().as_default() as g:
init_op, get_next, save_op, restore_op = _build_graph(start, stop)
with self.test_session(graph=g) as sess:
sess.run(restore_op)
for i in range(break_point2, stop):
self.assertEqual(i, sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testSaveRestoreWithRepeat(self):
def _build_graph(start, stop, num_epochs):
iterator = dataset_ops.Dataset.range(
start, stop).repeat(num_epochs).make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
save_op = self._save_op(iterator._iterator_resource)
restore_op = self._restore_op(iterator._iterator_resource)
return init_op, get_next, save_op, restore_op
start = 2
stop = 10
num_epochs = 5
break_range = 5
break_epoch = 3
with ops.Graph().as_default() as g:
init_op, get_next, save_op, restore_op = _build_graph(
start, stop, num_epochs)
with self.test_session(graph=g) as sess:
sess.run(variables.global_variables_initializer())
sess.run(init_op)
# Note: There is no checkpoint saved currently so a NotFoundError is
# raised.
with self.assertRaises(errors.NotFoundError):
sess.run(restore_op)
for _ in range(break_epoch - 1):
for i in range(start, stop):
self.assertEqual(i, sess.run(get_next))
for i in range(start, break_range):
self.assertEqual(i, sess.run(get_next))
sess.run(save_op)
with ops.Graph().as_default() as g:
init_op, get_next, _, restore_op = _build_graph(start, stop, num_epochs)
with self.test_session(graph=g) as sess:
sess.run(restore_op)
for i in range(break_range, stop):
self.assertEqual(i, sess.run(get_next))
for _ in range(break_epoch, num_epochs):
for i in range(start, stop):
self.assertEqual(i, sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testSaveRestoreExhaustedIterator(self):
def _build_graph(start, stop, num_epochs):
iterator = dataset_ops.Dataset.range(
start, stop).repeat(num_epochs).make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
save_op = self._save_op(iterator._iterator_resource)
restore_op = self._restore_op(iterator._iterator_resource)
return init_op, get_next, save_op, restore_op
start = 2
stop = 10
num_epochs = 5
with ops.Graph().as_default() as g:
init_op, get_next, save_op, restore_op = _build_graph(
start, stop, num_epochs)
with self.test_session(graph=g) as sess:
sess.run(variables.global_variables_initializer())
sess.run(init_op)
# Note: There is no checkpoint saved currently so a NotFoundError is
# raised.
with self.assertRaises(errors.NotFoundError):
sess.run(restore_op)
for _ in range(num_epochs):
for i in range(start, stop):
self.assertEqual(i, sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
sess.run(save_op)
with ops.Graph().as_default() as g:
init_op, get_next, _, restore_op = _build_graph(start, stop, num_epochs)
with self.test_session(graph=g) as sess:
sess.run(restore_op)
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
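# A hedged standalone sketch of the checkpointing plumbing exercised by the
# tests above (assumes the contrib imports at the top of this file; the helper
# name and checkpoint_path argument are hypothetical):
def _sketch_iterator_save_ops(iterator, checkpoint_path):
  """Build save/restore ops for an iterator's state, mirroring the tests."""
  # serialize_iterator() captures the iterator state as a variant tensor,
  # which is stringified and written to disk; restore reverses each step.
  state = gen_dataset_ops.serialize_iterator(iterator._iterator_resource)
  save_op = io_ops.write_file(checkpoint_path,
                              parsing_ops.serialize_tensor(state))
  restore_op = gen_dataset_ops.deserialize_iterator(
      iterator._iterator_resource,
      parsing_ops.parse_tensor(io_ops.read_file(checkpoint_path),
                               dtypes.variant))
  return save_op, restore_op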
if __name__ == "__main__":
test.main()
|
horance-liu/tensorflow
|
tensorflow/contrib/data/python/kernel_tests/range_dataset_op_test.py
|
Python
|
apache-2.0
| 25,983
|
# ==================================================================
# Copyright (c) 2007,2008,2009 Metaweb Technologies, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY METAWEB TECHNOLOGIES AND CONTRIBUTORS
# ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL METAWEB
# TECHNOLOGIES OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ====================================================================
import re
import logging
log = logging.getLogger()
class FbException(Exception):
pass
class CmdException(Exception):
pass
media_types = {
'html': ['text/html'],
'txt': ['text/plain'],
'xml': ['text/xml',
'application/xml'],
'atom': ['application/atom+xml'],
'js': ['text/javascript',
'application/javascript',
'application/x-javascript'],
'json': ['application/json'],
'jpg': ['image/jpeg',
'image/pjpeg'],
'gif': ['image/gif'],
'png': ['image/png'],
}
extension_to_media_type = dict([(k,vs[0]) for k,vs in media_types.items()])
media_type_to_extension = {}
for k,vs in media_types.items():
for v in vs:
media_type_to_extension[v] = k
DIRSPLIT = re.compile(r'^(.+)/([^/]+)$')
def dirsplit_unsafe(id):
m = DIRSPLIT.match(id)
if m is None:
return (None, id)
dir,file = m.groups()
return (dir,file)
def dirsplit(id):
dir,file = dirsplit_unsafe(id)
if dir == '/guid':
raise FbException('%r is not a freebase keypath' % (id,))
return (dir,file)
value_types = [
'/type/text',
'/type/key',
'/type/rawstring',
'/type/float',
'/type/int',
'/type/boolean',
'/type/uri',
'/type/datetime',
'/type/id',
'/type/enumeration',
]
default_propkeys = {
'value': '/type/value/value',
'id': '/type/object/id',
'guid': '/type/object/guid',
'type': '/type/object/type',
'name': '/type/object/name',
'key': '/type/object/key',
'timestamp': '/type/object/timestamp',
'permission': '/type/object/permission',
'creator': '/type/object/creator',
'attribution': '/type/object/attribution'
}
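# A hedged usage sketch for the helpers above (the function name is
# hypothetical; the expected values follow directly from DIRSPLIT and
# media_types as defined in this module).
def _example_fbutil_usage():
    assert dirsplit('/en/monkey') == ('/en', 'monkey')
    assert dirsplit_unsafe('monkey') == (None, 'monkey')
    assert extension_to_media_type['json'] == 'application/json'
    assert media_type_to_extension['text/javascript'] == 'js'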
|
yuvadm/freebase-python
|
examples/freebase-images-appengine/freebase/fcl/fbutil.py
|
Python
|
bsd-2-clause
| 3,369
|
##########################################################################
#
# Copyright (c) 2010-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import hou
import IECore
import IECoreHoudini
import unittest
import os
class TestFromHoudiniGroupConverter( IECoreHoudini.TestCase ) :
def geo( self ) :
geo = hou.node( "/obj/geo1" )
if not geo :
obj = hou.node( "/obj" )
geo = obj.createNode( "geo", run_init_scripts=False )
return geo
def points( self ) :
points = self.geo().createNode( "popnet" )
points.createNode( "location" )
color = points.createOutputNode( "color" )
color.parm( "colortype" ).set( 2 )
name = color.createOutputNode( "name" )
name.parm( "name1" ).set( "pointsGroup" )
hou.setFrame( 50 )
return name
def torusA( self ) :
torus = self.geo().createNode( "torus" )
color = torus.createOutputNode( "color" )
color.parm( "colortype" ).set( 2 )
name = color.createOutputNode( "name" )
name.parm( "name1" ).set( "torusAGroup" )
return name
def torusB( self ) :
torus = self.geo().createNode( "torus" )
xform = torus.createOutputNode( "xform" )
xform.parmTuple( "t" ).set( ( 5, 0, 0 ) )
name = xform.createOutputNode( "name" )
name.parm( "name1" ).set( "torusBGroup" )
return name
def twoTorii( self ) :
merge = self.geo().createNode( "merge" )
merge.setInput( 0, self.torusA() )
merge.setInput( 1, self.torusB() )
return merge
def curve( self ) :
curve = self.geo().createNode( "curve" )
curve.parm( "type" ).set( 1 ) # NURBS
curve.parm( "coords" ).set( '0.160607,0,6.34083 -4.27121,0,5.13891 2.88941,0,5.29624 -4.70504,0,3.14992 2.281,0,3.42478 -3.78185,0,1.35239' )
color = curve.createOutputNode( "color" )
color.parm( "colortype" ).set( 2 )
return color
def box( self ) :
return self.geo().createNode( "box" )
def curveBox( self ) :
merge = self.geo().createNode( "merge" )
merge.setInput( 0, self.curve() )
merge.setInput( 1, self.box() )
name = merge.createOutputNode( "name" )
name.parm( "name1" ).set( "curveBoxGroup" )
return name
def buildScene( self ) :
merge = self.geo().createNode( "merge" )
merge.setInput( 0, self.points() )
merge.setInput( 1, self.twoTorii() )
merge.setInput( 2, self.curveBox() )
return merge
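	def namedTorus( self, name ) :
		# Illustrative helper (hedged sketch, not used by the tests below): the
		# recurring pattern above of labelling a SOP's primitives by appending
		# a "name" SOP, so the converter can group by the "name" attribute;
		# "name" here is any string label.
		torus = self.geo().createNode( "torus" )
		namer = torus.createOutputNode( "name" )
		namer.parm( "name1" ).set( name )
		return namer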
def testCreateConverter( self ) :
converter = IECoreHoudini.FromHoudiniGroupConverter( self.box() )
self.failUnless( converter.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniGroupConverter ) ) )
def testFactory( self ) :
scene = self.buildScene()
converter = IECoreHoudini.FromHoudiniGeometryConverter.create( scene )
self.failUnless( converter.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniGroupConverter ) ) )
converter = IECoreHoudini.FromHoudiniGeometryConverter.create( self.box(), IECore.TypeId.Group )
self.failUnless( converter.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniGroupConverter ) ) )
converter = IECoreHoudini.FromHoudiniGeometryConverter.create( scene, IECore.TypeId.Parameter )
self.assertEqual( converter, None )
converter = IECoreHoudini.FromHoudiniGeometryConverter.create( self.points() )
self.failUnless( converter.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniPointsConverter ) ) )
converter = IECoreHoudini.FromHoudiniGeometryConverter.create( self.curve() )
self.failUnless( converter.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniCurvesConverter ) ) )
converter = IECoreHoudini.FromHoudiniGeometryConverter.create( self.torusA() )
self.failUnless( converter.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniPolygonsConverter ) ) )
merge = self.twoTorii()
converter = IECoreHoudini.FromHoudiniGeometryConverter.create( merge )
self.failUnless( converter.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniGroupConverter ) ) )
merge.inputs()[0].bypass( True )
converter = IECoreHoudini.FromHoudiniGeometryConverter.create( merge )
self.failUnless( converter.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniGroupConverter ) ) )
merge.inputs()[1].bypass( True )
converter = IECoreHoudini.FromHoudiniGeometryConverter.create( merge )
self.failUnless( converter.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniPolygonsConverter ) ) )
group = self.curveBox()
converter = IECoreHoudini.FromHoudiniGeometryConverter.create( group )
self.failUnless( converter.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniGroupConverter ) ) )
group.bypass( True )
converter = IECoreHoudini.FromHoudiniGeometryConverter.create( group )
self.failUnless( converter.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniGroupConverter ) ) )
self.failUnless( IECore.TypeId.Group in IECoreHoudini.FromHoudiniGeometryConverter.supportedTypes() )
converter = IECoreHoudini.FromHoudiniGeometryConverter.createDummy( IECore.TypeId.Group )
self.assert_( converter.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniGroupConverter ) ) )
converter = IECoreHoudini.FromHoudiniGeometryConverter.createDummy( [ IECore.TypeId.Group ] )
self.assert_( converter.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniGroupConverter ) ) )
points = self.points()
torus = self.torusA()
merge = self.geo().createNode( "merge" )
merge.setInput( 0, self.points() )
merge.setInput( 1, self.twoTorii() )
converter = IECoreHoudini.FromHoudiniGeometryConverter.create( merge )
self.failUnless( converter.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniGroupConverter ) ) )
def testConvertFromHOMGeo( self ) :
geo = self.curveBox().geometry()
converter = IECoreHoudini.FromHoudiniGeometryConverter.createFromGeo( geo )
self.failUnless( converter.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniGroupConverter ) ) )
result = converter.convert()
self.failUnless( result.isInstanceOf( IECore.TypeId.Group ) )
converter2 = IECoreHoudini.FromHoudiniGeometryConverter.createFromGeo( geo, IECore.TypeId.Group )
self.failUnless( converter2.isInstanceOf( IECore.TypeId( IECoreHoudini.TypeId.FromHoudiniGroupConverter ) ) )
def testConvertScene( self ) :
result = IECoreHoudini.FromHoudiniGroupConverter( self.buildScene() ).convert()
self.failUnless( result.isInstanceOf( IECore.TypeId.Group ) )
self.assertEqual( len(result.children()), 5 )
self.assertEqual( result.children()[0].blindData()["name"].value, "curveBoxGroup" )
self.failUnless( result.children()[0].isInstanceOf( IECore.TypeId.MeshPrimitive ) )
self.assertEqual( result.children()[1].blindData()["name"].value, "curveBoxGroup" )
self.failUnless( result.children()[1].isInstanceOf( IECore.TypeId.CurvesPrimitive ) )
self.assertEqual( result.children()[2].blindData()["name"].value, "pointsGroup" )
self.failUnless( result.children()[2].isInstanceOf( IECore.TypeId.PointsPrimitive ) )
self.assertEqual( result.children()[3].blindData()["name"].value, "torusAGroup" )
self.failUnless( result.children()[3].isInstanceOf( IECore.TypeId.MeshPrimitive ) )
self.assertEqual( result.children()[4].blindData()["name"].value, "torusBGroup" )
self.failUnless( result.children()[4].isInstanceOf( IECore.TypeId.MeshPrimitive ) )
def testConvertSceneFromNameAttribute( self ) :
merge = self.buildScene()
converter = IECoreHoudini.FromHoudiniGroupConverter( merge )
converter["groupingMode"].setTypedValue( IECoreHoudini.FromHoudiniGroupConverter.GroupingMode.NameAttribute )
result = converter.convert()
self.failUnless( result.isInstanceOf( IECore.TypeId.Group ) )
self.assertEqual( len(result.children()), 5 )
self.assertEqual( set([ x.typeId() for x in result.children() ]), set([IECore._IECore.TypeId.CurvesPrimitive, IECore._IECore.TypeId.MeshPrimitive, IECore._IECore.TypeId.PointsPrimitive]) )
names = [ "curveBoxGroup", "curveBoxGroup", "pointsGroup", "torusAGroup", "torusBGroup" ]
self.assertEqual( sorted([ x.blindData()["name"].value for x in result.children() ]), names )
for child in result.children() :
self.failUnless( child.arePrimitiveVariablesValid() )
self.failUnless( "name" not in child )
self.failUnless( "nameIndices" not in child )
if child.blindData()["name"].value == "pointsGroup" :
self.failUnless( child.isInstanceOf( IECore.TypeId.PointsPrimitive ) )
elif child.blindData()["name"].value == "torusAGroup" :
self.failUnless( child.isInstanceOf( IECore.TypeId.MeshPrimitive ) )
elif child.blindData()["name"].value == "torusBGroup" :
self.failUnless( child.isInstanceOf( IECore.TypeId.MeshPrimitive ) )
elif child.blindData()["name"].value == "curveBoxGroup" :
self.failUnless( child.isInstanceOf( IECore.TypeId.CurvesPrimitive ) or child.isInstanceOf( IECore.TypeId.MeshPrimitive ) )
else :
				raise RuntimeError( "An unexpected shape name exists" )
null = merge.parent().createNode( "null" )
self.failUnless( IECoreHoudini.ToHoudiniGeometryConverter.create( result ).convert( null ) )
for prim in null.geometry().prims() :
self.failUnless( prim.stringAttribValue( "name" ) in names )
def testConvertSceneFromGroups( self ) :
merge = self.buildScene()
for node in merge.parent().children() :
if node.type().name() == "name" :
node.bypass( True )
group = node.createOutputNode( "group" )
group.parm( "crname" ).set( node.parm( "name1" ).eval() )
connection = node.outputConnections()[0]
connection.outputNode().setInput( connection.inputIndex(), group )
converter = IECoreHoudini.FromHoudiniGroupConverter( merge )
converter["groupingMode"].setTypedValue( IECoreHoudini.FromHoudiniGroupConverter.GroupingMode.PrimitiveGroup )
result = converter.convert()
self.failUnless( result.isInstanceOf( IECore.TypeId.Group ) )
self.assertEqual( len(result.children()), 4 )
self.assertEqual( set([ x.typeId() for x in result.children() ]), set([IECore._IECore.TypeId.Group, IECore._IECore.TypeId.MeshPrimitive, IECore._IECore.TypeId.PointsPrimitive]) )
names = [ "curveBoxGroup", "pointsGroup", "torusAGroup", "torusBGroup" ]
self.assertEqual( sorted([ x.blindData()["name"].value for x in result.children() ]), names )
for child in result.children() :
if child.isInstanceOf( IECore.TypeId.Primitive ) :
self.failUnless( child.arePrimitiveVariablesValid() )
self.failUnless( "name" not in child )
self.failUnless( "nameIndices" not in child )
if child.blindData()["name"].value == "pointsGroup" :
self.failUnless( child.isInstanceOf( IECore.TypeId.PointsPrimitive ) )
elif child.blindData()["name"].value == "torusAGroup" :
self.failUnless( child.isInstanceOf( IECore.TypeId.MeshPrimitive ) )
elif child.blindData()["name"].value == "torusBGroup" :
self.failUnless( child.isInstanceOf( IECore.TypeId.MeshPrimitive ) )
elif child.blindData()["name"].value == "curveBoxGroup" :
self.failUnless( child.isInstanceOf( IECore.TypeId.Group ) )
self.failUnless( child.children()[0].isInstanceOf( IECore.TypeId.MeshPrimitive ) )
self.failUnless( child.children()[1].isInstanceOf( IECore.TypeId.CurvesPrimitive ) )
else :
				raise RuntimeError( "An unexpected shape name exists" )
null = merge.parent().createNode( "null" )
self.failUnless( IECoreHoudini.ToHoudiniGeometryConverter.create( result ).convert( null ) )
for prim in null.geometry().prims() :
self.failUnless( prim.stringAttribValue( "name" ) in names )
def testNameAttributeExistsButSomeValuesAreEmpty( self ) :
merge = self.buildScene()
values = []
namers = [ x for x in merge.parent().children() if x.type().name() == "name" ]
for namer in namers :
values.append( namer.parm( "name1" ).eval() )
if namer.parm( "name1" ).eval() == "torusBGroup" :
namer.bypass( True )
values[-1] = ""
converter = IECoreHoudini.FromHoudiniGroupConverter( merge )
converter["groupingMode"].setTypedValue( IECoreHoudini.FromHoudiniGroupConverter.GroupingMode.NameAttribute )
result = converter.convert()
self.failUnless( result.isInstanceOf( IECore.TypeId.Group ) )
self.assertEqual( len(result.children()), 5 )
self.assertEqual( set([ x.typeId() for x in result.children() ]), set([IECore._IECore.TypeId.CurvesPrimitive, IECore._IECore.TypeId.MeshPrimitive, IECore._IECore.TypeId.PointsPrimitive]) )
names = [ "", "curveBoxGroup", "curveBoxGroup", "pointsGroup", "torusAGroup" ]
self.assertEqual( sorted([ x.blindData().get( "name", IECore.StringData() ).value for x in result.children() ]), names )
for child in result.children() :
self.failUnless( child.arePrimitiveVariablesValid() )
self.failUnless( "name" not in child )
self.failUnless( "nameIndices" not in child )
if "name" not in child.blindData() :
self.failUnless( child.isInstanceOf( IECore.TypeId.MeshPrimitive ) )
elif child.blindData()["name"].value == "pointsGroup" :
self.failUnless( child.isInstanceOf( IECore.TypeId.PointsPrimitive ) )
elif child.blindData()["name"].value == "torusAGroup" :
self.failUnless( child.isInstanceOf( IECore.TypeId.MeshPrimitive ) )
elif child.blindData()["name"].value == "curveBoxGroup" :
self.failUnless( child.isInstanceOf( IECore.TypeId.CurvesPrimitive ) or child.isInstanceOf( IECore.TypeId.MeshPrimitive ) )
else :
				raise RuntimeError( "An unexpected shape name exists" )
null = merge.parent().createNode( "null" )
self.failUnless( IECoreHoudini.ToHoudiniGeometryConverter.create( result ).convert( null ) )
for prim in null.geometry().prims() :
self.failUnless( prim.stringAttribValue( "name" ) in values )
def testConvertGroupedPoints( self ) :
result = IECoreHoudini.FromHoudiniGroupConverter( self.points() ).convert()
self.failUnless( result.isInstanceOf( IECore.TypeId.Group ) )
self.assertEqual( len(result.children()), 1 )
self.failUnless( result.children()[0].isInstanceOf( IECore.TypeId.PointsPrimitive ) )
self.assertEqual( result.children()[0].blindData()["name"].value, "pointsGroup" )
def testConvertUngroupedPoints( self ) :
group = self.points()
group.bypass( True )
result = IECoreHoudini.FromHoudiniGroupConverter( group ).convert()
self.failUnless( result.isInstanceOf( IECore.TypeId.Group ) )
self.assertEqual( len(result.children()), 1 )
self.failUnless( result.children()[0].isInstanceOf( IECore.TypeId.PointsPrimitive ) )
self.assertEqual( result.children()[0].blindData(), IECore.CompoundData() )
def testConvertGroupedCurves( self ) :
curve = self.curve()
group = curve.createOutputNode( "group" )
group.parm( "crname" ).set( "curvesGroup" )
result = IECoreHoudini.FromHoudiniGroupConverter( group ).convert()
self.failUnless( result.isInstanceOf( IECore.TypeId.Group ) )
self.assertEqual( len(result.children()), 1 )
self.failUnless( result.children()[0].isInstanceOf( IECore.TypeId.CurvesPrimitive ) )
self.assertEqual( result.children()[0].blindData()["name"].value, "curvesGroup" )
def testConvertUngroupedCurves( self ) :
result = IECoreHoudini.FromHoudiniGroupConverter( self.curve() ).convert()
self.failUnless( result.isInstanceOf( IECore.TypeId.Group ) )
self.assertEqual( len(result.children()), 1 )
self.failUnless( result.children()[0].isInstanceOf( IECore.TypeId.CurvesPrimitive ) )
self.assertEqual( result.children()[0].blindData(), IECore.CompoundData() )
def testConvertGroupedPolygons( self ) :
result = IECoreHoudini.FromHoudiniGroupConverter( self.torusA() ).convert()
self.failUnless( result.isInstanceOf( IECore.TypeId.Group ) )
self.assertEqual( len(result.children()), 1 )
self.failUnless( result.children()[0].isInstanceOf( IECore.TypeId.MeshPrimitive ) )
self.assertEqual( result.children()[0].blindData()["name"].value, "torusAGroup" )
def testConvertUngroupedPolygons( self ) :
group = self.torusA()
group.bypass( True )
result = IECoreHoudini.FromHoudiniGroupConverter( group ).convert()
self.failUnless( result.isInstanceOf( IECore.TypeId.Group ) )
self.assertEqual( len(result.children()), 1 )
self.failUnless( result.children()[0].isInstanceOf( IECore.TypeId.MeshPrimitive ) )
self.assertEqual( result.children()[0].blindData(), IECore.CompoundData() )
def testConvertGroupedMultiPolygons( self ) :
result = IECoreHoudini.FromHoudiniGroupConverter( self.twoTorii() ).convert()
self.failUnless( result.isInstanceOf( IECore.TypeId.Group ) )
self.assertEqual( len(result.children()), 2 )
childNames = [ x.blindData()["name"].value for x in result.children() ]
self.failUnless( result.children()[0].isInstanceOf( IECore.TypeId.MeshPrimitive ) )
self.failUnless( "torusAGroup" in childNames )
self.assertEqual( result.children()[0]["P"].data.size(), 100 )
self.failUnless( result.children()[1].isInstanceOf( IECore.TypeId.MeshPrimitive ) )
self.failUnless( "torusBGroup" in childNames )
self.assertEqual( result.children()[1]["P"].data.size(), 100 )
def testConvertUngroupedMultiPolygons( self ) :
merge = self.twoTorii()
merge.inputs()[0].bypass( True )
merge.inputs()[1].bypass( True )
result = IECoreHoudini.FromHoudiniGroupConverter( merge ).convert()
self.failUnless( result.isInstanceOf( IECore.TypeId.Group ) )
self.assertEqual( len(result.children()), 1 )
self.failUnless( result.children()[0].isInstanceOf( IECore.TypeId.MeshPrimitive ) )
self.assertEqual( result.children()[0].blindData(), IECore.CompoundData() )
self.assertEqual( result.children()[0]["P"].data.size(), 200 )
def testConvertPartiallyGroupedMultiPolygons( self ) :
merge = self.twoTorii()
merge.inputs()[0].bypass( True )
result = IECoreHoudini.FromHoudiniGroupConverter( merge ).convert()
self.failUnless( result.isInstanceOf( IECore.TypeId.Group ) )
self.assertEqual( len(result.children()), 2 )
self.failUnless( result.children()[0].isInstanceOf( IECore.TypeId.MeshPrimitive ) )
self.assertEqual( result.children()[0].blindData(), IECore.CompoundData() )
self.assertEqual( result.children()[0]["P"].data.size(), 100 )
self.failUnless( result.children()[1].isInstanceOf( IECore.TypeId.MeshPrimitive ) )
self.assertEqual( result.children()[1].blindData()["name"].value, "torusBGroup" )
self.assertEqual( result.children()[1]["P"].data.size(), 100 )
def testConvertGroupedCurvesAndPolygons( self ) :
result = IECoreHoudini.FromHoudiniGroupConverter( self.curveBox() ).convert()
self.failUnless( result.isInstanceOf( IECore.TypeId.Group ) )
self.assertEqual( len(result.children()), 2 )
self.assertEqual( result.children()[0].blindData()["name"].value, "curveBoxGroup" )
self.failUnless( result.children()[0].isInstanceOf( IECore.TypeId.MeshPrimitive ) )
self.assertEqual( result.children()[1].blindData()["name"].value, "curveBoxGroup" )
self.failUnless( result.children()[1].isInstanceOf( IECore.TypeId.CurvesPrimitive ) )
def testConvertUngroupedCurvesAndPolygons( self ) :
group = self.curveBox()
group.bypass( True )
result = IECoreHoudini.FromHoudiniGroupConverter( group ).convert()
self.failUnless( result.isInstanceOf( IECore.TypeId.Group ) )
self.assertEqual( len(result.children()), 2 )
self.assertEqual( result.children()[0].blindData(), IECore.CompoundData() )
self.failUnless( result.children()[0].isInstanceOf( IECore.TypeId.MeshPrimitive ) )
self.assertEqual( result.children()[1].blindData(), IECore.CompoundData() )
self.failUnless( result.children()[1].isInstanceOf( IECore.TypeId.CurvesPrimitive ) )
def testFakeNameAttribute( self ) :
merge = self.buildScene()
noName = merge.createOutputNode( "attribute" )
noName.parm( "primdel" ).set( "name" )
converter = IECoreHoudini.FromHoudiniGroupConverter( noName )
converter["groupingMode"].setTypedValue( IECoreHoudini.FromHoudiniGroupConverter.GroupingMode.NameAttribute )
result = converter.convert()
self.assertTrue( result.isInstanceOf( IECore.TypeId.Group ) )
self.assertEqual( result.blindData(), IECore.CompoundData() )
self.assertEqual( len(result.children()), 3 )
for i in range( 0, 3 ) :
self.assertEqual( result.children()[i].blindData(), IECore.CompoundData() )
self.assertTrue( result.children()[0].isInstanceOf( IECore.TypeId.MeshPrimitive ) )
self.assertTrue( result.children()[1].isInstanceOf( IECore.TypeId.CurvesPrimitive ) )
self.assertTrue( result.children()[2].isInstanceOf( IECore.TypeId.PointsPrimitive ) )
def testNonPrimitiveNameAttribute( self ) :
merge = self.buildScene()
for node in merge.parent().children() :
if node.type().name() == "name" :
node.bypass( True )
attrib = merge.createOutputNode( "attribcreate" )
attrib.parm( "name1" ).set( "name" )
attrib.parm( "class1" ).set( 2 ) # Point
attrib.parm( "type1" ).set( 3 ) # String
converter = IECoreHoudini.FromHoudiniGroupConverter( attrib )
converter["groupingMode"].setTypedValue( IECoreHoudini.FromHoudiniGroupConverter.GroupingMode.NameAttribute )
result = converter.convert()
self.assertTrue( result.isInstanceOf( IECore.TypeId.Group ) )
self.assertEqual( result.blindData(), IECore.CompoundData() )
self.assertEqual( len(result.children()), 3 )
for i in range( 0, 3 ) :
self.assertEqual( result.children()[i].blindData(), IECore.CompoundData() )
self.assertTrue( result.children()[0].isInstanceOf( IECore.TypeId.MeshPrimitive ) )
self.assertTrue( result.children()[1].isInstanceOf( IECore.TypeId.CurvesPrimitive ) )
self.assertTrue( result.children()[2].isInstanceOf( IECore.TypeId.PointsPrimitive ) )
def testNonStringNameAttribute( self ) :
merge = self.buildScene()
noName = merge.createOutputNode( "attribute" )
noName.parm( "primdel" ).set( "name" )
rename = noName.createOutputNode( "attribute" )
rename.parm( "frompr0" ).set( "born" )
rename.parm( "topr0" ).set( "name" )
converter = IECoreHoudini.FromHoudiniGroupConverter( rename )
converter["groupingMode"].setTypedValue( IECoreHoudini.FromHoudiniGroupConverter.GroupingMode.NameAttribute )
result = converter.convert()
self.assertTrue( result.isInstanceOf( IECore.TypeId.Group ) )
self.assertEqual( result.blindData(), IECore.CompoundData() )
self.assertEqual( len(result.children()), 3 )
for i in range( 0, 3 ) :
self.assertEqual( result.children()[i].blindData(), IECore.CompoundData() )
self.assertTrue( result.children()[0].isInstanceOf( IECore.TypeId.MeshPrimitive ) )
self.assertTrue( result.children()[1].isInstanceOf( IECore.TypeId.CurvesPrimitive ) )
self.assertTrue( result.children()[2].isInstanceOf( IECore.TypeId.PointsPrimitive ) )
def testObjectWasDeleted( self ) :
torii = self.twoTorii()
converter = IECoreHoudini.FromHoudiniGroupConverter( torii )
g1 = converter.convert()
torii.destroy()
g2 = converter.convert()
self.assertEqual( g2, g1 )
self.assertRaises( RuntimeError, IECore.curry( IECoreHoudini.FromHoudiniGroupConverter, torii ) )
def testObjectWasDeletedFactory( self ) :
torii = self.twoTorii()
converter = IECoreHoudini.FromHoudiniGeometryConverter.create( torii )
g1 = converter.convert()
torii.destroy()
g2 = converter.convert()
self.assertEqual( g2, g1 )
self.assertRaises( RuntimeError, IECore.curry( IECoreHoudini.FromHoudiniGeometryConverter.create, torii ) )
def testAdjustedStringVectorIndices( self ) :
box = self.box()
geo = box.parent()
group = box.createOutputNode( "group" )
attr = group.createOutputNode( "attribcreate", exact_type_name=True )
attr.parm( "class" ).set( 1 ) # prim
attr.parm( "type" ).set( 3 ) # string
attr.parm( "string" ).set( "box1" )
box2 = geo.createNode( "box" )
group2 = box2.createOutputNode( "group" )
attr2 = group2.createOutputNode( "attribcreate", exact_type_name=True )
attr2.parm( "class" ).set( 1 ) # prim
attr2.parm( "type" ).set( 3 ) # string
attr2.parm( "string" ).set( "box2" )
merge = attr.createOutputNode( "merge" )
merge.setInput( 1, attr2 )
g = IECoreHoudini.FromHoudiniGroupConverter( merge ).convert()
for c in g.children() :
null = geo.createNode( "null" )
IECoreHoudini.ToHoudiniPolygonsConverter( c ).convert( null )
m = IECoreHoudini.FromHoudiniPolygonsConverter( null ).convert()
self.assertEqual( m.vertexIds, c.vertexIds )
self.assertEqual( m.verticesPerFace, c.verticesPerFace )
self.assertEqual( m.keys(), c.keys() )
for key in m.keys() :
self.assertEqual( m[key].interpolation, c[key].interpolation )
self.assertEqual( m[key].data, c[key].data )
self.assertEqual( m[key], c[key] )
def testAttributeFilter( self ) :
merge = self.buildScene()
uvunwrap = merge.createOutputNode( "uvunwrap" )
converter = IECoreHoudini.FromHoudiniGroupConverter( uvunwrap )
result = converter.convert()
for child in result.children() :
self.assertTrue( child.isInstanceOf( IECore.TypeId.Primitive ) )
self.assertEqual( sorted(child.keys()), ['Cs', 'P', 'accel', 'born', 'event', 'generator', 'generatorIndices', 'id', 'life', 'nextid', 'parent', 'pstate', 's', 'source', 't', 'v', 'varmap'] )
converter.parameters()["attributeFilter"].setTypedValue( "P" )
result = converter.convert()
for child in result.children() :
self.assertEqual( sorted(child.keys()), [ "P" ] )
converter.parameters()["attributeFilter"].setTypedValue( "* ^generator* ^varmap ^born ^event" )
result = converter.convert()
for child in result.children() :
self.assertEqual( sorted(child.keys()), ['Cs', 'P', 'accel', 'id', 'life', 'nextid', 'parent', 'pstate', 's', 'source', 't', 'v'] )
converter.parameters()["attributeFilter"].setTypedValue( "* ^P" )
result = converter.convert()
for child in result.children() :
# P must be converted
self.assertTrue( "P" in child.keys() )
# the filter has to target the source attribute names (uv, Cd), not the converted names (s, t, Cs)
converter.parameters()["attributeFilter"].setTypedValue( "s t Cs" )
result = converter.convert()
for child in result.children() :
self.assertEqual( sorted(child.keys()), ['P'] )
converter.parameters()["attributeFilter"].setTypedValue( "uv Cd" )
result = converter.convert()
for child in result.children() :
self.assertEqual( sorted(child.keys()), ['Cs', 'P', 's', 't'] )
def testStandardAttributeConversion( self ) :
merge = self.buildScene()
color = merge.createOutputNode( "color" )
color.parm( "colortype" ).set( 2 )
rest = color.createOutputNode( "rest" )
scale = rest.createOutputNode( "attribcreate" )
scale.parm( "name1" ).set( "pscale" )
scale.parm( "value1v1" ).setExpression( "$PT" )
uvunwrap = scale.createOutputNode( "uvunwrap" )
converter = IECoreHoudini.FromHoudiniGroupConverter( uvunwrap )
result = converter.convert()
for child in result.children() :
self.assertEqual( child.keys(), ['Cs', 'P', 'Pref', 'accel', 'born', 'event', 'generator', 'generatorIndices', 'id', 'life', 'nextid', 'parent', 'pstate', 's', 'source', 't', 'v', 'varmap', 'width'] )
self.assertTrue( child.arePrimitiveVariablesValid() )
self.assertEqual( child["P"].data.getInterpretation(), IECore.GeometricData.Interpretation.Point )
self.assertEqual( child["Pref"].data.getInterpretation(), IECore.GeometricData.Interpretation.Point )
self.assertEqual( child["accel"].data.getInterpretation(), IECore.GeometricData.Interpretation.Vector )
self.assertEqual( child["v"].data.getInterpretation(), IECore.GeometricData.Interpretation.Vector )
converter["convertStandardAttributes"].setTypedValue( False )
result = converter.convert()
for child in result.children() :
self.assertEqual( child.keys(), ['Cd', 'P', 'accel', 'born', 'event', 'generator', 'generatorIndices', 'id', 'life', 'nextid', 'parent', 'pscale', 'pstate', 'rest', 'source', 'uv', 'v', 'varmap'] )
self.assertTrue( child.arePrimitiveVariablesValid() )
self.assertEqual( child["P"].data.getInterpretation(), IECore.GeometricData.Interpretation.Point )
self.assertEqual( child["rest"].data.getInterpretation(), IECore.GeometricData.Interpretation.Point )
self.assertEqual( child["accel"].data.getInterpretation(), IECore.GeometricData.Interpretation.Vector )
self.assertEqual( child["v"].data.getInterpretation(), IECore.GeometricData.Interpretation.Vector )
def testInterpretation( self ) :
merge = self.buildScene()
converter = IECoreHoudini.FromHoudiniGroupConverter( merge )
result = converter.convert()
for child in result.children() :
self.assertTrue( child.isInstanceOf( IECore.TypeId.Primitive ) )
self.assertEqual( sorted(child.keys()), ['Cs', 'P', 'accel', 'born', 'event', 'generator', 'generatorIndices', 'id', 'life', 'nextid', 'parent', 'pstate', 'source', 'v', 'varmap'] )
self.assertEqual( child["P"].data.getInterpretation(), IECore.GeometricData.Interpretation.Point )
self.assertEqual( child["accel"].data.getInterpretation(), IECore.GeometricData.Interpretation.Vector )
self.assertEqual( child["life"].data.getInterpretation(), IECore.GeometricData.Interpretation.Numeric )
self.assertEqual( child["v"].data.getInterpretation(), IECore.GeometricData.Interpretation.Vector )
if __name__ == "__main__":
unittest.main()
|
code-google-com/cortex-vfx
|
test/IECoreHoudini/FromHoudiniGroupConverter.py
|
Python
|
bsd-3-clause
| 30,705
|
##########################################################################
#
# Copyright (c) 2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import Gaffer
import GafferUI
import GafferScene
def __cameraSummary( plug ) :
info = []
if plug["renderCamera"]["enabled"].getValue() :
info.append( plug["renderCamera"]["value"].getValue() )
if plug["renderResolution"]["enabled"].getValue() :
resolution = plug["renderResolution"]["value"].getValue()
info.append( "%dx%d" % ( resolution[0], resolution[1] ) )
return ", ".join( info )
def __motionBlurSummary( plug ) :
info = []
if plug["cameraBlur"]["enabled"].getValue() :
info.append( "Camera " + ( "On" if plug["cameraBlur"]["value"].getValue() else "Off" ) )
if plug["transformBlur"]["enabled"].getValue() :
info.append( "Transform " + ( "On" if plug["transformBlur"]["value"].getValue() else "Off" ) )
if plug["deformationBlur"]["enabled"].getValue() :
info.append( "Deformation " + ( "On" if plug["deformationBlur"]["value"].getValue() else "Off" ) )
if plug["shutter"]["enabled"].getValue() :
info.append( "Shutter " + str( plug["shutter"]["value"].getValue() ) )
return ", ".join( info )
GafferUI.PlugValueWidget.registerCreator(
GafferScene.StandardOptions.staticTypeId(),
"options",
GafferUI.SectionedCompoundDataPlugValueWidget,
sections = (
{
"label" : "Camera",
"summary" : __cameraSummary,
"namesAndLabels" : (
( "render:camera", "Camera" ),
( "render:resolution", "Resolution" ),
),
},
{
"label" : "Motion Blur",
"summary" : __motionBlurSummary,
"namesAndLabels" : (
( "render:cameraBlur", "Camera" ),
( "render:transformBlur", "Transform" ),
( "render:deformationBlur", "Deformation" ),
( "render:shutter", "Shutter" ),
),
},
),
)
GafferUI.PlugValueWidget.registerCreator(
GafferScene.StandardOptions.staticTypeId(),
"options.renderCamera.value",
lambda plug : GafferUI.PathPlugValueWidget(
plug,
path = GafferScene.ScenePath( plug.node()["in"], plug.node().scriptNode().context(), "/" ),
),
)
|
paulondc/gaffer
|
python/GafferSceneUI/StandardOptionsUI.py
|
Python
|
bsd-3-clause
| 3,747
|
from itertools import chain, groupby
import re
from dhcp_objects import (Option, Group, Host, Parameter, Pool, Allow,
Deny, Subnet, )
key_table = [(Option, 'options'),
(Group, 'groups'),
(Host, 'hosts'),
(Parameter, 'parameters'),
(Pool, 'pools'),
(Allow, 'allow'),
(Deny, 'deny'),
(Subnet, 'subnets'),
('mac', 'mac'),
('start', 'start'),
('end', 'end'),
('fqdn', 'fqdn'),
('match' , 'match')]
def get_key(obj):
for o, k in key_table:
if obj is o:
return k
raise Exception("key {0} was not found".format(obj))
def prepare_arguments(attrs, exclude_list=None, **kwargs):
exclude_list = exclude_list or []
# Merge any plain dicts found in attrs straight into the keyword arguments.
dicts = [d for d in attrs if type(d) is dict]
kwargs.update(dict(chain(*map(lambda x: x.items(), dicts))))
attrs = [a for a in attrs if not (type(a) is dict)]
# Group the remaining objects by type and store each group under the key
# that key_table associates with that type.
for k, g in groupby(sorted(attrs, key=type), type):
key = get_key(k)
kwargs[key] = list(g) if key not in exclude_list else list(g)[0]
return kwargs
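# A minimal usage sketch (hypothetical values, assuming Option and Host
# construct as shown):
#
#   attrs = [Option('dns'), Option('ntp'), Host('web1'),
#            {'netmask': '255.255.255.0'}]
#   prepare_arguments(attrs)
#   # -> {'options': [<Option 'dns'>, <Option 'ntp'>],
#   #     'hosts': [<Host 'web1'>],
#   #     'netmask': '255.255.255.0'}
#
# Keys named in exclude_list are unwrapped to their single element instead
# of a one-item list.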
mac_match = "(([0-9a-fA-F]){2}:){5}([0-9a-fA-F]){2}$"
is_mac = re.compile(mac_match)
is_ip = re.compile("\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}")
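# Illustrative checks (not part of the original module):
#   bool(is_mac.match('aa:bb:cc:dd:ee:ff'))  # True
#   bool(is_mac.match('aa:bb:cc:dd:ee'))     # False (only five octets)
#   bool(is_ip.match('10.0.0.1'))            # True (format check only;
#                                            # octet ranges are not validated)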
|
drkitty/cyder
|
cyder/management/commands/lib/dhcpd_compare/utils.py
|
Python
|
bsd-3-clause
| 1,370
|
"""tests/test_output_format.py.
Tests the output format handlers included with Hug
Copyright (C) 2015 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import pytest
import hug
from datetime import datetime
def test_text():
'''Ensure that it's possible to output a Hug API method as text'''
assert hug.output_format.text("Hello World!") == "Hello World!"
assert hug.output_format.text(str(1)) == "1"
def test_json():
'''Ensure that it's possible to output a Hug API method as JSON'''
now = datetime.now()
test_data = {'text': 'text', 'datetime': now, 'bytes': b'bytes'}
output = hug.output_format.json(test_data).decode('utf8')
assert 'text' in output
assert 'bytes' in output
assert now.isoformat() in output
class NewObject(object):
pass
test_data['non_serializable'] = NewObject()
with pytest.raises(TypeError):
hug.output_format.json(test_data).decode('utf8')
def test_pretty_json():
'''Ensure that it's possible to output a Hug API method as prettified and indented JSON'''
test_data = {'text': 'text'}
assert hug.output_format.pretty_json(test_data).decode('utf8') == ('{\n'
' "text": "text"\n'
'}')
def test_json_camelcase():
'''Ensure that it's possible to output a Hug API method as camelCased JSON'''
test_data = {'under_score': {'values_can': 'Be Converted'}}
output = hug.output_format.json_camelcase(test_data).decode('utf8')
assert 'underScore' in output
assert 'valuesCan' in output
assert 'Be Converted' in output
|
jean/hug
|
tests/test_output_format.py
|
Python
|
mit
| 2,679
|
from core.himesis import Himesis, HimesisPostConditionPattern
import cPickle as pickle
from uuid import UUID
class HMoveOneInputRepeatedDirectApplyDiffRulesRHS(HimesisPostConditionPattern):
def __init__(self):
"""
Creates the himesis graph representing the AToM3 model HMoveOneInputRepeatedDirectApplyDiffRulesRHS.
"""
# Flag this instance as compiled now
self.is_compiled = True
super(HMoveOneInputRepeatedDirectApplyDiffRulesRHS, self).__init__(name='HMoveOneInputRepeatedDirectApplyDiffRulesRHS', num_nodes=4, edges=[])
# Add the edges
self.add_edges([(3, 0), (0, 1)])
# Set the graph attributes
self["mm__"] = pickle.loads("""(lp1
S'MT_post__GM2AUTOSAR_MM'
p2
aS'MoTifRule'
p3
a.""")
self["MT_action__"] = pickle.loads("""V#===============================================================================\u000a# This code is executed after the rule has been applied.\u000a# You can access a node labelled n matched by this rule by: PostNode('n').\u000a# To access attribute x of node n, use: PostNode('n')['x'].\u000a#===============================================================================\u000a\u000apass\u000a
p1
.""")
self["name"] = """"""
self["GUID__"] = UUID('ae1025d2-ecc6-48d3-8c47-97eda9795889')
# Set the node attributes
self.vs[0]["MT_post__associationType"] = """
#===============================================================================
# You can access the value of the current node's attribute value by: attr_value.
# If the current node shall be created you MUST initialize it here!
# You can access a node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# Note that the attribute values are those before the match is rewritten.
# The order in which this code is executed depends on the label value
# of the encapsulating node.
# The given action must return the new value of the attribute.
#===============================================================================
return attr_value
"""
self.vs[0]["MT_label__"] = """10"""
self.vs[0]["mm__"] = """MT_post__directLink_T"""
self.vs[0]["GUID__"] = UUID('0227b567-0de7-4da0-8c95-82deb30ecdfb')
self.vs[1]["MT_pivotOut__"] = """element1"""
self.vs[1]["MT_post__cardinality"] = """
#===============================================================================
# You can access the value of the current node's attribute value by: attr_value.
# If the current node shall be created you MUST initialize it here!
# You can access a node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# Note that the attribute values are those before the match is rewritten.
# The order in which this code is executed depends on the label value
# of the encapsulating node.
# The given action must return the new value of the attribute.
#===============================================================================
return attr_value
"""
self.vs[1]["MT_label__"] = """3"""
self.vs[1]["MT_post__name"] = """
#===============================================================================
# You can access the value of the current node's attribute value by: attr_value.
# If the current node shall be created you MUST initialize it here!
# You can access a node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# Note that the attribute values are those before the match is rewritten.
# The order in which this code is executed depends on the label value
# of the encapsulating node.
# The given action must return the new value of the attribute.
#===============================================================================
return attr_value
"""
self.vs[1]["mm__"] = """MT_post__MetaModelElement_T"""
self.vs[1]["MT_post__classtype"] = """
#===============================================================================
# You can access the value of the current node's attribute value by: attr_value.
# If the current node shall be created you MUST initialize it here!
# You can access a node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# Note that the attribute values are those before the match is rewritten.
# The order in which this code is executed depends on the label value
# of the encapsulating node.
# The given action must return the new value of the attribute.
#===============================================================================
return attr_value
"""
self.vs[1]["GUID__"] = UUID('9f4ea8b8-2bba-413f-ba8e-77ed2cee7b39')
self.vs[2]["MT_pivotOut__"] = """element2"""
self.vs[2]["MT_post__cardinality"] = """
#===============================================================================
# You can access the value of the current node's attribute value by: attr_value.
# If the current node shall be created you MUST initialize it here!
# You can access a node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# Note that the attribute values are those before the match is rewritten.
# The order in which this code is executed depends on the label value
# of the encapsulating node.
# The given action must return the new value of the attribute.
#===============================================================================
return attr_value
"""
self.vs[2]["MT_label__"] = """4"""
self.vs[2]["MT_post__name"] = """
#===============================================================================
# You can access the value of the current node's attribute value by: attr_value.
# If the current node shall be created you MUST initialize it here!
# You can access a node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# Note that the attribute values are those before the match is rewritten.
# The order in which this code is executed depends on the label value
# of the encapsulating node.
# The given action must return the new value of the attribute.
#===============================================================================
return attr_value
"""
self.vs[2]["mm__"] = """MT_post__MetaModelElement_T"""
self.vs[2]["MT_post__classtype"] = """
#===============================================================================
# You can access the value of the current node's attribute value by: attr_value.
# If the current node shall be created you MUST initialize it here!
# You can access a node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# Note that the attribute values are those before the match is rewritten.
# The order in which this code is executed depends on the label value
# of the encapsulating node.
# The given action must return the new value of the attribute.
#===============================================================================
return attr_value
"""
self.vs[2]["GUID__"] = UUID('2d698715-d619-43f4-9b57-50b02d3565da')
self.vs[3]["MT_post__cardinality"] = """
#===============================================================================
# You can access the value of the current node's attribute value by: attr_value.
# If the current node shall be created you MUST initialize it here!
# You can access a node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# Note that the attribute values are those before the match is rewritten.
# The order in which this code is executed depends on the label value
# of the encapsulating node.
# The given action must return the new value of the attribute.
#===============================================================================
return attr_value
"""
self.vs[3]["MT_label__"] = """5"""
self.vs[3]["MT_post__name"] = """
#===============================================================================
# You can access the value of the current node's attribute value by: attr_value.
# If the current node shall be created you MUST initialize it here!
# You can access a node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# Note that the attribute values are those before the match is rewritten.
# The order in which this code is executed depends on the label value
# of the encapsulating node.
# The given action must return the new value of the attribute.
#===============================================================================
return attr_value
"""
self.vs[3]["mm__"] = """MT_post__MetaModelElement_T"""
self.vs[3]["MT_post__classtype"] = """
#===============================================================================
# You can access the value of the current node's attribute value by: attr_value.
# If the current node shall be created you MUST initialize it here!
# You can access a node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# Note that the attribute values are those before the match is rewritten.
# The order in which this code is executed depends on the label value
# of the encapsulating node.
# The given action must return the new value of the attribute.
#===============================================================================
return attr_value
"""
self.vs[3]["GUID__"] = UUID('fdc3db7a-c0c3-4671-88c1-384e51857cf1')
from HMoveOneInputRepeatedDirectApplyDiffRulesLHS import HMoveOneInputRepeatedDirectApplyDiffRulesLHS
self.pre = HMoveOneInputRepeatedDirectApplyDiffRulesLHS()
def action(self, PostNode, graph):
"""
Executable constraint code.
@param PostNode: Function taking an integer as parameter
and returns the node corresponding to that label.
"""
#===============================================================================
# This code is executed after the rule has been applied.
# You can access a node labelled n matched by this rule by: PostNode('n').
# To access attribute x of node n, use: PostNode('n')['x'].
#===============================================================================
pass
def execute(self, packet, match):
"""
Transforms the current match of the packet according to this rule.
Pivots are also assigned, if any.
@param packet: The input packet.
@param match: The match to rewrite.
"""
graph = packet.graph
# Build a dictionary {label: node index} mapping each label of the pattern to a node in the graph to rewrite.
# Because of the uniqueness property of labels in a rule, we can store all LHS labels
# and subsequently add the labels corresponding to the nodes to be created.
labels = match.copy()
#===============================================================================
# Update attribute values
#===============================================================================
#===============================================================================
# Create new nodes
#===============================================================================
#===============================================================================
# Create new edges
#===============================================================================
#===============================================================================
# Set the output pivots
#===============================================================================
# MetaModelElement_T3
packet.global_pivots['element1'] = graph.vs[labels['3']][Himesis.Constants.GUID]
# MetaModelElement_T4
packet.global_pivots['element2'] = graph.vs[labels['4']][Himesis.Constants.GUID]
#===============================================================================
# Perform the post-action
#===============================================================================
try:
self.action(lambda i: graph.vs[labels[i]], graph)
except Exception, e:
raise Exception('An error has occurred while applying the post-action', e)
#===============================================================================
# Finally, delete nodes (this will automatically delete the adjacent edges)
#===============================================================================
# MT_pre__directLink_T9
graph.delete_nodes([labels["9"]])
|
levilucio/SyVOLT
|
UMLRT2Kiltera_MM/merge_preprocess_rules/Himesis/HMoveOneInputRepeatedDirectApplyDiffRulesRHS.py
|
Python
|
mit
| 12,860
|
#-------------------------------------------------------------------------------
# elftools: port of OrderedDict to work on Python < 2.7
#
# Taken from http://code.activestate.com/recipes/576693/ , revision 9
# Code by Raymond Hettinger. License: MIT
#-------------------------------------------------------------------------------
try:
from thread import get_ident as _get_ident
except ImportError:
from dummy_thread import get_ident as _get_ident
try:
from _abcoll import KeysView, ValuesView, ItemsView
except ImportError:
pass
class OrderedDict(dict):
'Dictionary that remembers insertion order'
# An inherited dict maps keys to values.
# The inherited dict provides __getitem__, __len__, __contains__, and get.
# The remaining methods are order-aware.
# Big-O running times for all methods are the same as for regular dictionaries.
# The internal self.__map dictionary maps keys to links in a doubly linked list.
# The circular doubly linked list starts and ends with a sentinel element.
# The sentinel element never gets deleted (this simplifies the algorithm).
# Each link is stored as a list of length three: [PREV, NEXT, KEY].
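# A quick usage sketch (illustrative, not part of the original recipe):
#
#   >>> od = OrderedDict()
#   >>> od['b'] = 1; od['a'] = 2; od['c'] = 3
#   >>> list(od)
#   ['b', 'a', 'c']          # keys come back in insertion order
#   >>> od.popitem(last=False)
#   ('b', 1)                 # FIFO pop from the front of the linked list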
def __init__(self, *args, **kwds):
'''Initialize an ordered dictionary. Signature is the same as for
regular dictionaries, but keyword arguments are not recommended
because their insertion order is arbitrary.
'''
if len(args) > 1:
raise TypeError('expected at most 1 arguments, got %d' % len(args))
try:
self.__root
except AttributeError:
self.__root = root = [] # sentinel node
root[:] = [root, root, None]
self.__map = {}
self.__update(*args, **kwds)
def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
'od.__setitem__(i, y) <==> od[i]=y'
# Setting a new item creates a new link which goes at the end of the linked
# list, and the inherited dictionary is updated with the new key/value pair.
if key not in self:
root = self.__root
last = root[0]
last[1] = root[0] = self.__map[key] = [last, root, key]
dict_setitem(self, key, value)
def __delitem__(self, key, dict_delitem=dict.__delitem__):
'od.__delitem__(y) <==> del od[y]'
# Deleting an existing item uses self.__map to find the link which is
# then removed by updating the links in the predecessor and successor nodes.
dict_delitem(self, key)
link_prev, link_next, key = self.__map.pop(key)
link_prev[1] = link_next
link_next[0] = link_prev
def __iter__(self):
'od.__iter__() <==> iter(od)'
root = self.__root
curr = root[1]
while curr is not root:
yield curr[2]
curr = curr[1]
def __reversed__(self):
'od.__reversed__() <==> reversed(od)'
root = self.__root
curr = root[0]
while curr is not root:
yield curr[2]
curr = curr[0]
def clear(self):
'od.clear() -> None. Remove all items from od.'
try:
for node in self.__map.itervalues():
del node[:]
root = self.__root
root[:] = [root, root, None]
self.__map.clear()
except AttributeError:
pass
dict.clear(self)
def popitem(self, last=True):
'''od.popitem() -> (k, v), return and remove a (key, value) pair.
Pairs are returned in LIFO order if last is true or FIFO order if false.
'''
if not self:
raise KeyError('dictionary is empty')
root = self.__root
if last:
link = root[0]
link_prev = link[0]
link_prev[1] = root
root[0] = link_prev
else:
link = root[1]
link_next = link[1]
root[1] = link_next
link_next[0] = root
key = link[2]
del self.__map[key]
value = dict.pop(self, key)
return key, value
# -- the following methods do not depend on the internal structure --
def keys(self):
'od.keys() -> list of keys in od'
return list(self)
def values(self):
'od.values() -> list of values in od'
return [self[key] for key in self]
def items(self):
'od.items() -> list of (key, value) pairs in od'
return [(key, self[key]) for key in self]
def iterkeys(self):
'od.iterkeys() -> an iterator over the keys in od'
return iter(self)
def itervalues(self):
'od.itervalues() -> an iterator over the values in od'
for k in self:
yield self[k]
def iteritems(self):
'od.iteritems() -> an iterator over the (key, value) items in od'
for k in self:
yield (k, self[k])
def update(*args, **kwds):
'''od.update(E, **F) -> None. Update od from dict/iterable E and F.
If E is a dict instance, does: for k in E: od[k] = E[k]
If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]
Or if E is an iterable of items, does: for k, v in E: od[k] = v
In either case, this is followed by: for k, v in F.items(): od[k] = v
'''
if len(args) > 2:
raise TypeError('update() takes at most 2 positional '
'arguments (%d given)' % (len(args),))
elif not args:
raise TypeError('update() takes at least 1 argument (0 given)')
self = args[0]
# Make progressively weaker assumptions about "other"
other = ()
if len(args) == 2:
other = args[1]
if isinstance(other, dict):
for key in other:
self[key] = other[key]
elif hasattr(other, 'keys'):
for key in other.keys():
self[key] = other[key]
else:
for key, value in other:
self[key] = value
for key, value in kwds.items():
self[key] = value
__update = update # let subclasses override update without breaking __init__
__marker = object()
def pop(self, key, default=__marker):
'''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
If key is not found, d is returned if given, otherwise KeyError is raised.
'''
if key in self:
result = self[key]
del self[key]
return result
if default is self.__marker:
raise KeyError(key)
return default
def setdefault(self, key, default=None):
'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
if key in self:
return self[key]
self[key] = default
return default
def __repr__(self, _repr_running={}):
'od.__repr__() <==> repr(od)'
call_key = id(self), _get_ident()
if call_key in _repr_running:
return '...'
_repr_running[call_key] = 1
try:
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, self.items())
finally:
del _repr_running[call_key]
def __reduce__(self):
'Return state information for pickling'
items = [[k, self[k]] for k in self]
inst_dict = vars(self).copy()
for k in vars(OrderedDict()):
inst_dict.pop(k, None)
if inst_dict:
return (self.__class__, (items,), inst_dict)
return self.__class__, (items,)
def copy(self):
'od.copy() -> a shallow copy of od'
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
'''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
and values equal to v (which defaults to None).
'''
d = cls()
for key in iterable:
d[key] = value
return d
def __eq__(self, other):
'''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
while comparison to a regular mapping is order-insensitive.
'''
if isinstance(other, OrderedDict):
return len(self)==len(other) and self.items() == other.items()
return dict.__eq__(self, other)
def __ne__(self, other):
return not self == other
# -- the following methods are only used in Python 2.7 --
def viewkeys(self):
"od.viewkeys() -> a set-like object providing a view on od's keys"
return KeysView(self)
def viewvalues(self):
"od.viewvalues() -> an object providing a view on od's values"
return ValuesView(self)
def viewitems(self):
"od.viewitems() -> a set-like object providing a view on od's items"
return ItemsView(self)
|
g0t3n/Ksymhunter-gui
|
utils/elftools/common/ordereddict.py
|
Python
|
gpl-2.0
| 8,998
|
# -*- coding: utf-8 -*-
#
# Picard, the next-generation MusicBrainz tagger
# Copyright (C) 2007 Lukáš Lalinský
# Copyright (C) 2006 Matthias Friedrich
# Copyright (C) 2013 Laurent Monin
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
try:
# use python-libdiscid (http://pythonhosted.org/python-libdiscid/)
from libdiscid.compat import discid
except ImportError:
try:
# use python-discid (http://python-discid.readthedocs.org/en/latest/)
import discid
except ImportError:
discid = None
import traceback
from PyQt4 import QtCore
from picard import log
from picard.ui.cdlookup import CDLookupDialog
class Disc(QtCore.QObject):
def __init__(self):
QtCore.QObject.__init__(self)
self.id = None
self.submission_url = None
def read(self, device=None):
if device is None:
device = discid.get_default_device()
log.debug(u"Reading CD using device: %r", device)
disc = discid.read(device)
self.id = disc.id
self.submission_url = disc.submission_url
def lookup(self):
self.tagger.xmlws.lookup_discid(self.id, self._lookup_finished)
def _lookup_finished(self, document, http, error):
self.tagger.restore_cursor()
releases = []
if error:
log.error("%r", unicode(http.errorString()))
else:
try:
releases = document.metadata[0].disc[0].release_list[0].release
except (AttributeError, IndexError):
log.error(traceback.format_exc())
dialog = CDLookupDialog(releases, self, parent=self.tagger.window)
dialog.exec_()
if discid is not None:
discid_version = "discid %s, %s" % (discid.__version__,
discid.LIBDISCID_VERSION_STRING)
else:
discid_version = None
|
dufferzafar/picard
|
picard/disc.py
|
Python
|
gpl-2.0
| 2,517
|
#!/usr/bin/env python
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2012, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
from lxml import etree
from calibre.ebooks.oeb.base import (urlnormalize, XPath, XHTML_NS, XHTML,
XHTML_MIME)
DEFAULT_TITLE = __('Table of Contents')
TEMPLATE = '''
<html xmlns="{xhtmlns}">
<head>
<title>{title}</title>
<style type="text/css">
li {{ list-style-type: none }}
a {{ text-decoration: none }}
a:hover {{ color: red }}
{extra_css}
{embed_css}
</style>
</head>
<body id="calibre_generated_inline_toc">
<h2>{title}</h2>
<ul>
</ul>
</body>
</html>
'''
def find_previous_calibre_inline_toc(oeb):
if 'toc' in oeb.guide:
href = urlnormalize(oeb.guide['toc'].href.partition('#')[0])
if href in oeb.manifest.hrefs:
item = oeb.manifest.hrefs[href]
if (hasattr(item.data, 'xpath') and XPath('//h:body[@id="calibre_generated_inline_toc"]')(item.data)):
return item
class TOCAdder(object):
def __init__(self, oeb, opts, replace_previous_inline_toc=False, ignore_existing_toc=False):
self.oeb, self.opts, self.log = oeb, opts, oeb.log
self.title = opts.toc_title or DEFAULT_TITLE
self.at_start = opts.mobi_toc_at_start
self.generated_item = None
self.added_toc_guide_entry = False
self.has_toc = oeb.toc and oeb.toc.count() > 1
self.tocitem = tocitem = None
if replace_previous_inline_toc:
tocitem = self.tocitem = find_previous_calibre_inline_toc(oeb)
if ignore_existing_toc and 'toc' in oeb.guide:
oeb.guide.remove('toc')
if 'toc' in oeb.guide:
# Remove spurious toc entry from guide if it is not in spine or it
# does not have any hyperlinks
href = urlnormalize(oeb.guide['toc'].href.partition('#')[0])
if href in oeb.manifest.hrefs:
item = oeb.manifest.hrefs[href]
if (hasattr(item.data, 'xpath') and
XPath('//h:a[@href]')(item.data)):
if oeb.spine.index(item) < 0:
oeb.spine.add(item, linear=False)
return
elif self.has_toc:
oeb.guide.remove('toc')
else:
oeb.guide.remove('toc')
if (not self.has_toc or 'toc' in oeb.guide or opts.no_inline_toc or
getattr(opts, 'mobi_passthrough', False)):
return
self.log('\tGenerating in-line ToC')
embed_css = ''
s = getattr(oeb, 'store_embed_font_rules', None)
if getattr(s, 'body_font_family', None):
css = [x.cssText for x in s.rules] + [
'body { font-family: %s }'%s.body_font_family]
embed_css = '\n\n'.join(css)
root = etree.fromstring(TEMPLATE.format(xhtmlns=XHTML_NS,
title=self.title, embed_css=embed_css,
extra_css=(opts.extra_css or '')))
parent = XPath('//h:ul')(root)[0]
parent.text = '\n\t'
for child in self.oeb.toc:
self.process_toc_node(child, parent)
if tocitem is not None:
href = tocitem.href
if oeb.spine.index(tocitem) > -1:
oeb.spine.remove(tocitem)
tocitem.data = root
else:
id, href = oeb.manifest.generate('contents', 'contents.xhtml')
tocitem = self.generated_item = oeb.manifest.add(id, href, XHTML_MIME,
data=root)
if self.at_start:
oeb.spine.insert(0, tocitem, linear=True)
else:
oeb.spine.add(tocitem, linear=False)
oeb.guide.add('toc', 'Table of Contents', href)
def process_toc_node(self, toc, parent, level=0):
li = parent.makeelement(XHTML('li'))
li.tail = '\n'+ ('\t'*level)
parent.append(li)
href = toc.href
if self.tocitem is not None and href:
href = self.tocitem.relhref(toc.href)
a = parent.makeelement(XHTML('a'), href=href or '#')
a.text = toc.title
li.append(a)
if toc.count() > 0:
parent = li.makeelement(XHTML('ul'))
li.append(parent)
a.tail = '\n' + ('\t'*level)
parent.text = '\n'+('\t'*(level+1))
parent.tail = '\n' + ('\t'*level)
for child in toc:
self.process_toc_node(child, parent, level+1)
def remove_generated_toc(self):
if self.generated_item is not None:
self.oeb.manifest.remove(self.generated_item)
self.generated_item = None
if self.added_toc_guide_entry:
self.oeb.guide.remove('toc')
self.added_toc_guide_entry = False
|
alexston/calibre-webserver
|
src/calibre/ebooks/mobi/writer8/toc.py
|
Python
|
gpl-3.0
| 4,948
|
#!/usr/bin/python
# (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or
# https://www.gnu.org/licenses/gpl-3.0.txt)
'''
Element Software Initialize Cluster
'''
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = '''
module: na_elementsw_cluster
short_description: NetApp Element Software Create Cluster
extends_documentation_fragment:
- netapp.solidfire
version_added: '2.7'
author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com>
description:
- Initialize Element Software node ownership to form a cluster.
options:
management_virtual_ip:
description:
- Floating (virtual) IP address for the cluster on the management network.
required: true
storage_virtual_ip:
description:
- Floating (virtual) IP address for the cluster on the storage (iSCSI) network.
required: true
replica_count:
description:
- Number of replicas of each piece of data to store in the cluster.
default: '2'
cluster_admin_username:
description:
- Username for the cluster admin.
- If not provided, defaults to the login username.
cluster_admin_password:
description:
- Initial password for the cluster admin account.
- If not provided, defaults to the login password.
accept_eula:
description:
- Required to indicate your acceptance of the End User License Agreement when creating this cluster.
- To accept the EULA, set this parameter to true.
type: bool
nodes:
description:
- Storage IP (SIP) addresses of the initial set of nodes making up the cluster.
- The storage IP of the node being initialized must itself be in the list.
attributes:
description:
- List of name-value pairs in JSON object format.
'''
EXAMPLES = """
- name: Initialize new cluster
tags:
- elementsw_cluster
na_elementsw_cluster:
hostname: "{{ elementsw_hostname }}"
username: "{{ elementsw_username }}"
password: "{{ elementsw_password }}"
management_virtual_ip: 10.226.108.32
storage_virtual_ip: 10.226.109.68
replica_count: 2
cluster_admin_username: admin
cluster_admin_password: netapp123
accept_eula: true
nodes:
- 10.226.109.72
- 10.226.109.74
"""
RETURN = """
msg:
description: Success message
returned: success
type: string
"""
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
import ansible.module_utils.netapp as netapp_utils
from ansible.module_utils.netapp_elementsw_module import NaElementSWModule
HAS_SF_SDK = netapp_utils.has_sf_sdk()
class ElementSWCluster(object):
"""
Element Software Initialize node with ownership for cluster formation
"""
def __init__(self):
self.argument_spec = netapp_utils.ontap_sf_host_argument_spec()
self.argument_spec.update(dict(
management_virtual_ip=dict(required=True, type='str'),
storage_virtual_ip=dict(required=True, type='str'),
replica_count=dict(required=False, type='str', default='2'),
cluster_admin_username=dict(required=False, type='str'),
cluster_admin_password=dict(required=False, type='str', no_log=True),
accept_eula=dict(required=True, type='bool'),
nodes=dict(required=False, type='list', default=None),
attributes=dict(required=False, type='dict', default=None)  # dict: .update() is called on it below
))
self.module = AnsibleModule(
argument_spec=self.argument_spec,
supports_check_mode=True
)
input_params = self.module.params
self.management_virtual_ip = input_params['management_virtual_ip']
self.storage_virtual_ip = input_params['storage_virtual_ip']
self.replica_count = input_params['replica_count']
self.accept_eula = input_params['accept_eula']
self.attributes = input_params['attributes']
self.nodes = input_params['nodes']
if input_params['cluster_admin_username'] is None:
self.cluster_admin_username = self.username
else:
self.cluster_admin_username = input_params['cluster_admin_username']
# Mirror the username handling: fall back to the login password only when
# no cluster admin password was supplied.
if input_params['cluster_admin_password'] is None:
self.cluster_admin_password = self.password
else:
self.cluster_admin_password = input_params['cluster_admin_password']
if HAS_SF_SDK is False:
self.module.fail_json(msg="Unable to import the SolidFire Python SDK")
else:
self.sfe = netapp_utils.create_sf_connection(module=self.module, port=442)
self.elementsw_helper = NaElementSWModule(self.sfe)
# add telemetry attributes
if self.attributes is not None:
self.attributes.update(self.elementsw_helper.set_element_attributes(source='na_elementsw_cluster'))
else:
self.attributes = self.elementsw_helper.set_element_attributes(source='na_elementsw_cluster')
def create_cluster(self):
"""
Create Cluster
"""
try:
self.sfe.create_cluster(mvip=self.management_virtual_ip,
svip=self.storage_virtual_ip,
rep_count=self.replica_count,
username=self.cluster_admin_username,
password=self.cluster_admin_password,
accept_eula=self.accept_eula,
nodes=self.nodes,
attributes=self.attributes)
except Exception as exception_object:
self.module.fail_json(msg='Error creating cluster: %s' % (to_native(exception_object)),
exception=traceback.format_exc())
def check_connection(self):
"""
Check connections to mvip, svip address.
:description: To test connection to given IP addressed for mvip and svip
:rtype: bool
"""
try:
mvip_test = self.sfe.test_connect_mvip(mvip=self.management_virtual_ip)
svip_test = self.sfe.test_connect_svip(svip=self.storage_virtual_ip)
if mvip_test.details.connected and svip_test.details.connected:
return True
else:
return False
except Exception:
return False
def apply(self):
"""
Check connection and initialize node with cluster ownership
"""
changed = False
result_message = None
# supports_check_mode is always true here; what matters is whether we are
# actually running in check mode, so honour that instead.
if self.accept_eula and not self.module.check_mode:
if self.check_connection():
self.create_cluster()
changed = True
else:
self.module.fail_json(msg='Error connecting mvip and svip address')
else:
result_message = "Skipping changes, No change requested"
self.module.exit_json(changed=changed, msg=result_message)
def main():
"""
Main function
"""
na_elementsw_cluster = ElementSWCluster()
na_elementsw_cluster.apply()
if __name__ == '__main__':
main()
|
veger/ansible
|
lib/ansible/modules/storage/netapp/na_elementsw_cluster.py
|
Python
|
gpl-3.0
| 7,424
|
#
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
import copy
from ansible import constants as C
from ansible.module_utils._text import to_text
from ansible.module_utils.connection import Connection, ConnectionError
from ansible.plugins.action.normal import ActionModule as _ActionModule
from ansible.module_utils.network.common.utils import load_provider
from ansible.module_utils.network.ios.ios import ios_provider_spec
from ansible.utils.display import Display
display = Display()
class ActionModule(_ActionModule):
def run(self, tmp=None, task_vars=None):
del tmp # tmp no longer has any effect
socket_path = None
if self._play_context.connection == 'network_cli':
provider = self._task.args.get('provider', {})
if any(provider.values()):
display.warning('provider is unnecessary when using network_cli and will be ignored')
del self._task.args['provider']
elif self._play_context.connection == 'local':
provider = load_provider(ios_provider_spec, self._task.args)
pc = copy.deepcopy(self._play_context)
pc.connection = 'network_cli'
pc.network_os = 'ios'
pc.remote_addr = provider['host'] or self._play_context.remote_addr
pc.port = int(provider['port'] or self._play_context.port or 22)
pc.remote_user = provider['username'] or self._play_context.connection_user
pc.password = provider['password'] or self._play_context.password
pc.private_key_file = provider['ssh_keyfile'] or self._play_context.private_key_file
pc.become = provider['authorize'] or False
if pc.become:
pc.become_method = 'enable'
pc.become_pass = provider['auth_pass']
display.vvv('using connection plugin %s (was local)' % pc.connection, pc.remote_addr)
connection = self._shared_loader_obj.connection_loader.get('persistent', pc, sys.stdin)
command_timeout = int(provider['timeout']) if provider['timeout'] else connection.get_option('persistent_command_timeout')
connection.set_options(direct={'persistent_command_timeout': command_timeout})
socket_path = connection.run()
display.vvvv('socket_path: %s' % socket_path, pc.remote_addr)
if not socket_path:
return {'failed': True,
'msg': 'unable to open shell. Please see: ' +
'https://docs.ansible.com/ansible/network_debug_troubleshooting.html#unable-to-open-shell'}
task_vars['ansible_socket'] = socket_path
else:
return {'failed': True, 'msg': 'Connection type %s is not valid for this module' % self._play_context.connection}
# make sure we are in the right cli context, which should be
# enable mode and not config mode
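# e.g. 'Router(config)#' and 'Router(config-if)#' both end with ')#' and
# indicate config mode; sending 'exit' until the prompt no longer ends with
# ')#' should land us back at the enable prompt, e.g. 'Router#'.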
if socket_path is None:
socket_path = self._connection.socket_path
conn = Connection(socket_path)
try:
out = conn.get_prompt()
while to_text(out, errors='surrogate_then_replace').strip().endswith(')#'):
display.vvvv('wrong context, sending exit to device', self._play_context.remote_addr)
conn.send_command('exit')
out = conn.get_prompt()
except ConnectionError as exc:
return {'failed': True, 'msg': to_text(exc)}
result = super(ActionModule, self).run(task_vars=task_vars)
return result
|
veger/ansible
|
lib/ansible/plugins/action/ios.py
|
Python
|
gpl-3.0
| 4,289
|
# -*- coding: utf-8 -*-
""" *==LICENSE==*
CyanWorlds.com Engine - MMOG client, server and tools
Copyright (C) 2011 Cyan Worlds, Inc.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Additional permissions under GNU GPL version 3 section 7
If you modify this Program, or any covered work, by linking or
combining it with any of RAD Game Tools Bink SDK, Autodesk 3ds Max SDK,
NVIDIA PhysX SDK, Microsoft DirectX SDK, OpenSSL library, Independent
JPEG Group JPEG library, Microsoft Windows Media SDK, or Apple QuickTime SDK
(or a modified version of those libraries),
containing parts covered by the terms of the Bink SDK EULA, 3ds Max EULA,
PhysX SDK EULA, DirectX SDK EULA, OpenSSL and SSLeay licenses, IJG
JPEG Library README, Windows Media SDK EULA, or QuickTime SDK EULA, the
licensors of this Program grant you additional
permission to convey the resulting work. Corresponding Source for a
non-source form of such a combination shall include the source code for
the parts of OpenSSL and IJG JPEG Library used as well as that of the covered
work.
You can contact Cyan Worlds, Inc. by email legal@cyan.com
or by snail mail at:
Cyan Worlds, Inc.
14617 N Newport Hwy
Mead, WA 99021
*==LICENSE==* """
"""
Module: nglnUrwinBrain
Age: Negilahn
Date: January 2004
Author: Doug McBride
HaX0r3d: Derek Odell
Controls the appearance and behavior of the Negilahn Urwin Bird
"""
from Plasma import *
from PlasmaTypes import *
import random
# define the attributes that will be entered in max
UrwinMasterAnim = ptAttribAnimation(1, "Urwin Master Anim", netForce=1)
respUrwinVocalize = ptAttribResponder(2, "resp: Urwin Vocalize")
respUrwinIdle = ptAttribResponder(3, "resp: Urwin Idles")
respUrwinIdleToWalk = ptAttribResponder(4, "resp: Urwin Idle To Walk")
respUrwinWalkLoop = ptAttribResponder(5, "resp: Urwin Walk Loop")
respUrwinWalkToIdle = ptAttribResponder(6, "resp: Urwin Walk To Idle")
respUrwinEat = ptAttribResponder(7, "resp: Urwin Eats")
respUrwinSfx = ptAttribNamedResponder(8, "resp: Urwin SFX", ['Eat','Idle','IdleToWalk','WalkLoop','WalkToIdle','Vocalize','Distance','Appear'], netForce=1)
actUrwinPathEnd = ptAttribActivator(9, "act: Urwin Path End")
# define globals
kDayLengthInSeconds = 56585 # Length of a Negilahn day in seconds. Must match the value in the Negilahn.age file
kMinimumTimeBetweenSpawns = 3600 # 1 hour
kMaximumTimeBetweenSpawns = 25200 # 7 hours
# We need the first random spawn time in the first 5 hours of the day
# which is in the first 44.5 percent of the day. So we're
# generating a number from 0 to 445 and later we divide by 1000 to get
# something roughly in that timeframe.
kFirstMorningSpawn = 445
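# Worked example (illustrative): if randint(0, kFirstMorningSpawn) returns
# 300, the first spawn lands at 300 / 1000.0 = 30% of the day, i.e. roughly
# int(0.30 * 56585) = 16975 seconds after dawn.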
minsteps = 3
maxsteps = 10
StepsToTake = 0
stackList = []
class nglnUrwinBrain(ptResponder):
############################
def __init__(self):
ptResponder.__init__(self)
self.id = 5244
version = 4
self.version = version
print "__init__nglnUrwinBrain v.", version,".0"
random.seed()
############################
def OnServerInitComplete(self):
try:
ageSDL = PtGetAgeSDL()
except:
print "nglnUrwinBrain:\tERROR---Cannot find the Negilahn Age SDL"
self.InitNewSDLVars()
ageSDL.sendToClients("UrwinLastUpdated")
ageSDL.sendToClients("UrwinSpawnTimes")
ageSDL.sendToClients("UrwinOnTheProwl")
ageSDL.setFlags("UrwinLastUpdated", 1, 1)
ageSDL.setFlags("UrwinSpawnTimes", 1, 1)
ageSDL.setFlags("UrwinOnTheProwl", 1, 1)
ageSDL.setFlags("nglnPodLights", 1, 1)
ageSDL.setNotify(self.key, "UrwinLastUpdated", 0.0)
ageSDL.setNotify(self.key, "UrwinSpawnTimes", 0.0)
ageSDL.setNotify(self.key, "UrwinOnTheProwl", 0.0)
ageSDL.setNotify(self.key, "nglnPodLights", 0.0)
thisDay = int(PtGetDniTime() / kDayLengthInSeconds)
lastDay = int(ageSDL["UrwinLastUpdated"][0] / kDayLengthInSeconds)
if (thisDay - lastDay) > 0:
print "nglnUrwinBrain: It's been at least a day since the last update, running new numbers now."
self.InitNewSDLVars()
else:
print "nglnUrwinBrain: It's been less than a day since the last update, doing nothing"
self.SetUrwinTimers()
if not len(PtGetPlayerList()):
UrwinMasterAnim.animation.skipToBegin()
###########################
def OnSDLNotify(self,VARname,SDLname,playerID,tag):
if VARname == "UrwinSpawnTimes":
self.SetUrwinTimers()
#Had to wire this to stop SFX if the lights go off....
elif VARname == "nglnPodLights":
ageSDL = PtGetAgeSDL()
if not ageSDL[VARname][0] and ageSDL["UrwinOnTheProwl"][0]:
respUrwinSfx.run(self.key, state="Idle")
############################
def OnNotify(self,state,id,events):
global StepsToTake
global stackList
ageSDL = PtGetAgeSDL()
print "nglnUrwinBrain.OnNotify: state=%f id=%d owned=%s prowl=%s events=" % (state,id,str(self.sceneobject.isLocallyOwned()),str(ageSDL["UrwinOnTheProwl"][0])),events
if id == (-1):
print "Need to store event: %s" % (events[0][1])
stackList.append(events[0][1])
print "New list is: %s" % (str(stackList))
if len(stackList) == 1:
print "List is only one command long, so I'm playing it"
code = stackList[0]
print "Playing command: %s" % (code)
self.ExecCode(code)
elif state and self.sceneobject.isLocallyOwned() and ageSDL["UrwinOnTheProwl"][0]:
if id == respUrwinSfx.id:
print "Callback was from Appearance SFX, and I own the age, so start walking"
self.StartToWalk()
else:
print "Callback was from responder, and I own the age, so Logic Time"
old = stackList.pop(0)
print "Popping off: %s" % (old)
boolBatteryChargedAndOn = ageSDL["nglnPodLights"][0]
if id == respUrwinIdleToWalk.id:
self.StartToWalk()
elif id == respUrwinWalkLoop.id:
StepsToTake = StepsToTake - 1
if StepsToTake:
# 90% chance of continuing walk loop
if random.randint(0,9):
print "Urwin will take %d more steps..." % (StepsToTake)
self.SendNote("respUrwinWalkLoop")
if boolBatteryChargedAndOn:
respUrwinSfx.run(self.key, state="WalkLoop")
# 10% to eat
else:
print "Urwin is hungry and decides to eat"
self.SendNote("respUrwinEat")
if boolBatteryChargedAndOn:
respUrwinSfx.run(self.key, state="Eat")
else:
print "Urwin is tired and stops walking"
PtAtTimeCallback(self.key, 0.666, 3)
self.SendNote("respUrwinWalkToIdle")
if boolBatteryChargedAndOn:
respUrwinSfx.run(self.key, state="WalkToIdle")
elif id == respUrwinEat.id:
print "Urwin is done eating and continues walking"
self.SendNote("respUrwinWalkLoop")
if boolBatteryChargedAndOn:
respUrwinSfx.run(self.key, state="WalkLoop")
elif id == respUrwinWalkToIdle.id:
self.RandomBehavior()
elif id == respUrwinIdle.id or id == respUrwinVocalize.id:
# 66% chance of picking another random behaviour
if random.randint(0,2):
self.RandomBehavior()
# 33% to go back to walking
else:
print "Urwin is rested and goes back to walking"
self.SendNote("respUrwinIdleToWalk")
UrwinMasterAnim.animation.resume()
if boolBatteryChargedAndOn:
respUrwinSfx.run(self.key, state="IdleToWalk")
elif id == actUrwinPathEnd.id:
print "End of the line, Urwin!"
UrwinMasterAnim.animation.stop()
UrwinMasterAnim.animation.skipToTime(0)
ageSDL["UrwinOnTheProwl"] = (0,)
if boolBatteryChargedAndOn:
respUrwinSfx.run(self.key, state="Distance")
elif id in range(2,8) and not self.sceneobject.isLocallyOwned():
print "Callback was from responder, and I DON'T own the age, so I'll try playing the next item in list"
old = stackList.pop(0)
print "Popping off: %s" % (old)
if len(stackList):
print "List has at least one item ready to play"
code = stackList[0]
print "Playing command: %s" % (code)
self.ExecCode(code)
else:
print "Callback from something else?"
############################
def RandomBehavior(self):
ageSDL = PtGetAgeSDL()
boolBatteryChargedAndOn = ageSDL["nglnPodLights"][0]
# 66% chance of idling
if random.randint(0,2):
print "Urwin is being lazy and just idling"
self.SendNote("respUrwinIdle")
if boolBatteryChargedAndOn:
respUrwinSfx.run(self.key, state="Idle")
# 33% to vocalize
else:
print "Urwin is calling home."
self.SendNote("respUrwinVocalize")
if boolBatteryChargedAndOn:
respUrwinSfx.run(self.key, state="Vocalize")
############################
def StartToWalk(self):
global StepsToTake
ageSDL = PtGetAgeSDL()
boolBatteryChargedAndOn = ageSDL["nglnPodLights"][0]
StepsToTake = random.randint(minsteps, maxsteps)
print "Urwin has decided to take %d steps." % (StepsToTake)
self.SendNote("respUrwinWalkLoop")
UrwinMasterAnim.animation.resume()
if boolBatteryChargedAndOn:
respUrwinSfx.run(self.key, state="WalkLoop")
############################
def OnTimer(self,TimerID):
global UrwinOnTheProwl
ageSDL = PtGetAgeSDL()
boolBatteryChargedAndOn = ageSDL["nglnPodLights"][0]
if self.sceneobject.isLocallyOwned():
if TimerID == 1:
print "UrwinBrain.OnTimer: Time for the Urwin to return."
ageSDL["UrwinOnTheProwl"] = (1,)
if ageSDL["nglnPodLights"][0]:
respUrwinSfx.run(self.key, state="Appear")
else:
self.StartToWalk()
elif TimerID == 2:
print "UrwinBrain.OnTimer: New day, let's renew the timers."
self.InitNewSDLVars()
elif TimerID == 3:
UrwinMasterAnim.animation.stop()
###########################
def SendNote(self, ExtraInfo):
#pythonBox = PtFindSceneobject("Dummy04", "Negilahn")
#pmlist = pythonBox.getPythonMods()
#for pm in pmlist:
notify = ptNotify(self.key)
notify.clearReceivers()
#notify.addReceiver(pm)
notify.addReceiver(self.key)
notify.netPropagate(1)
notify.netForce(1)
notify.setActivate(1.0)
notify.addVarNumber(str(ExtraInfo),1.0)
notify.send()
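# Editor's note (inferred from the code above, not part of the original
# script): SendNote round-trips a net-propagated notify back to this same
# key, so every client's OnNotify fires with id == -1 and events[0][1]
# carrying the command string; that is how the shared stackList command
# queue stays in sync across clients.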
###########################
def InitNewSDLVars(self):
ageSDL = PtGetAgeSDL()
ageSDL["UrwinLastUpdated"] = (PtGetDniTime(),)
beginningOfToday = PtGetDniTime() - int(PtGetAgeTimeOfDayPercent() * kDayLengthInSeconds)
endOfToday = int(kDayLengthInSeconds / 2) + beginningOfToday
#print "Dawn: %d Dusk: %d" % (beginningOfToday, endOfToday)
# We need a random time in the first 5 hours of the day,
# which is in the first 44.5 percent of the day. So we're
# generating a number from 0 to 445 and dividing by 1000 to get
# something roughly in that timeframe.
randnum = float(random.randint(0,kFirstMorningSpawn))
firstTime = int((randnum / 1000.0) * kDayLengthInSeconds) + beginningOfToday
print "nglnUrwinBrain: Generated a valid spawn time: %d" % (firstTime)
spawnTimes = [firstTime]
while type(spawnTimes[-1]) == type(long(1)):
randnum = random.randint(kMinimumTimeBetweenSpawns, kMaximumTimeBetweenSpawns)
newTime = spawnTimes[-1] + randnum
if newTime < endOfToday:
print "nglnUrwinBrain: Generated a valid spawn time: %d" % (newTime)
spawnTimes.append(newTime)
else:
print "nglnUrwinBrain: Generated a spawn time after dusk, exiting loop: %d" % (newTime)
break
else:
print "nglnUrwinBrain:ERROR---Tried to add a spawn time that's not a number: " , spawnTimes
spawnTimes = [0]
while len(spawnTimes) < 20:
spawnTimes.append(0)
ageSDL["UrwinSpawnTimes"] = tuple(spawnTimes)
###########################
def SetUrwinTimers(self):
PtClearTimerCallbacks(self.key)
ageSDL = PtGetAgeSDL()
if ageSDL["UrwinSpawnTimes"][0]:
for timer in ageSDL["UrwinSpawnTimes"]:
if timer:
timeTillSpawn = timer - PtGetDniTime()
print "timer: %d time: %d timeTillSpawn: %d" % (timer,PtGetDniTime(),timeTillSpawn)
if timeTillSpawn > 0:
print "nglnUrwinBrain: Setting timer for %d seconds" % (timeTillSpawn)
PtAtTimeCallback(self.key, timeTillSpawn, 1)
# precision error FTW!
timeLeftToday = kDayLengthInSeconds - int(PtGetAgeTimeOfDayPercent() * kDayLengthInSeconds)
timeLeftToday += 1 # because we want it to go off right AFTER the day flips
print "nglnUrwinBrain: Setting EndOfDay timer for %d seconds" % (timeLeftToday)
PtAtTimeCallback(self.key, timeLeftToday, 2)
else:
print "nglnUrwinBrain: Timer array was empty!"
###########################
def OnBackdoorMsg(self, target, param):
global kMinimumTimeBetweenSpawns
global kMaximumTimeBetweenSpawns
global kFirstMorningSpawn
ageSDL = PtGetAgeSDL()
if target == "urwin":
if self.sceneobject.isLocallyOwned():
print "nglnUrwinBrain.OnBackdoorMsg: Backdoor!"
if param == "walk":
ageSDL["UrwinOnTheProwl"] = (1,)
if ageSDL["nglnPodLights"][0]:
PtAtTimeCallback(self.key, 1, 1)
else:
self.StartToWalk()
elif param == "restore":
kMinimumTimeBetweenSpawns = 3600 # 1 hour
kMaximumTimeBetweenSpawns = 25200 # 7 hours
kFirstMorningSpawn = 445
self.InitNewSDLVars()
elif type(param) == type(""):
newTimes = param.split(";")
kMinimumTimeBetweenSpawns = int(newTimes[0])
kMaximumTimeBetweenSpawns = int(newTimes[1])
kFirstMorningSpawn = 1
self.InitNewSDLVars()
def ExecCode(self, code):
global stackList
if code == "respUrwinIdle":
respUrwinIdle.run(self.key)
elif code == "respUrwinIdleToWalk":
respUrwinIdleToWalk.run(self.key)
elif code == "respUrwinWalkLoop":
respUrwinWalkLoop.run(self.key)
elif code == "respUrwinWalkToIdle":
respUrwinWalkToIdle.run(self.key)
elif code == "respUrwinEat":
respUrwinEat.run(self.key)
elif code == "respUrwinVocalize":
respUrwinVocalize.run(self.key)
else:
print "nglnUrwinBrain.ExecCode(): ERROR! Invalid code '%s'." % (code)
stackList.pop(0)
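# Note: even an invalid code pops the head of stackList, so one bad
# command cannot stall the shared queue for the remaining entries.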
|
TOC-Shard/moul-scripts
|
Python/nglnUrwinBrain.py
|
Python
|
gpl-3.0
| 17,059
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2008-2015 Canonical
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# For further info, check http://launchpad.net/filesync-server
"""Start supervisord."""
import os
import sys
from utilities.utils import os_exec, is_running, get_tmpdir, get_rootdir
import utilities.localendpoints as local
ROOT_DIR = get_rootdir()
TMP_DIR = get_tmpdir()
def write_conf(tpl, inet_http_server_port):
"""Write out filled in conf template in tmp."""
template = open(os.path.join(ROOT_DIR, tpl)).read()
template_vars = {}
template_vars['inet_http_server_port'] = inet_http_server_port
template_vars['basepath'] = ROOT_DIR
template_vars['tmp_dir'] = TMP_DIR
conf = template % template_vars
conf_file_path = os.path.join(TMP_DIR,
os.path.basename(tpl).replace('.tpl', ''))
with open(conf_file_path, 'w') as conf_file:
conf_file.write(conf)
return conf_file_path
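# Illustrative sketch (hypothetical template line, not from this repo):
# since the substitution is `template % template_vars` with a dict, a
# template containing "port=%(inet_http_server_port)s" would render as
# e.g. "port=54321" for whatever port was allocated.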
def main(tpls):
"""main."""
main_tpl = tpls[0]
main_name = os.path.basename(main_tpl).replace('.conf.tpl', '')
pid_file = os.path.join(TMP_DIR, main_name + '.pid')
port_name = main_name + '-rpc'
# first, check if it's already running
if is_running("supervisor", pid_file, port_name):
print "Supervisor [%s] already running" % main_name
sys.exit(0)
print "Starting supervisor [%s]" % main_name
# get port
inet_http_server_port, = local.allocate_ports(1)
local.register_local_port(port_name, inet_http_server_port)
conf_file_path = write_conf(main_tpl, inet_http_server_port)
for tpl in tpls[1:]:
write_conf(tpl, inet_http_server_port)
os.chdir(ROOT_DIR)
try:
os.mkdir(os.path.join(TMP_DIR, main_name + '-childlog'))
except OSError:
pass
os_exec("/usr/bin/supervisord", "-c", conf_file_path)
if __name__ == "__main__":
main(sys.argv[1:])
|
santoshsahoo/filesync-server
|
lib/ubuntuone/supervisor/start-supervisord.py
|
Python
|
agpl-3.0
| 2,561
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyAgateSql(PythonPackage):
"""agate-sql adds SQL read/write support to agate."""
homepage = "https://agate-sql.readthedocs.io/en/latest/"
pypi = "agate-sql/agate-sql-0.5.4.tar.gz"
version('0.5.4', sha256='9277490ba8b8e7c747a9ae3671f52fe486784b48d4a14e78ca197fb0e36f281b')
depends_on('py-setuptools', type='build')
depends_on('py-agate@1.5.0:', type=('build', 'run'))
depends_on('py-sqlalchemy@1.0.8:', type=('build', 'run'))
|
LLNL/spack
|
var/spack/repos/builtin/packages/py-agate-sql/package.py
|
Python
|
lgpl-2.1
| 694
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class DtDiamond(Package):
"""This package has an indirect diamond dependency on dt-diamond-bottom"""
homepage = "http://www.example.com"
url = "http://www.example.com/dt-diamond-1.0.tar.gz"
version('1.0', '0123456789abcdef0123456789abcdef')
depends_on('dt-diamond-left')
depends_on('dt-diamond-right')
|
LLNL/spack
|
var/spack/repos/builtin.mock/packages/dt-diamond/package.py
|
Python
|
lgpl-2.1
| 548
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Maq(AutotoolsPackage):
"""Maq is a software that builds mapping assemblies from short reads
generated by the next-generation sequencing machines."""
homepage = "http://maq.sourceforge.net/"
url = "https://downloads.sourceforge.net/project/maq/maq/0.7.1/maq-0.7.1.tar.bz2"
list_url = "https://sourceforge.net/projects/maq/files/maq/"
version('0.7.1', sha256='e1671e0408b0895f5ab943839ee8f28747cf5f55dc64032c7469b133202b6de2')
version('0.5.0', sha256='c292c19baf291b2415b460d687d43a71ece00a7d178cc5984bc8fc30cfce2dfb')
conflicts('%gcc@4.7.0:', when='@0.7.1')
|
LLNL/spack
|
var/spack/repos/builtin/packages/maq/package.py
|
Python
|
lgpl-2.1
| 826
|
x = 1
x += 2
assert type(x) == int
del x
pass
exec('x = 1')
from sys import winver
global x
import sys
print(x)
try:
x = 1
except:
pass
try:
x = 1
finally:
pass
with open('fob.txt', 'w+'):
x = 1
|
DinoV/PTVS
|
Python/Tests/TestData/Coverage/Statements/test.py
|
Python
|
apache-2.0
| 225
|
"""JOSE interfaces."""
import abc
import collections
import json
import six
from acme.jose import errors
from acme.jose import util
# pylint: disable=no-self-argument,no-method-argument,no-init,inherit-non-class
# pylint: disable=too-few-public-methods
@six.add_metaclass(abc.ABCMeta)
class JSONDeSerializable(object):
# pylint: disable=too-few-public-methods
"""Interface for (de)serializable JSON objects.
Please recall that the standard Python library implements
:class:`json.JSONEncoder` and :class:`json.JSONDecoder`, which perform
translations based on respective :ref:`conversion tables
<conversion-table>` that look pretty much like the one below (for
complete tables see the relevant Python documentation):
.. _conversion-table:
====== ======
JSON Python
====== ======
object dict
... ...
====== ======
While the above **conversion table** is about translation of JSON
documents to/from the basic Python types only,
:class:`JSONDeSerializable` introduces the following two concepts:
serialization
Turning an arbitrary Python object into a Python object that can
be encoded into a JSON document. **Full serialization** produces
a Python object composed of only basic types as required by the
:ref:`conversion table <conversion-table>`. **Partial
serialization** (accomplished by :meth:`to_partial_json`)
produces a Python object that might also be built from other
:class:`JSONDeSerializable` objects.
deserialization
Turning a decoded Python object (necessarily one of the basic
types as required by the :ref:`conversion table
<conversion-table>`) into an arbitrary Python object.
Serialization produces **serialized object** ("partially serialized
object" or "fully serialized object" for partial and full
serialization respectively) and deserialization produces
**deserialized object**, both usually denoted in the source code as
``jobj``.
Wording in the official Python documentation might be confusing
after reading the above, but in the light of those definitions, one
can view :meth:`json.JSONDecoder.decode` as decoder and
deserializer of basic types, :meth:`json.JSONEncoder.default` as
serializer of basic types, :meth:`json.JSONEncoder.encode` as
serializer and encoder of basic types.
One could extend :mod:`json` to support arbitrary object
(de)serialization either by:
- overriding :meth:`json.JSONDecoder.decode` and
:meth:`json.JSONEncoder.default` in subclasses
- or passing ``object_hook`` argument (or ``object_hook_pairs``)
to :func:`json.load`/:func:`json.loads` or ``default`` argument
for :func:`json.dump`/:func:`json.dumps`.
Interestingly, ``default`` is required to perform only partial
serialization, as :func:`json.dumps` applies ``default``
recursively. This is the idea behind making :meth:`to_partial_json`
produce only partial serialization, while providing custom
:meth:`json_dumps` that dumps with ``default`` set to
:meth:`json_dump_default`.
To make further documentation a bit more concrete, please consider
the following imaginary implementation example::
class Foo(JSONDeSerializable):
def to_partial_json(self):
return 'foo'
@classmethod
def from_json(cls, jobj):
return Foo()
class Bar(JSONDeSerializable):
def to_partial_json(self):
return [Foo(), Foo()]
@classmethod
def from_json(cls, jobj):
return Bar()
"""
@abc.abstractmethod
def to_partial_json(self): # pragma: no cover
"""Partially serialize.
Following the example, **partial serialization** means the following::
assert isinstance(Bar().to_partial_json()[0], Foo)
assert isinstance(Bar().to_partial_json()[1], Foo)
# in particular...
assert Bar().to_partial_json() != ['foo', 'foo']
:raises acme.jose.errors.SerializationError:
in case of any serialization error.
:returns: Partially serializable object.
"""
raise NotImplementedError()
def to_json(self):
"""Fully serialize.
Again, following the example from before, **full serialization**
means the following::
assert Bar().to_json() == ['foo', 'foo']
:raises acme.jose.errors.SerializationError:
in case of any serialization error.
:returns: Fully serialized object.
"""
def _serialize(obj):
if isinstance(obj, JSONDeSerializable):
return _serialize(obj.to_partial_json())
if isinstance(obj, six.string_types): # strings are Sequence
return obj
elif isinstance(obj, list):
return [_serialize(subobj) for subobj in obj]
elif isinstance(obj, collections.Sequence):
# default to tuple, otherwise Mapping could get
# unhashable list
return tuple(_serialize(subobj) for subobj in obj)
elif isinstance(obj, collections.Mapping):
return dict((_serialize(key), _serialize(value))
for key, value in six.iteritems(obj))
else:
return obj
return _serialize(self)
@util.abstractclassmethod
def from_json(cls, jobj): # pylint: disable=unused-argument
"""Deserialize a decoded JSON document.
:param jobj: Python object, composed of only other basic data
types, as decoded from JSON document. Not necessarily
:class:`dict` (as decoded from "JSON object" document).
:raises acme.jose.errors.DeserializationError:
if decoding was unsuccessful, e.g. in case of unparseable
X509 certificate, or wrong padding in JOSE base64 encoded
string, etc.
"""
# TypeError: Can't instantiate abstract class <cls> with
# abstract methods from_json, to_partial_json
return cls() # pylint: disable=abstract-class-instantiated
@classmethod
def json_loads(cls, json_string):
"""Deserialize from JSON document string."""
try:
loads = json.loads(json_string)
except ValueError as error:
raise errors.DeserializationError(error)
return cls.from_json(loads)
def json_dumps(self, **kwargs):
"""Dump to JSON string using proper serializer.
:returns: JSON document string.
:rtype: str
"""
return json.dumps(self, default=self.json_dump_default, **kwargs)
def json_dumps_pretty(self):
"""Dump the object to pretty JSON document string.
:rtype: str
"""
return self.json_dumps(sort_keys=True, indent=4)
@classmethod
def json_dump_default(cls, python_object):
"""Serialize Python object.
This function is meant to be passed as ``default`` to
:func:`json.dump` or :func:`json.dumps`. They call
``default(python_object)`` only for non-basic Python types, so
this function necessarily raises :class:`TypeError` if
``python_object`` is not an instance of
:class:`JSONDeSerializable`.
Please read the class docstring for more information.
"""
if isinstance(python_object, JSONDeSerializable):
return python_object.to_partial_json()
else: # this branch is necessary, cannot just "return"
raise TypeError(repr(python_object) + ' is not JSON serializable')
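# Illustrative usage sketch, assuming the imaginary Foo/Bar classes from
# the JSONDeSerializable docstring above: Bar().to_json() returns
# ['foo', 'foo'], so Bar().json_dumps() would produce the JSON string
# '["foo", "foo"]', with json_dump_default applied recursively to each
# nested Foo by json.dumps.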
|
hsduk/lets-encrypt-preview
|
acme/acme/jose/interfaces.py
|
Python
|
apache-2.0
| 7,747
|
#
# Copyright 2013 Rackspace Hosting.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import jsonpath_rw_ext
import mock
from oslo_config import fixture as fixture_config
import six
from ceilometer.event import converter
from ceilometer.event.storage import models
from ceilometer.tests import base
class ConverterBase(base.BaseTestCase):
@staticmethod
def _create_test_notification(event_type, message_id, **kw):
return dict(event_type=event_type,
message_id=message_id,
priority="INFO",
publisher_id="compute.host-1-2-3",
timestamp="2013-08-08 21:06:37.803826",
payload=kw,
)
def assertIsValidEvent(self, event, notification):
self.assertIsNot(
None, event,
"Notification dropped unexpectedly:"
" %s" % str(notification))
self.assertIsInstance(event, models.Event)
def assertIsNotValidEvent(self, event, notification):
self.assertIs(
None, event,
"Notification NOT dropped when expected to be dropped:"
" %s" % str(notification))
def assertHasTrait(self, event, name, value=None, dtype=None):
traits = [trait for trait in event.traits if trait.name == name]
self.assertTrue(
len(traits) > 0,
"Trait %s not found in event %s" % (name, event))
trait = traits[0]
if value is not None:
self.assertEqual(value, trait.value)
if dtype is not None:
self.assertEqual(dtype, trait.dtype)
if dtype == models.Trait.INT_TYPE:
self.assertIsInstance(trait.value, int)
elif dtype == models.Trait.FLOAT_TYPE:
self.assertIsInstance(trait.value, float)
elif dtype == models.Trait.DATETIME_TYPE:
self.assertIsInstance(trait.value, datetime.datetime)
elif dtype == models.Trait.TEXT_TYPE:
self.assertIsInstance(trait.value, six.string_types)
def assertDoesNotHaveTrait(self, event, name):
traits = [trait for trait in event.traits if trait.name == name]
self.assertEqual(
len(traits), 0,
"Extra Trait %s found in event %s" % (name, event))
def assertHasDefaultTraits(self, event):
text = models.Trait.TEXT_TYPE
self.assertHasTrait(event, 'service', dtype=text)
def _cmp_tree(self, this, other):
if hasattr(this, 'right') and hasattr(other, 'right'):
return (self._cmp_tree(this.right, other.right) and
self._cmp_tree(this.left, other.left))
if not hasattr(this, 'right') and not hasattr(other, 'right'):
return this == other
return False
def assertPathsEqual(self, path1, path2):
self.assertTrue(self._cmp_tree(path1, path2),
'JSONPaths not equivalent %s %s' % (path1, path2))
class TestTraitDefinition(ConverterBase):
def setUp(self):
super(TestTraitDefinition, self).setUp()
self.n1 = self._create_test_notification(
"test.thing",
"uuid-for-notif-0001",
instance_uuid="uuid-for-instance-0001",
instance_id="id-for-instance-0001",
instance_uuid2=None,
instance_id2=None,
host='host-1-2-3',
bogus_date='',
image_meta=dict(
disk_gb='20',
thing='whatzit'),
foobar=50)
self.ext1 = mock.MagicMock(name='mock_test_plugin')
self.test_plugin_class = self.ext1.plugin
self.test_plugin = self.test_plugin_class()
self.test_plugin.trait_value.return_value = 'foobar'
self.ext1.reset_mock()
self.ext2 = mock.MagicMock(name='mock_nothing_plugin')
self.nothing_plugin_class = self.ext2.plugin
self.nothing_plugin = self.nothing_plugin_class()
self.nothing_plugin.trait_value.return_value = None
self.ext2.reset_mock()
self.fake_plugin_mgr = dict(test=self.ext1, nothing=self.ext2)
def test_to_trait_with_plugin(self):
cfg = dict(type='text',
fields=['payload.instance_id', 'payload.instance_uuid'],
plugin=dict(name='test'))
tdef = converter.TraitDefinition('test_trait', cfg,
self.fake_plugin_mgr)
t = tdef.to_trait(self.n1)
self.assertIsInstance(t, models.Trait)
self.assertEqual('test_trait', t.name)
self.assertEqual(models.Trait.TEXT_TYPE, t.dtype)
self.assertEqual('foobar', t.value)
self.test_plugin_class.assert_called_once_with()
self.test_plugin.trait_value.assert_called_once_with([
('payload.instance_id', 'id-for-instance-0001'),
('payload.instance_uuid', 'uuid-for-instance-0001')])
def test_to_trait_null_match_with_plugin(self):
cfg = dict(type='text',
fields=['payload.nothere', 'payload.bogus'],
plugin=dict(name='test'))
tdef = converter.TraitDefinition('test_trait', cfg,
self.fake_plugin_mgr)
t = tdef.to_trait(self.n1)
self.assertIsInstance(t, models.Trait)
self.assertEqual('test_trait', t.name)
self.assertEqual(models.Trait.TEXT_TYPE, t.dtype)
self.assertEqual('foobar', t.value)
self.test_plugin_class.assert_called_once_with()
self.test_plugin.trait_value.assert_called_once_with([])
def test_to_trait_with_plugin_null(self):
cfg = dict(type='text',
fields=['payload.instance_id', 'payload.instance_uuid'],
plugin=dict(name='nothing'))
tdef = converter.TraitDefinition('test_trait', cfg,
self.fake_plugin_mgr)
t = tdef.to_trait(self.n1)
self.assertIs(None, t)
self.nothing_plugin_class.assert_called_once_with()
self.nothing_plugin.trait_value.assert_called_once_with([
('payload.instance_id', 'id-for-instance-0001'),
('payload.instance_uuid', 'uuid-for-instance-0001')])
def test_to_trait_with_plugin_with_parameters(self):
cfg = dict(type='text',
fields=['payload.instance_id', 'payload.instance_uuid'],
plugin=dict(name='test', parameters=dict(a=1, b='foo')))
tdef = converter.TraitDefinition('test_trait', cfg,
self.fake_plugin_mgr)
t = tdef.to_trait(self.n1)
self.assertIsInstance(t, models.Trait)
self.assertEqual('test_trait', t.name)
self.assertEqual(models.Trait.TEXT_TYPE, t.dtype)
self.assertEqual('foobar', t.value)
self.test_plugin_class.assert_called_once_with(a=1, b='foo')
self.test_plugin.trait_value.assert_called_once_with([
('payload.instance_id', 'id-for-instance-0001'),
('payload.instance_uuid', 'uuid-for-instance-0001')])
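# Illustrative plugin shape, inferred from the mock assertions above
# (hypothetical class, not part of ceilometer):
# class TestPlugin(object):
#     def __init__(self, **parameters):
#         self.parameters = parameters      # e.g. a=1, b='foo'
#     def trait_value(self, match_list):
#         # match_list is [(field_path, value), ...]; returning None
#         # drops the trait entirely.
#         return 'foobar'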
def test_to_trait(self):
cfg = dict(type='text', fields='payload.instance_id')
tdef = converter.TraitDefinition('test_trait', cfg,
self.fake_plugin_mgr)
t = tdef.to_trait(self.n1)
self.assertIsInstance(t, models.Trait)
self.assertEqual('test_trait', t.name)
self.assertEqual(models.Trait.TEXT_TYPE, t.dtype)
self.assertEqual('id-for-instance-0001', t.value)
cfg = dict(type='int', fields='payload.image_meta.disk_gb')
tdef = converter.TraitDefinition('test_trait', cfg,
self.fake_plugin_mgr)
t = tdef.to_trait(self.n1)
self.assertIsInstance(t, models.Trait)
self.assertEqual('test_trait', t.name)
self.assertEqual(models.Trait.INT_TYPE, t.dtype)
self.assertEqual(20, t.value)
def test_to_trait_multiple(self):
cfg = dict(type='text', fields=['payload.instance_id',
'payload.instance_uuid'])
tdef = converter.TraitDefinition('test_trait', cfg,
self.fake_plugin_mgr)
t = tdef.to_trait(self.n1)
self.assertIsInstance(t, models.Trait)
self.assertEqual('id-for-instance-0001', t.value)
cfg = dict(type='text', fields=['payload.instance_uuid',
'payload.instance_id'])
tdef = converter.TraitDefinition('test_trait', cfg,
self.fake_plugin_mgr)
t = tdef.to_trait(self.n1)
self.assertIsInstance(t, models.Trait)
self.assertEqual('uuid-for-instance-0001', t.value)
def test_to_trait_multiple_different_nesting(self):
cfg = dict(type='int', fields=['payload.foobar',
'payload.image_meta.disk_gb'])
tdef = converter.TraitDefinition('test_trait', cfg,
self.fake_plugin_mgr)
t = tdef.to_trait(self.n1)
self.assertIsInstance(t, models.Trait)
self.assertEqual(50, t.value)
cfg = dict(type='int', fields=['payload.image_meta.disk_gb',
'payload.foobar'])
tdef = converter.TraitDefinition('test_trait', cfg,
self.fake_plugin_mgr)
t = tdef.to_trait(self.n1)
self.assertIsInstance(t, models.Trait)
self.assertEqual(20, t.value)
def test_to_trait_some_null_multiple(self):
cfg = dict(type='text', fields=['payload.instance_id2',
'payload.instance_uuid'])
tdef = converter.TraitDefinition('test_trait', cfg,
self.fake_plugin_mgr)
t = tdef.to_trait(self.n1)
self.assertIsInstance(t, models.Trait)
self.assertEqual('uuid-for-instance-0001', t.value)
def test_to_trait_some_missing_multiple(self):
cfg = dict(type='text', fields=['payload.not_here_boss',
'payload.instance_uuid'])
tdef = converter.TraitDefinition('test_trait', cfg,
self.fake_plugin_mgr)
t = tdef.to_trait(self.n1)
self.assertIsInstance(t, models.Trait)
self.assertEqual('uuid-for-instance-0001', t.value)
def test_to_trait_missing(self):
cfg = dict(type='text', fields='payload.not_here_boss')
tdef = converter.TraitDefinition('test_trait', cfg,
self.fake_plugin_mgr)
t = tdef.to_trait(self.n1)
self.assertIs(None, t)
def test_to_trait_null(self):
cfg = dict(type='text', fields='payload.instance_id2')
tdef = converter.TraitDefinition('test_trait', cfg,
self.fake_plugin_mgr)
t = tdef.to_trait(self.n1)
self.assertIs(None, t)
def test_to_trait_empty_nontext(self):
cfg = dict(type='datetime', fields='payload.bogus_date')
tdef = converter.TraitDefinition('test_trait', cfg,
self.fake_plugin_mgr)
t = tdef.to_trait(self.n1)
self.assertIs(None, t)
def test_to_trait_multiple_null_missing(self):
cfg = dict(type='text', fields=['payload.not_here_boss',
'payload.instance_id2'])
tdef = converter.TraitDefinition('test_trait', cfg,
self.fake_plugin_mgr)
t = tdef.to_trait(self.n1)
self.assertIs(None, t)
def test_missing_fields_config(self):
self.assertRaises(converter.EventDefinitionException,
converter.TraitDefinition,
'bogus_trait',
dict(),
self.fake_plugin_mgr)
def test_string_fields_config(self):
cfg = dict(fields='payload.test')
t = converter.TraitDefinition('test_trait', cfg, self.fake_plugin_mgr)
self.assertPathsEqual(t.fields, jsonpath_rw_ext.parse('payload.test'))
def test_list_fields_config(self):
cfg = dict(fields=['payload.test', 'payload.other'])
t = converter.TraitDefinition('test_trait', cfg, self.fake_plugin_mgr)
self.assertPathsEqual(
t.fields,
jsonpath_rw_ext.parse('(payload.test)|(payload.other)'))
def test_invalid_path_config(self):
# test invalid jsonpath...
cfg = dict(fields='payload.bogus(')
self.assertRaises(converter.EventDefinitionException,
converter.TraitDefinition,
'bogus_trait',
cfg,
self.fake_plugin_mgr)
def test_invalid_plugin_config(self):
# test invalid jsonpath...
cfg = dict(fields='payload.test', plugin=dict(bogus="true"))
self.assertRaises(converter.EventDefinitionException,
converter.TraitDefinition,
'test_trait',
cfg,
self.fake_plugin_mgr)
def test_unknown_plugin(self):
# test invalid jsonpath...
cfg = dict(fields='payload.test', plugin=dict(name='bogus'))
self.assertRaises(converter.EventDefinitionException,
converter.TraitDefinition,
'test_trait',
cfg,
self.fake_plugin_mgr)
def test_type_config(self):
cfg = dict(type='text', fields='payload.test')
t = converter.TraitDefinition('test_trait', cfg, self.fake_plugin_mgr)
self.assertEqual(models.Trait.TEXT_TYPE, t.trait_type)
cfg = dict(type='int', fields='payload.test')
t = converter.TraitDefinition('test_trait', cfg, self.fake_plugin_mgr)
self.assertEqual(models.Trait.INT_TYPE, t.trait_type)
cfg = dict(type='float', fields='payload.test')
t = converter.TraitDefinition('test_trait', cfg, self.fake_plugin_mgr)
self.assertEqual(models.Trait.FLOAT_TYPE, t.trait_type)
cfg = dict(type='datetime', fields='payload.test')
t = converter.TraitDefinition('test_trait', cfg, self.fake_plugin_mgr)
self.assertEqual(models.Trait.DATETIME_TYPE, t.trait_type)
def test_invalid_type_config(self):
# test invalid jsonpath...
cfg = dict(type='bogus', fields='payload.test')
self.assertRaises(converter.EventDefinitionException,
converter.TraitDefinition,
'bogus_trait',
cfg,
self.fake_plugin_mgr)
class TestEventDefinition(ConverterBase):
def setUp(self):
super(TestEventDefinition, self).setUp()
self.traits_cfg = {
'instance_id': {
'type': 'text',
'fields': ['payload.instance_uuid',
'payload.instance_id'],
},
'host': {
'type': 'text',
'fields': 'payload.host',
},
}
self.test_notification1 = self._create_test_notification(
"test.thing",
"uuid-for-notif-0001",
instance_id="uuid-for-instance-0001",
host='host-1-2-3')
self.test_notification2 = self._create_test_notification(
"test.thing",
"uuid-for-notif-0002",
instance_id="uuid-for-instance-0002")
self.test_notification3 = self._create_test_notification(
"test.thing",
"uuid-for-notif-0003",
instance_id="uuid-for-instance-0003",
host=None)
self.fake_plugin_mgr = {}
def test_to_event(self):
dtype = models.Trait.TEXT_TYPE
cfg = dict(event_type='test.thing', traits=self.traits_cfg)
edef = converter.EventDefinition(cfg, self.fake_plugin_mgr)
e = edef.to_event(self.test_notification1)
self.assertEqual('test.thing', e.event_type)
self.assertEqual(datetime.datetime(2013, 8, 8, 21, 6, 37, 803826),
e.generated)
self.assertHasDefaultTraits(e)
self.assertHasTrait(e, 'host', value='host-1-2-3', dtype=dtype)
self.assertHasTrait(e, 'instance_id',
value='uuid-for-instance-0001',
dtype=dtype)
def test_to_event_missing_trait(self):
dtype = models.Trait.TEXT_TYPE
cfg = dict(event_type='test.thing', traits=self.traits_cfg)
edef = converter.EventDefinition(cfg, self.fake_plugin_mgr)
e = edef.to_event(self.test_notification2)
self.assertHasDefaultTraits(e)
self.assertHasTrait(e, 'instance_id',
value='uuid-for-instance-0002',
dtype=dtype)
self.assertDoesNotHaveTrait(e, 'host')
def test_to_event_null_trait(self):
dtype = models.Trait.TEXT_TYPE
cfg = dict(event_type='test.thing', traits=self.traits_cfg)
edef = converter.EventDefinition(cfg, self.fake_plugin_mgr)
e = edef.to_event(self.test_notification3)
self.assertHasDefaultTraits(e)
self.assertHasTrait(e, 'instance_id',
value='uuid-for-instance-0003',
dtype=dtype)
self.assertDoesNotHaveTrait(e, 'host')
def test_bogus_cfg_no_traits(self):
bogus = dict(event_type='test.foo')
self.assertRaises(converter.EventDefinitionException,
converter.EventDefinition,
bogus,
self.fake_plugin_mgr)
def test_bogus_cfg_no_type(self):
bogus = dict(traits=self.traits_cfg)
self.assertRaises(converter.EventDefinitionException,
converter.EventDefinition,
bogus,
self.fake_plugin_mgr)
def test_included_type_string(self):
cfg = dict(event_type='test.thing', traits=self.traits_cfg)
edef = converter.EventDefinition(cfg, self.fake_plugin_mgr)
self.assertEqual(1, len(edef._included_types))
self.assertEqual('test.thing', edef._included_types[0])
self.assertEqual(0, len(edef._excluded_types))
self.assertTrue(edef.included_type('test.thing'))
self.assertFalse(edef.excluded_type('test.thing'))
self.assertTrue(edef.match_type('test.thing'))
self.assertFalse(edef.match_type('random.thing'))
def test_included_type_list(self):
cfg = dict(event_type=['test.thing', 'other.thing'],
traits=self.traits_cfg)
edef = converter.EventDefinition(cfg, self.fake_plugin_mgr)
self.assertEqual(2, len(edef._included_types))
self.assertEqual(0, len(edef._excluded_types))
self.assertTrue(edef.included_type('test.thing'))
self.assertTrue(edef.included_type('other.thing'))
self.assertFalse(edef.excluded_type('test.thing'))
self.assertTrue(edef.match_type('test.thing'))
self.assertTrue(edef.match_type('other.thing'))
self.assertFalse(edef.match_type('random.thing'))
def test_excluded_type_string(self):
cfg = dict(event_type='!test.thing', traits=self.traits_cfg)
edef = converter.EventDefinition(cfg, self.fake_plugin_mgr)
self.assertEqual(1, len(edef._included_types))
self.assertEqual('*', edef._included_types[0])
self.assertEqual('test.thing', edef._excluded_types[0])
self.assertEqual(1, len(edef._excluded_types))
self.assertEqual('test.thing', edef._excluded_types[0])
self.assertTrue(edef.excluded_type('test.thing'))
self.assertTrue(edef.included_type('random.thing'))
self.assertFalse(edef.match_type('test.thing'))
self.assertTrue(edef.match_type('random.thing'))
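# Summary inferred from the assertions above: a leading '!' marks an
# event_type as excluded, and when only exclusions are configured the
# included list defaults to '*', so anything not excluded matches.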
def test_excluded_type_list(self):
cfg = dict(event_type=['!test.thing', '!other.thing'],
traits=self.traits_cfg)
edef = converter.EventDefinition(cfg, self.fake_plugin_mgr)
self.assertEqual(1, len(edef._included_types))
self.assertEqual(2, len(edef._excluded_types))
self.assertTrue(edef.excluded_type('test.thing'))
self.assertTrue(edef.excluded_type('other.thing'))
self.assertFalse(edef.excluded_type('random.thing'))
self.assertFalse(edef.match_type('test.thing'))
self.assertFalse(edef.match_type('other.thing'))
self.assertTrue(edef.match_type('random.thing'))
def test_mixed_type_list(self):
cfg = dict(event_type=['*.thing', '!test.thing', '!other.thing'],
traits=self.traits_cfg)
edef = converter.EventDefinition(cfg, self.fake_plugin_mgr)
self.assertEqual(1, len(edef._included_types))
self.assertEqual(2, len(edef._excluded_types))
self.assertTrue(edef.excluded_type('test.thing'))
self.assertTrue(edef.excluded_type('other.thing'))
self.assertFalse(edef.excluded_type('random.thing'))
self.assertFalse(edef.match_type('test.thing'))
self.assertFalse(edef.match_type('other.thing'))
self.assertFalse(edef.match_type('random.whatzit'))
self.assertTrue(edef.match_type('random.thing'))
def test_catchall(self):
cfg = dict(event_type=['*.thing', '!test.thing', '!other.thing'],
traits=self.traits_cfg)
edef = converter.EventDefinition(cfg, self.fake_plugin_mgr)
self.assertFalse(edef.is_catchall)
cfg = dict(event_type=['!other.thing'],
traits=self.traits_cfg)
edef = converter.EventDefinition(cfg, self.fake_plugin_mgr)
self.assertFalse(edef.is_catchall)
cfg = dict(event_type=['other.thing'],
traits=self.traits_cfg)
edef = converter.EventDefinition(cfg, self.fake_plugin_mgr)
self.assertFalse(edef.is_catchall)
cfg = dict(event_type=['*', '!other.thing'],
traits=self.traits_cfg)
edef = converter.EventDefinition(cfg, self.fake_plugin_mgr)
self.assertFalse(edef.is_catchall)
cfg = dict(event_type=['*'],
traits=self.traits_cfg)
edef = converter.EventDefinition(cfg, self.fake_plugin_mgr)
self.assertTrue(edef.is_catchall)
cfg = dict(event_type=['*', 'foo'],
traits=self.traits_cfg)
edef = converter.EventDefinition(cfg, self.fake_plugin_mgr)
self.assertTrue(edef.is_catchall)
@mock.patch('oslo_utils.timeutils.utcnow')
def test_extract_when(self, mock_utcnow):
now = datetime.datetime.utcnow()
modified = now + datetime.timedelta(minutes=1)
mock_utcnow.return_value = now
body = {"timestamp": str(modified)}
when = converter.EventDefinition._extract_when(body)
self.assertTimestampEqual(modified, when)
body = {"_context_timestamp": str(modified)}
when = converter.EventDefinition._extract_when(body)
self.assertTimestampEqual(modified, when)
then = now + datetime.timedelta(hours=1)
body = {"timestamp": str(modified), "_context_timestamp": str(then)}
when = converter.EventDefinition._extract_when(body)
self.assertTimestampEqual(modified, when)
when = converter.EventDefinition._extract_when({})
self.assertTimestampEqual(now, when)
def test_default_traits(self):
cfg = dict(event_type='test.thing', traits={})
edef = converter.EventDefinition(cfg, self.fake_plugin_mgr)
default_traits = converter.EventDefinition.DEFAULT_TRAITS.keys()
traits = set(edef.traits.keys())
for dt in default_traits:
self.assertIn(dt, traits)
self.assertEqual(len(converter.EventDefinition.DEFAULT_TRAITS),
len(edef.traits))
def test_traits(self):
cfg = dict(event_type='test.thing', traits=self.traits_cfg)
edef = converter.EventDefinition(cfg, self.fake_plugin_mgr)
default_traits = converter.EventDefinition.DEFAULT_TRAITS.keys()
traits = set(edef.traits.keys())
for dt in default_traits:
self.assertIn(dt, traits)
self.assertIn('host', traits)
self.assertIn('instance_id', traits)
self.assertEqual(len(converter.EventDefinition.DEFAULT_TRAITS) + 2,
len(edef.traits))
class TestNotificationConverter(ConverterBase):
def setUp(self):
super(TestNotificationConverter, self).setUp()
self.CONF = self.useFixture(fixture_config.Config()).conf
self.valid_event_def1 = [{
'event_type': 'compute.instance.create.*',
'traits': {
'instance_id': {
'type': 'text',
'fields': ['payload.instance_uuid',
'payload.instance_id'],
},
'host': {
'type': 'text',
'fields': 'payload.host',
},
},
}]
self.test_notification1 = self._create_test_notification(
"compute.instance.create.start",
"uuid-for-notif-0001",
instance_id="uuid-for-instance-0001",
host='host-1-2-3')
self.test_notification2 = self._create_test_notification(
"bogus.notification.from.mars",
"uuid-for-notif-0002",
weird='true',
host='cydonia')
self.fake_plugin_mgr = {}
@mock.patch('oslo_utils.timeutils.utcnow')
def test_converter_missing_keys(self, mock_utcnow):
# test a malformed notification
now = datetime.datetime.utcnow()
mock_utcnow.return_value = now
c = converter.NotificationEventsConverter(
[],
self.fake_plugin_mgr,
add_catchall=True)
message = {'event_type': "foo",
'message_id': "abc",
'publisher_id': "1"}
e = c.to_event(message)
self.assertIsValidEvent(e, message)
self.assertEqual(1, len(e.traits))
self.assertEqual("foo", e.event_type)
self.assertEqual(now, e.generated)
def test_converter_with_catchall(self):
c = converter.NotificationEventsConverter(
self.valid_event_def1,
self.fake_plugin_mgr,
add_catchall=True)
self.assertEqual(2, len(c.definitions))
e = c.to_event(self.test_notification1)
self.assertIsValidEvent(e, self.test_notification1)
self.assertEqual(3, len(e.traits))
self.assertHasDefaultTraits(e)
self.assertHasTrait(e, 'instance_id')
self.assertHasTrait(e, 'host')
e = c.to_event(self.test_notification2)
self.assertIsValidEvent(e, self.test_notification2)
self.assertEqual(1, len(e.traits))
self.assertHasDefaultTraits(e)
self.assertDoesNotHaveTrait(e, 'instance_id')
self.assertDoesNotHaveTrait(e, 'host')
def test_converter_without_catchall(self):
c = converter.NotificationEventsConverter(
self.valid_event_def1,
self.fake_plugin_mgr,
add_catchall=False)
self.assertEqual(1, len(c.definitions))
e = c.to_event(self.test_notification1)
self.assertIsValidEvent(e, self.test_notification1)
self.assertEqual(3, len(e.traits))
self.assertHasDefaultTraits(e)
self.assertHasTrait(e, 'instance_id')
self.assertHasTrait(e, 'host')
e = c.to_event(self.test_notification2)
self.assertIsNotValidEvent(e, self.test_notification2)
def test_converter_empty_cfg_with_catchall(self):
c = converter.NotificationEventsConverter(
[],
self.fake_plugin_mgr,
add_catchall=True)
self.assertEqual(1, len(c.definitions))
e = c.to_event(self.test_notification1)
self.assertIsValidEvent(e, self.test_notification1)
self.assertEqual(1, len(e.traits))
self.assertHasDefaultTraits(e)
e = c.to_event(self.test_notification2)
self.assertIsValidEvent(e, self.test_notification2)
self.assertEqual(1, len(e.traits))
self.assertHasDefaultTraits(e)
def test_converter_empty_cfg_without_catchall(self):
c = converter.NotificationEventsConverter(
[],
self.fake_plugin_mgr,
add_catchall=False)
self.assertEqual(0, len(c.definitions))
e = c.to_event(self.test_notification1)
self.assertIsNotValidEvent(e, self.test_notification1)
e = c.to_event(self.test_notification2)
self.assertIsNotValidEvent(e, self.test_notification2)
@staticmethod
def _convert_message(convert, level):
message = {'priority': level, 'event_type': "foo",
'message_id': "abc", 'publisher_id': "1"}
return convert.to_event(message)
def test_store_raw_all(self):
self.CONF.event.store_raw = ['info', 'error']
c = converter.NotificationEventsConverter(
[], self.fake_plugin_mgr)
self.assertTrue(self._convert_message(c, 'info').raw)
self.assertTrue(self._convert_message(c, 'error').raw)
def test_store_raw_info_only(self):
self.CONF.event.store_raw = ['info']
c = converter.NotificationEventsConverter(
[], self.fake_plugin_mgr)
self.assertTrue(self._convert_message(c, 'info').raw)
self.assertFalse(self._convert_message(c, 'error').raw)
def test_store_raw_error_only(self):
self.CONF.event.store_raw = ['error']
c = converter.NotificationEventsConverter(
[], self.fake_plugin_mgr)
self.assertFalse(self._convert_message(c, 'info').raw)
self.assertTrue(self._convert_message(c, 'error').raw)
def test_store_raw_skip_all(self):
c = converter.NotificationEventsConverter(
[], self.fake_plugin_mgr)
self.assertFalse(self._convert_message(c, 'info').raw)
self.assertFalse(self._convert_message(c, 'error').raw)
def test_store_raw_info_only_no_case(self):
self.CONF.event.store_raw = ['INFO']
c = converter.NotificationEventsConverter(
[], self.fake_plugin_mgr)
self.assertTrue(self._convert_message(c, 'info').raw)
self.assertFalse(self._convert_message(c, 'error').raw)
def test_store_raw_bad_skip_all(self):
self.CONF.event.store_raw = ['unknown']
c = converter.NotificationEventsConverter(
[], self.fake_plugin_mgr)
self.assertFalse(self._convert_message(c, 'info').raw)
self.assertFalse(self._convert_message(c, 'error').raw)
def test_store_raw_bad_and_good(self):
self.CONF.event.store_raw = ['info', 'unknown']
c = converter.NotificationEventsConverter(
[], self.fake_plugin_mgr)
self.assertTrue(self._convert_message(c, 'info').raw)
self.assertFalse(self._convert_message(c, 'error').raw)
@mock.patch('ceilometer.event.converter.get_config_file',
mock.Mock(return_value=None))
def test_setup_events_default_config(self):
self.CONF.set_override('drop_unmatched_notifications',
False, group='event')
c = converter.setup_events(self.fake_plugin_mgr)
self.assertIsInstance(c, converter.NotificationEventsConverter)
self.assertEqual(1, len(c.definitions))
self.assertTrue(c.definitions[0].is_catchall)
self.CONF.set_override('drop_unmatched_notifications',
True, group='event')
c = converter.setup_events(self.fake_plugin_mgr)
self.assertIsInstance(c, converter.NotificationEventsConverter)
self.assertEqual(0, len(c.definitions))
|
cernops/ceilometer
|
ceilometer/tests/unit/event/test_converter.py
|
Python
|
apache-2.0
| 32,666
|
# Generated by Django 3.1.8 on 2021-05-01 12:34
from django.db import migrations, models
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
# Set starred_message_counts to True for already existing users.
def set_starred_message_count_to_true(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
UserProfile = apps.get_model("zerver", "UserProfile")
UserProfile.objects.filter(starred_message_counts=False).update(starred_message_counts=True)
class Migration(migrations.Migration):
atomic = False
dependencies = [
("zerver", "0322_realm_create_audit_log_backfill"),
]
operations = [
migrations.AlterField(
model_name="userprofile",
name="starred_message_counts",
field=models.BooleanField(default=True),
),
migrations.RunPython(
set_starred_message_count_to_true,
reverse_code=migrations.RunPython.noop,
elidable=True,
),
]
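# Note: reverse_code is RunPython.noop, so rolling this migration back
# restores the old field default but does not un-set the backfilled
# starred_message_counts values.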
|
andersk/zulip
|
zerver/migrations/0323_show_starred_message_counts.py
|
Python
|
apache-2.0
| 1,048
|
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import numbers
import re
import semantic_version
import six
from glance.common.artifacts import declarative
import glance.common.exception as exc
from glance import i18n
_ = i18n._
class Text(declarative.PropertyDefinition):
"""A text metadata property of arbitrary length
Maps to TEXT columns in the database; does not support sorting or filtering.
"""
ALLOWED_TYPES = (six.string_types,)
DB_TYPE = 'text'
# noinspection PyAttributeOutsideInit
class String(Text):
"""A string metadata property of limited length
Maps to VARCHAR columns in the database; supports filtering and sorting.
May have constraints on length and regexp patterns.
The maximum length is limited to 255 characters.
"""
DB_TYPE = 'string'
def __init__(self, max_length=255, min_length=0, pattern=None, **kwargs):
"""Defines a String metadata property.
:param max_length: maximum value length
:param min_length: minimum value length
:param pattern: regexp pattern to match
"""
super(String, self).__init__(**kwargs)
self.max_length(max_length)
self.min_length(min_length)
if pattern:
self.pattern(pattern)
# if default and/or allowed_values are specified (in base classes)
# then we need to validate them against the newly added validators
self._check_definition()
def max_length(self, value):
"""Sets the maximum value length"""
self._max_length = value
if value is not None:
if value > 255:
raise exc.InvalidArtifactTypePropertyDefinition(
_('Max string length may not exceed 255 characters'))
self._add_validator('max_length',
lambda v: len(v) <= self._max_length,
_('Length is greater than maximum'))
else:
self._remove_validator('max_length')
self._check_definition()
def min_length(self, value):
"""Sets the minimum value length"""
self._min_length = value
if value is not None:
if value < 0:
raise exc.InvalidArtifactTypePropertyDefinition(
_('Min string length may not be negative'))
self._add_validator('min_length',
lambda v: len(v) >= self._min_length,
_('Length is less than minimum'))
else:
self._remove_validator('min_length')
self._check_definition()
def pattern(self, value):
"""Sets the regexp pattern to match"""
self._pattern = value
if value is not None:
self._add_validator('pattern',
lambda v: re.match(self._pattern,
v) is not None,
_('Does not match pattern'))
else:
self._remove_validator('pattern')
self._check_definition()
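# Illustrative usage sketch (hypothetical property, not from glance):
# title = String(max_length=80, min_length=1, pattern='^[A-Za-z ]+$')
# accepts "My Artifact" but rejects an empty string, a value longer than
# 80 characters, or one containing digits.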
class SemVerString(String):
"""A String metadata property matching semver pattern"""
def __init__(self, **kwargs):
def validate(value):
try:
semantic_version.Version(value, partial=True)
except ValueError:
return False
return True
super(SemVerString,
self).__init__(validators=[(validate,
"Invalid semver string")],
**kwargs)
# noinspection PyAttributeOutsideInit
class Integer(declarative.PropertyDefinition):
"""An Integer metadata property
Maps to INT columns in the database; supports filtering and sorting.
May have constraints on value
"""
ALLOWED_TYPES = (six.integer_types,)
DB_TYPE = 'int'
def __init__(self, min_value=None, max_value=None, **kwargs):
"""Defines an Integer metadata property
:param min_value: minimum allowed value
:param max_value: maximum allowed value
"""
super(Integer, self).__init__(**kwargs)
if min_value is not None:
self.min_value(min_value)
if max_value is not None:
self.max_value(max_value)
# if default and/or allowed_values are specified (in base classes)
# then we need to validate them against the newly added validators
self._check_definition()
def min_value(self, value):
"""Sets the minimum allowed value"""
self._min_value = value
if value is not None:
self._add_validator('min_value',
lambda v: v >= self._min_value,
_('Value is less than minimum'))
else:
self._remove_validator('min_value')
self._check_definition()
def max_value(self, value):
"""Sets the maximum allowed value"""
self._max_value = value
if value is not None:
self._add_validator('max_value',
lambda v: v <= self._max_value,
_('Value is greater than maximum'))
else:
self._remove_validator('max_value')
self._check_definition()
# noinspection PyAttributeOutsideInit
class DateTime(declarative.PropertyDefinition):
"""A DateTime metadata property
Maps to a DATETIME column in the database.
Is not supported as a Type Specific property; may be used only as a Generic one.
May have constraints on value
"""
ALLOWED_TYPES = (datetime.datetime,)
DB_TYPE = 'datetime'
def __init__(self, min_value=None, max_value=None, **kwargs):
"""Defines a DateTime metadata property
:param min_value: minimum allowed value
:param max_value: maximum allowed value
"""
super(DateTime, self).__init__(**kwargs)
if min_value is not None:
self.min_value(min_value)
if max_value is not None:
self.max_value(max_value)
# if default and/or allowed_values are specified (in base classes)
# then we need to validate them against the newly added validators
self._check_definition()
def min_value(self, value):
"""Sets the minimum allowed value"""
self._min_value = value
if value is not None:
self._add_validator('min_value',
lambda v: v >= self._min_value,
_('Value is less than minimum'))
else:
self._remove_validator('min_value')
self._check_definition()
def max_value(self, value):
"""Sets the maximum allowed value"""
self._max_value = value
if value is not None:
self._add_validator('max_value',
lambda v: v <= self._max_value,
_('Value is greater than maximum'))
else:
self._remove_validator('max_value')
self._check_definition()
# noinspection PyAttributeOutsideInit
class Numeric(declarative.PropertyDefinition):
"""A Numeric metadata property
Maps to floating point number columns in the database, supports filtering and
sorting. May have constraints on value
"""
ALLOWED_TYPES = numbers.Number
DB_TYPE = 'numeric'
def __init__(self, min_value=None, max_value=None, **kwargs):
"""Defines a Numeric metadata property
:param min_value: minimum allowed value
:param max_value: maximum allowed value
"""
super(Numeric, self).__init__(**kwargs)
if min_value is not None:
self.min_value(min_value)
if max_value is not None:
self.max_value(max_value)
# if default and/or allowed_values are specified (in base classes)
# then we need to validate them against the newly added validators
self._check_definition()
def min_value(self, value):
"""Sets the minimum allowed value"""
self._min_value = value
if value is not None:
self._add_validator('min_value',
lambda v: v >= self._min_value,
_('Value is less than minimum'))
else:
self._remove_validator('min_value')
self._check_definition()
def max_value(self, value):
"""Sets the maximum allowed value"""
self._max_value = value
if value is not None:
self._add_validator('max_value',
lambda v: v <= self._max_value,
_('Value is greater than maximum'))
else:
self._remove_validator('max_value')
self._check_definition()
class Boolean(declarative.PropertyDefinition):
"""A Boolean metadata property
Maps to Boolean columns in the database. Supports filtering and sorting.
"""
ALLOWED_TYPES = (bool,)
DB_TYPE = 'bool'
class Array(declarative.ListAttributeDefinition,
declarative.PropertyDefinition, list):
"""An array metadata property
May contain elements of any other PropertyDefinition types except Dict and
Array. Each element maps to the appropriate type of column in the database.
Preserves order. Allows filtering based on "Array contains Value" semantics.
May specify constraints on the types of elements, their amount and uniqueness.
"""
ALLOWED_ITEM_TYPES = (declarative.PropertyDefinition,)
def __init__(self, item_type=String(), min_size=0, max_size=None,
unique=False, extra_items=True, **kwargs):
"""Defines an Array metadata property
:param item_type: defines the types of elements in Array. If set to an
instance of PropertyDefinition then all the elements have to be of that
type. If set to list of such instances, then the elements on the
corresponding positions have to be of the appropriate type.
:param min_size: minimum size of the Array
:param max_size: maximum size of the Array
:param unique: if set to true, all the elements in the Array have to be
unique
"""
if isinstance(item_type, Array):
msg = _("Array property can't have item_type=Array")
raise exc.InvalidArtifactTypePropertyDefinition(msg)
declarative.ListAttributeDefinition.__init__(self,
item_type=item_type,
min_size=min_size,
max_size=max_size,
unique=unique)
declarative.PropertyDefinition.__init__(self, **kwargs)
class Dict(declarative.DictAttributeDefinition,
declarative.PropertyDefinition, dict):
"""A dictionary metadata property
May contain elements of any other PropertyDefinition types except Dict.
Each element maps to the appropriate type of column in the database. Allows
filtering and sorting by values of each key except the ones mapping the
Text fields.
May specify constraints on the types of elements and their amount.
"""
ALLOWED_PROPERTY_TYPES = (declarative.PropertyDefinition,)
def __init__(self, properties=String(), min_properties=0,
max_properties=None, **kwargs):
"""Defines a dictionary metadata property
:param properties: defines the types of dictionary values. If set to an
instance of PropertyDefinition then all the values have to be of that
type. If set to a dictionary with string keys and values of
PropertyDefinition type, then the elements mapped by the corresponding
keys have to be of the appropriate type.
:param min_properties: minimum allowed amount of properties in the dict
:param max_properties: maximum allowed amount of properties in the dict
"""
declarative.DictAttributeDefinition.__init__(
self,
properties=properties,
min_properties=min_properties,
max_properties=max_properties)
declarative.PropertyDefinition.__init__(self, **kwargs)
class ArtifactType(declarative.get_declarative_base()): # noqa
"""A base class for all the Artifact Type definitions
Defines the Generic metadata properties as attributes.
"""
id = String(required=True, readonly=True)
type_name = String(required=True, readonly=True)
type_version = SemVerString(required=True, readonly=True)
name = String(required=True, mutable=False)
version = SemVerString(required=True, mutable=False)
description = Text()
tags = Array(unique=True, default=[])
visibility = String(required=True,
allowed_values=["private", "public", "shared",
"community"],
default="private")
state = String(required=True, readonly=True, allowed_values=["creating",
"active",
"deactivated",
"deleted"])
owner = String(required=True, readonly=True)
created_at = DateTime(required=True, readonly=True)
updated_at = DateTime(required=True, readonly=True)
published_at = DateTime(readonly=True)
deleted_at = DateTime(readonly=True)
def __init__(self, **kwargs):
if "type_name" in kwargs:
raise exc.InvalidArtifactPropertyValue(
_("Unable to specify artifact type explicitly"))
if "type_version" in kwargs:
raise exc.InvalidArtifactPropertyValue(
_("Unable to specify artifact type version explicitly"))
super(ArtifactType,
self).__init__(type_name=self.metadata.type_name,
type_version=self.metadata.type_version, **kwargs)
def __eq__(self, other):
if not isinstance(other, ArtifactType):
return False
return self.id == other.id
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash(self.id)
def __is_mutable__(self):
return self.state == "creating"
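# Illustrative sketch of a concrete type (hypothetical, not from glance),
# built from the property primitives defined above:
# class MyAppImage(ArtifactType):
#     """An example artifact type."""
#     min_ram_mb = Integer(min_value=128)
#     screenshots = Array(item_type=String(max_length=255))
#     release_date = DateTime()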
class ArtifactReference(declarative.RelationDefinition):
"""An artifact reference definition
Allows defining constraints by the name and version of the target artifact
"""
ALLOWED_TYPES = ArtifactType
def __init__(self, type_name=None, type_version=None, **kwargs):
"""Defines an artifact reference
:param type_name: type name of the target artifact
:param type_version: type version of the target artifact
"""
super(ArtifactReference, self).__init__(**kwargs)
if type_name is not None:
if isinstance(type_name, list):
type_names = list(type_name)
if type_version is not None:
raise exc.InvalidArtifactTypePropertyDefinition(
_('Unable to specify version '
'if multiple types are possible'))
else:
type_names = [type_name]
def validate_reference(artifact):
if artifact.type_name not in type_names:
return False
if (type_version is not None and
artifact.type_version != type_version):
return False
return True
self._add_validator('referenced_type',
validate_reference,
_("Invalid referenced type"))
elif type_version is not None:
raise exc.InvalidArtifactTypePropertyDefinition(
_('Unable to specify version '
'if type is not specified'))
self._check_definition()
class ArtifactReferenceList(declarative.ListAttributeDefinition,
declarative.RelationDefinition, list):
"""A list of Artifact References
    Allows defining a collection of references to other artifacts, each
    optionally constrained by type name and type version
"""
ALLOWED_ITEM_TYPES = (ArtifactReference,)
def __init__(self, references=ArtifactReference(), min_size=0,
max_size=None, **kwargs):
if isinstance(references, list):
raise exc.InvalidArtifactTypePropertyDefinition(
_("Invalid reference list specification"))
declarative.RelationDefinition.__init__(self, **kwargs)
declarative.ListAttributeDefinition.__init__(self,
item_type=references,
min_size=min_size,
max_size=max_size,
unique=True,
default=[]
if min_size == 0 else
None)
class Blob(object):
"""A Binary object being part of the Artifact"""
def __init__(self, size=0, locations=None, checksum=None, item_key=None):
"""Initializes a new Binary Object for an Artifact
:param size: the size of Binary Data
:param locations: a list of data locations in backing stores
:param checksum: a checksum for the data
"""
if locations is None:
locations = []
self.size = size
self.checksum = checksum
self.locations = locations
self.item_key = item_key
def to_dict(self):
return {
"size": self.size,
"checksum": self.checksum,
}
class BinaryObject(declarative.BlobDefinition, Blob):
"""A definition of BinaryObject binding
Adds a BinaryObject to an Artifact Type, optionally constrained by file
    size and number of locations
"""
ALLOWED_TYPES = (Blob,)
def __init__(self,
max_file_size=None,
min_file_size=None,
min_locations=None,
max_locations=None,
**kwargs):
"""Defines a binary object as part of Artifact Type
        :param max_file_size: maximum size of the associated Blob
:param min_file_size: minimum size of the associated Blob
:param min_locations: minimum number of locations in the associated
Blob
:param max_locations: maximum number of locations in the associated
Blob
"""
super(BinaryObject, self).__init__(default=None, readonly=False,
mutable=False, **kwargs)
self._max_file_size = max_file_size
self._min_file_size = min_file_size
self._min_locations = min_locations
self._max_locations = max_locations
self._add_validator('size_not_empty',
lambda v: v.size is not None,
_('Blob size is not set'))
if max_file_size:
self._add_validator('max_size',
lambda v: v.size <= self._max_file_size,
_("File too large"))
if min_file_size:
self._add_validator('min_size',
lambda v: v.size >= self._min_file_size,
_("File too small"))
if min_locations:
self._add_validator('min_locations',
lambda v: len(
v.locations) >= self._min_locations,
_("Too few locations"))
if max_locations:
self._add_validator(
'max_locations',
lambda v: len(v.locations) <= self._max_locations,
_("Too many locations"))
class BinaryObjectList(declarative.ListAttributeDefinition,
declarative.BlobDefinition, list):
"""A definition of binding to the list of BinaryObject
    Adds a list of BinaryObjects to an artifact type, optionally constrained
by the number of objects in the list and their uniqueness
"""
ALLOWED_ITEM_TYPES = (BinaryObject,)
def __init__(self, objects=BinaryObject(), min_count=0, max_count=None,
**kwargs):
declarative.BlobDefinition.__init__(self, **kwargs)
declarative.ListAttributeDefinition.__init__(self,
item_type=objects,
min_size=min_count,
max_size=max_count,
unique=True)
self.default = [] if min_count == 0 else None
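# A minimal sketch of how the definitions above might be combined into a
# custom artifact type. The class name, its fields, their constraints, and
# the '__type_version__' attribute are illustrative assumptions only, not
# part of the original module; real plugin types may differ.
if __name__ == '__main__':
    class ExampleArtifact(ArtifactType):
        """A sample artifact carrying one binary image and extra metadata."""
        __type_version__ = '1.0'
        image = BinaryObject(max_file_size=2 ** 30)  # at most 1 GiB
        extra_metadata = Dict(properties=String(), max_properties=10)
        depends_on = ArtifactReference(type_name='ExampleArtifact')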
|
saeki-masaki/glance
|
glance/common/artifacts/definitions.py
|
Python
|
apache-2.0
| 21,600
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cinder import objects
# NOTE(e0ne): Make sure we have all of the objects loaded. We do this
# at module import time, because we may be using mock decorators in our
# tests that run at import time.
objects.register_all()
|
phenoxim/cinder
|
cinder/tests/functional/__init__.py
|
Python
|
apache-2.0
| 799
|
import warnings
from pilkit.processors.base import *
warnings.warn('imagekit.processors.base is deprecated; use imagekit.processors instead', DeprecationWarning)
__all__ = ['ProcessorPipeline', 'Adjust', 'Reflection', 'Transpose', 'Anchor', 'MakeOpaque']
|
FundedByMe/django-imagekit
|
imagekit/processors/base.py
|
Python
|
bsd-3-clause
| 257
|
from trac.perm import PermissionCache, PermissionSystem
from trac.ticket.api import TicketSystem
from trac.ticket.model import Ticket
from trac.test import EnvironmentStub, Mock
import unittest
class TicketSystemTestCase(unittest.TestCase):
def setUp(self):
self.env = EnvironmentStub()
self.perm = PermissionSystem(self.env)
self.ticket_system = TicketSystem(self.env)
self.req = Mock()
def tearDown(self):
self.env.reset_db()
def _get_actions(self, ticket_dict):
ts = TicketSystem(self.env)
ticket = Ticket(self.env)
ticket.populate(ticket_dict)
        tkt_id = ticket.insert()
        return ts.get_available_actions(self.req, Ticket(self.env, tkt_id))
def test_custom_field_text(self):
self.env.config.set('ticket-custom', 'test', 'text')
self.env.config.set('ticket-custom', 'test.label', 'Test')
self.env.config.set('ticket-custom', 'test.value', 'Foo bar')
self.env.config.set('ticket-custom', 'test.format', 'wiki')
fields = TicketSystem(self.env).get_custom_fields()
self.assertEqual({'name': 'test', 'type': 'text', 'label': 'Test',
'value': 'Foo bar', 'order': 0, 'format': 'wiki'},
fields[0])
def test_custom_field_select(self):
self.env.config.set('ticket-custom', 'test', 'select')
self.env.config.set('ticket-custom', 'test.label', 'Test')
self.env.config.set('ticket-custom', 'test.value', '1')
self.env.config.set('ticket-custom', 'test.options', 'option1|option2')
fields = TicketSystem(self.env).get_custom_fields()
self.assertEqual({'name': 'test', 'type': 'select', 'label': 'Test',
'value': '1', 'options': ['option1', 'option2'],
'order': 0},
fields[0])
def test_custom_field_optional_select(self):
self.env.config.set('ticket-custom', 'test', 'select')
self.env.config.set('ticket-custom', 'test.label', 'Test')
self.env.config.set('ticket-custom', 'test.value', '1')
self.env.config.set('ticket-custom', 'test.options', '|option1|option2')
fields = TicketSystem(self.env).get_custom_fields()
self.assertEqual({'name': 'test', 'type': 'select', 'label': 'Test',
'value': '1', 'options': ['option1', 'option2'],
'order': 0, 'optional': True},
fields[0])
def test_custom_field_textarea(self):
self.env.config.set('ticket-custom', 'test', 'textarea')
self.env.config.set('ticket-custom', 'test.label', 'Test')
self.env.config.set('ticket-custom', 'test.value', 'Foo bar')
self.env.config.set('ticket-custom', 'test.cols', '60')
self.env.config.set('ticket-custom', 'test.rows', '4')
self.env.config.set('ticket-custom', 'test.format', 'wiki')
fields = TicketSystem(self.env).get_custom_fields()
self.assertEqual({'name': 'test', 'type': 'textarea', 'label': 'Test',
'value': 'Foo bar', 'width': 60, 'height': 4,
'order': 0, 'format': 'wiki'},
fields[0])
def test_custom_field_order(self):
self.env.config.set('ticket-custom', 'test1', 'text')
self.env.config.set('ticket-custom', 'test1.order', '2')
self.env.config.set('ticket-custom', 'test2', 'text')
self.env.config.set('ticket-custom', 'test2.order', '1')
fields = TicketSystem(self.env).get_custom_fields()
self.assertEqual('test2', fields[0]['name'])
self.assertEqual('test1', fields[1]['name'])
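    # For reference, the 'ticket-custom' keys set in the tests above mirror
    # a trac.ini section like the following (illustrative values):
    #
    #   [ticket-custom]
    #   test = select
    #   test.label = Test
    #   test.value = 1
    #   test.options = option1|option2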
def test_available_actions_full_perms(self):
self.perm.grant_permission('anonymous', 'TICKET_CREATE')
self.perm.grant_permission('anonymous', 'TICKET_MODIFY')
self.req.perm = PermissionCache(self.env)
self.assertEqual(['leave', 'resolve', 'reassign', 'accept'],
self._get_actions({'status': 'new'}))
self.assertEqual(['leave', 'resolve', 'reassign', 'accept'],
self._get_actions({'status': 'assigned'}))
self.assertEqual(['leave', 'resolve', 'reassign', 'accept'],
self._get_actions({'status': 'accepted'}))
self.assertEqual(['leave', 'resolve', 'reassign', 'accept'],
self._get_actions({'status': 'reopened'}))
self.assertEqual(['leave', 'reopen'],
self._get_actions({'status': 'closed'}))
def test_available_actions_no_perms(self):
self.req.perm = PermissionCache(self.env)
self.assertEqual(['leave'], self._get_actions({'status': 'new'}))
self.assertEqual(['leave'], self._get_actions({'status': 'assigned'}))
self.assertEqual(['leave'], self._get_actions({'status': 'accepted'}))
self.assertEqual(['leave'], self._get_actions({'status': 'reopened'}))
self.assertEqual(['leave'], self._get_actions({'status': 'closed'}))
def test_available_actions_create_only(self):
self.perm.grant_permission('anonymous', 'TICKET_CREATE')
self.req.perm = PermissionCache(self.env)
self.assertEqual(['leave'], self._get_actions({'status': 'new'}))
self.assertEqual(['leave'], self._get_actions({'status': 'assigned'}))
self.assertEqual(['leave'], self._get_actions({'status': 'accepted'}))
self.assertEqual(['leave'], self._get_actions({'status': 'reopened'}))
self.assertEqual(['leave', 'reopen'],
self._get_actions({'status': 'closed'}))
def test_available_actions_chgprop_only(self):
# CHGPROP is not enough for changing a ticket's state (#3289)
self.perm.grant_permission('anonymous', 'TICKET_CHGPROP')
self.req.perm = PermissionCache(self.env)
self.assertEqual(['leave'], self._get_actions({'status': 'new'}))
self.assertEqual(['leave'], self._get_actions({'status': 'assigned'}))
self.assertEqual(['leave'], self._get_actions({'status': 'accepted'}))
self.assertEqual(['leave'], self._get_actions({'status': 'reopened'}))
self.assertEqual(['leave'], self._get_actions({'status': 'closed'}))
def suite():
return unittest.makeSuite(TicketSystemTestCase, 'test')
if __name__ == '__main__':
unittest.main()
|
netjunki/trac-Pygit2
|
trac/ticket/tests/api.py
|
Python
|
bsd-3-clause
| 6,430
|
import numpy as np
from bokeh.models import ColumnDataSource, DataRange1d, Plot, LinearAxis, Grid, HoverTool
from bokeh.models.widgets import Tabs, Panel, Paragraph
from bokeh.models.layouts import VBox
from bokeh.models.glyphs import (
AnnularWedge, Annulus, Arc, Bezier, Gear, Circle, ImageURL, Line, MultiLine, Oval,
Patch, Patches, Quad, Quadratic, Ray, Rect, Segment, Square, Text, Wedge, CircleX, Triangle,
Cross, Diamond, InvertedTriangle, SquareX, Asterisk, SquareCross, DiamondCross, CircleCross, X
)
from bokeh.document import Document
from bokeh.embed import file_html
from bokeh.resources import INLINE
from bokeh.util.browser import view
N = 9
x = np.linspace(-2, 2, N)
y = x**2
sizes = np.linspace(10, 20, N)
xpts = np.array([-.09, -.12, .0, .12, .09])
ypts = np.array([-.1, .02, .1, .02, -.1])
source = ColumnDataSource(dict(
x = x,
y = y,
sizes = sizes,
xs = [ xpts + xx for xx in x ],
ys = [ ypts + yy for yy in y ],
xp02 = x + 0.2,
xp01 = x + 0.1,
xm01 = x - 0.1,
yp01 = y + 0.1,
ym01 = y - 0.1,
))
xdr = DataRange1d()
ydr = DataRange1d()
def screen(value):
return dict(value=value, units="screen")
glyphs = [
("annular_wedge", AnnularWedge(x="x", y="y", inner_radius=screen(10), outer_radius=screen(20), start_angle=0.6, end_angle=4.1, fill_color="#8888ee")),
("annulus", Annulus(x="x", y="y", inner_radius=screen(10), outer_radius=screen(20), fill_color="#7FC97F")),
("arc", Arc(x="x", y="y", radius=screen(20), start_angle=0.6, end_angle=4.1, line_color="#BEAED4", line_width=3)),
("bezier", Bezier(x0="x", y0="y", x1="xp02", y1="y", cx0="xp01", cy0="yp01", cx1="xm01", cy1="ym01", line_color="#D95F02", line_width=2)),
("gear", Gear(x="x", y="y", module=0.1, teeth=8, angle=0, shaft_size=0.02, fill_color="#FDF6E3", line_color="#D95F02")),
("image_url", ImageURL(x="x", y="y", w=0.4, h=0.4, url=dict(value="http://bokeh.pydata.org/en/latest/_static/images/logo.png"), anchor="center")),
("line", Line(x="x", y="y", line_color="#F46D43")),
("multi_line", MultiLine(xs="xs", ys="ys", line_color="#8073AC", line_width=2)),
("oval", Oval(x="x", y="y", width=screen(15), height=screen(25), angle=-0.7, fill_color="#1D91C0")),
("patch", Patch(x="x", y="y", fill_color="#A6CEE3")),
("patches", Patches(xs="xs", ys="ys", fill_color="#FB9A99")),
("quad", Quad(left="x", right="xp01", top="y", bottom="ym01", fill_color="#B3DE69")),
("quadratic", Quadratic(x0="x", y0="y", x1="xp02", y1="y", cx="xp01", cy="yp01", line_color="#4DAF4A", line_width=3)),
("ray", Ray(x="x", y="y", length=45, angle=-0.7, line_color="#FB8072", line_width=2)),
("rect", Rect(x="x", y="y", width=screen(10), height=screen(20), angle=-0.7, fill_color="#CAB2D6")),
("segment", Segment(x0="x", y0="y", x1="xm01", y1="ym01", line_color="#F4A582", line_width=3)),
("text", Text(x="x", y="y", text=["hello"])),
("wedge", Wedge(x="x", y="y", radius=screen(15), start_angle=0.6, end_angle=4.1, fill_color="#B3DE69")),
]
markers = [
("circle", Circle(x="x", y="y", radius=0.1, fill_color="#3288BD")),
("circle_x", CircleX(x="x", y="y", size="sizes", line_color="#DD1C77", fill_color=None)),
("circle_cross", CircleCross(x="x", y="y", size="sizes", line_color="#FB8072", fill_color=None, line_width=2)),
("square", Square(x="x", y="y", size="sizes", fill_color="#74ADD1")),
("square_x", SquareX(x="x", y="y", size="sizes", line_color="#FDAE6B", fill_color=None, line_width=2)),
("square_cross", SquareCross(x="x", y="y", size="sizes", line_color="#7FC97F", fill_color=None, line_width=2)),
("diamond", Diamond(x="x", y="y", size="sizes", line_color="#1C9099", line_width=2)),
("diamond_cross", DiamondCross(x="x", y="y", size="sizes", line_color="#386CB0", fill_color=None, line_width=2)),
("triangle", Triangle(x="x", y="y", size="sizes", line_color="#99D594", line_width=2)),
("inverted_triangle", InvertedTriangle(x="x", y="y", size="sizes", line_color="#DE2D26", line_width=2)),
("cross", Cross(x="x", y="y", size="sizes", line_color="#E6550D", fill_color=None, line_width=2)),
("asterisk", Asterisk(x="x", y="y", size="sizes", line_color="#F0027F", fill_color=None, line_width=2)),
("x", X(x="x", y="y", size="sizes", line_color="thistle", fill_color=None, line_width=2)),
]
def make_tab(title, glyph):
plot = Plot(title=title, x_range=xdr, y_range=ydr)
plot.add_glyph(source, glyph)
xaxis = LinearAxis()
plot.add_layout(xaxis, 'below')
yaxis = LinearAxis()
plot.add_layout(yaxis, 'left')
plot.add_layout(Grid(dimension=0, ticker=xaxis.ticker))
plot.add_layout(Grid(dimension=1, ticker=yaxis.ticker))
plot.add_tools(HoverTool())
tab = Panel(child=plot, title=title)
return tab
def make_tabs(objs):
return Tabs(tabs=[ make_tab(title, obj) for title, obj in objs ])
layout = VBox(children=[Paragraph(text="Only Image and ImageRGBA glyphs are not demonstrated."), make_tabs(glyphs), make_tabs(markers)])
doc = Document()
doc.add_root(layout)
if __name__ == "__main__":
filename = "glyphs.html"
with open(filename, "w") as f:
f.write(file_html(doc, INLINE, "Glyphs"))
print("Wrote %s" % filename)
view(filename)
|
pombredanne/bokeh
|
examples/models/glyphs.py
|
Python
|
bsd-3-clause
| 5,266
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
################################################################################
#
# RMG - Reaction Mechanism Generator
#
# Copyright (c) 2002-2010 Prof. William H. Green (whgreen@mit.edu) and the
# RMG Team (rmg_dev@mit.edu)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the 'Software'),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################
"""
This module contains functionality for working with kinetics family functional
groups, including support for using group additivity to estimate rate
coefficients.
"""
import logging
import math
import numpy
from copy import deepcopy
from rmgpy.data.base import Database, DatabaseError, Entry, Group, LogicNode, getAllCombinations, makeLogicNode
from rmgpy.kinetics import Arrhenius, ArrheniusEP, KineticsData
from rmgpy.species import Species
from rmgpy.quantity import constants
from .common import KineticsError, UndeterminableKineticsError
################################################################################
class KineticsGroups(Database):
"""
    A class for working with an RMG kinetics family's group additivity values.
"""
def __init__(self,
entries=None,
top=None,
label='',
name='',
shortDesc='',
longDesc='',
forwardTemplate=None,
forwardRecipe=None,
reverseTemplate=None,
reverseRecipe=None,
forbidden=None
):
Database.__init__(self, entries, top, label, name, shortDesc, longDesc)
self.numReactants = 0
def __repr__(self):
return '<KineticsGroups "{0}">'.format(self.label)
def loadEntry(self, index, label, group, kinetics, reference=None, referenceType='', shortDesc='', longDesc=''):
if group[0:3].upper() == 'OR{' or group[0:4].upper() == 'AND{' or group[0:7].upper() == 'NOT OR{' or group[0:8].upper() == 'NOT AND{':
item = makeLogicNode(group)
else:
item = Group().fromAdjacencyList(group)
if label in self.entries:
raise DatabaseError("Duplicate group name {label} found in kinetics groups for {family} family.".format(label=label,family=self.label))
self.entries[label] = Entry(
index = index,
label = label,
item = item,
data = kinetics,
reference = reference,
referenceType = referenceType,
shortDesc = shortDesc,
longDesc = longDesc.strip(),
)
def getReactionTemplate(self, reaction):
"""
For a given `reaction` with properly-labeled :class:`Molecule` objects
as the reactants, determine the most specific nodes in the tree that
describe the reaction.
"""
# Get forward reaction template and remove any duplicates
forwardTemplate = self.top[:]
temporary = []
symmetricTree = False
for entry in forwardTemplate:
if entry not in temporary:
temporary.append(entry)
else:
# duplicate node found at top of tree
# eg. R_recombination: ['Y_rad', 'Y_rad']
                assert len(forwardTemplate) == 2, 'Can currently only do symmetric trees with nothing else in them'
symmetricTree = True
forwardTemplate = temporary
# Descend reactant trees as far as possible
template = []
for entry in forwardTemplate:
# entry is a top-level node that should be matched
group = entry.item
# Identify the atom labels in a group if it is not a logical node
atomList = []
if not isinstance(entry.item, LogicNode):
atomList = group.getLabeledAtoms()
for reactant in reaction.reactants:
if isinstance(reactant, Species):
reactant = reactant.molecule[0]
# Match labeled atoms
# Check that this reactant has each of the atom labels in this group. If it is a LogicNode, the atomList is empty and
# it will proceed directly to the descendTree step.
if not all([reactant.containsLabeledAtom(label) for label in atomList]):
continue # don't try to match this structure - the atoms aren't there!
# Match structures
atoms = reactant.getLabeledAtoms()
                # Descend the tree, making sure to match atom labels exactly using strict=True
matched_node = self.descendTree(reactant, atoms, root=entry, strict=True)
if matched_node is not None:
template.append(matched_node)
#else:
# logging.warning("Couldn't find match for {0} in {1}".format(entry,atomList))
# logging.warning(reactant.toAdjacencyList())
# Get fresh templates (with duplicate nodes back in)
forwardTemplate = self.top[:]
if self.label.lower().startswith('r_recombination'):
forwardTemplate.append(forwardTemplate[0])
# Check that we were able to match the template.
# template is a list of the actual matched nodes
# forwardTemplate is a list of the top level nodes that should be matched
if len(template) != len(forwardTemplate):
# print 'len(template):', len(template)
# print 'len(forwardTemplate):', len(forwardTemplate)
msg = 'Unable to find matching template for reaction {0} in reaction family {1}.'.format(str(reaction), str(self))
            msg += ' Trying to match {0} but matched {1}'.format(str(forwardTemplate), str(template))
# print 'reactants'
# for reactant in reaction.reactants:
# print reactant.toAdjacencyList() + '\n'
# print 'products'
# for product in reaction.products:
# print product.toAdjacencyList() + '\n'
raise UndeterminableKineticsError(reaction, message=msg)
return template
def estimateKineticsUsingGroupAdditivity(self, template, referenceKinetics, degeneracy=1):
"""
Determine the appropriate kinetics for a reaction with the given
`template` using group additivity.
"""
# Start with the generic kinetics of the top-level nodes
# Make a copy so we don't modify the original
kinetics = deepcopy(referenceKinetics)
# Now add in more specific corrections if possible
for node in template:
entry = node
comment_line = "Matched node "
while entry.data is None and entry not in self.top:
# Keep climbing tree until you find a (non-top) node with data.
comment_line += "{0} >> ".format(entry.label)
entry = entry.parent
if entry.data is not None and entry not in self.top:
kinetics = self.__multiplyKineticsData(kinetics, entry.data)
comment_line += "{0} ({1})".format(entry.label, entry.longDesc.split('\n')[0])
elif entry in self.top:
comment_line += "{0} (Top node)".format(entry.label)
kinetics.comment += comment_line + '\n'
# Also include reaction-path degeneracy
if isinstance(kinetics, KineticsData):
kinetics.kdata.value_si *= degeneracy
elif isinstance(kinetics, Arrhenius):
kinetics.A.value_si *= degeneracy
elif kinetics is not None:
raise KineticsError('Unexpected kinetics type "{0}" encountered while generating kinetics from group values.'.format(kinetics.__class__))
kinetics.comment += "Multiplied by reaction path degeneracy {0}".format(degeneracy)
return kinetics
def __multiplyKineticsData(self, kinetics1, kinetics2):
"""
Multiply two kinetics objects `kinetics1` and `kinetics2` of the same
class together, returning their product as a new kinetics object of
that class. Currently this only works for :class:`KineticsData` or
:class:`Arrhenius` objects.
"""
if isinstance(kinetics1, KineticsData) and isinstance(kinetics2, KineticsData):
if len(kinetics1.Tdata.value_si) != len(kinetics2.Tdata.value_si) or any([T1 != T2 for T1, T2 in zip(kinetics1.Tdata.value_si, kinetics2.Tdata.value_si)]):
                raise KineticsError('Cannot multiply these KineticsData objects due to their having different temperature points.')
kinetics = KineticsData(
                Tdata = (kinetics1.Tdata.value, kinetics1.Tdata.units),
kdata = (kinetics1.kdata.value * kinetics2.kdata.value, kinetics1.kdata.units),
)
elif isinstance(kinetics1, Arrhenius) and isinstance(kinetics2, Arrhenius):
assert kinetics1.A.units == kinetics2.A.units
assert kinetics1.Ea.units == kinetics2.Ea.units
assert kinetics1.T0.units == kinetics2.T0.units
assert kinetics1.T0.value == kinetics2.T0.value
kinetics = Arrhenius(
A = (kinetics1.A.value * kinetics2.A.value, kinetics1.A.units),
n = (kinetics1.n.value + kinetics2.n.value, kinetics1.n.units),
Ea = (kinetics1.Ea.value + kinetics2.Ea.value, kinetics1.Ea.units),
T0 = (kinetics1.T0.value, kinetics1.T0.units),
)
else:
raise KineticsError('Unable to multiply kinetics types "{0}" and "{1}".'.format(kinetics1.__class__, kinetics2.__class__))
if kinetics1.Tmin is not None and kinetics2.Tmin is not None:
kinetics.Tmin = kinetics1.Tmin if kinetics1.Tmin.value_si > kinetics2.Tmin.value_si else kinetics2.Tmin
elif kinetics1.Tmin is not None and kinetics2.Tmin is None:
kinetics.Tmin = kinetics1.Tmin
elif kinetics1.Tmin is None and kinetics2.Tmin is not None:
kinetics.Tmin = kinetics2.Tmin
if kinetics1.Tmax is not None and kinetics2.Tmax is not None:
kinetics.Tmax = kinetics1.Tmax if kinetics1.Tmax.value_si < kinetics2.Tmax.value_si else kinetics2.Tmax
elif kinetics1.Tmax is not None and kinetics2.Tmax is None:
kinetics.Tmax = kinetics1.Tmax
elif kinetics1.Tmax is None and kinetics2.Tmax is not None:
kinetics.Tmax = kinetics2.Tmax
if kinetics1.Pmin is not None and kinetics2.Pmin is not None:
kinetics.Pmin = kinetics1.Pmin if kinetics1.Pmin.value_si > kinetics2.Pmin.value_si else kinetics2.Pmin
elif kinetics1.Pmin is not None and kinetics2.Pmin is None:
kinetics.Pmin = kinetics1.Pmin
elif kinetics1.Pmin is None and kinetics2.Pmin is not None:
kinetics.Pmin = kinetics2.Pmin
if kinetics1.Pmax is not None and kinetics2.Pmax is not None:
kinetics.Pmax = kinetics1.Pmax if kinetics1.Pmax.value_si < kinetics2.Pmax.value_si else kinetics2.Pmax
elif kinetics1.Pmax is not None and kinetics2.Pmax is None:
kinetics.Pmax = kinetics1.Pmax
elif kinetics1.Pmax is None and kinetics2.Pmax is not None:
kinetics.Pmax = kinetics2.Pmax
if kinetics1.comment == '': kinetics.comment = kinetics2.comment
elif kinetics2.comment == '': kinetics.comment = kinetics1.comment
else: kinetics.comment = kinetics1.comment + ' + ' + kinetics2.comment
return kinetics
def generateGroupAdditivityValues(self, trainingSet, kunits, method='Arrhenius'):
"""
Generate the group additivity values using the given `trainingSet`,
a list of 2-tuples of the form ``(template, kinetics)``. You must also
specify the `kunits` for the family and the `method` to use when
generating the group values. Returns ``True`` if the group values have
changed significantly since the last time they were fitted, or ``False``
otherwise.
"""
# keep track of previous values so we can detect if they change
old_entries = dict()
for label,entry in self.entries.items():
if entry.data is not None:
old_entries[label] = entry.data
# Determine a complete list of the entries in the database, sorted as in the tree
groupEntries = self.top[:]
for entry in self.top:
groupEntries.extend(self.descendants(entry))
# Determine a unique list of the groups we will be able to fit parameters for
groupList = []
for template, kinetics in trainingSet:
for group in template:
if group not in self.top:
groupList.append(group)
groupList.extend(self.ancestors(group)[:-1])
groupList = list(set(groupList))
groupList.sort(key=lambda x: x.index)
if method == 'KineticsData':
# Fit a discrete set of k(T) data points by training against k(T) data
Tdata = numpy.array([300,400,500,600,800,1000,1500,2000])
# Initialize dictionaries of fitted group values and uncertainties
groupValues = {}; groupUncertainties = {}; groupCounts = {}; groupComments = {}
for entry in groupEntries:
groupValues[entry] = []
groupUncertainties[entry] = []
groupCounts[entry] = []
groupComments[entry] = set()
# Generate least-squares matrix and vector
A = []; b = []
kdata = []
for template, kinetics in trainingSet:
if isinstance(kinetics, (Arrhenius, KineticsData)):
kd = [kinetics.getRateCoefficient(T) for T in Tdata]
elif isinstance(kinetics, ArrheniusEP):
kd = [kinetics.getRateCoefficient(T, 0) for T in Tdata]
else:
raise Exception('Unexpected kinetics model of type {0} for template {1}.'.format(kinetics.__class__, template))
kdata.append(kd)
# Create every combination of each group and its ancestors with each other
combinations = []
for group in template:
groups = [group]; groups.extend(self.ancestors(group))
combinations.append(groups)
combinations = getAllCombinations(combinations)
# Add a row to the matrix for each combination
for groups in combinations:
Arow = [1 if group in groups else 0 for group in groupList]
Arow.append(1)
brow = [math.log10(k) for k in kd]
A.append(Arow); b.append(brow)
for group in groups:
groupComments[group].add("{0!s}".format(template))
if len(A) == 0:
logging.warning('Unable to fit kinetics groups for family "{0}"; no valid data found.'.format(self.label))
return
A = numpy.array(A)
b = numpy.array(b)
kdata = numpy.array(kdata)
x, residues, rank, s = numpy.linalg.lstsq(A, b)
for t, T in enumerate(Tdata):
# Determine error in each group (on log scale)
stdev = numpy.zeros(len(groupList)+1, numpy.float64)
                count = numpy.zeros(len(groupList)+1, int)
for index in range(len(trainingSet)):
template, kinetics = trainingSet[index]
kd = math.log10(kdata[index,t])
km = x[-1,t] + sum([x[groupList.index(group),t] for group in template if group in groupList])
variance = (km - kd)**2
for group in template:
groups = [group]; groups.extend(self.ancestors(group))
for g in groups:
if g not in self.top:
ind = groupList.index(g)
stdev[ind] += variance
count[ind] += 1
stdev[-1] += variance
count[-1] += 1
stdev = numpy.sqrt(stdev / (count - 1))
import scipy.stats
ci = scipy.stats.t.ppf(0.975, count - 1) * stdev
# Update dictionaries of fitted group values and uncertainties
for entry in groupEntries:
if entry == self.top[0]:
groupValues[entry].append(10**x[-1,t])
groupUncertainties[entry].append(10**ci[-1])
groupCounts[entry].append(count[-1])
elif entry in groupList:
index = groupList.index(entry)
groupValues[entry].append(10**x[index,t])
groupUncertainties[entry].append(10**ci[index])
groupCounts[entry].append(count[index])
else:
groupValues[entry] = None
groupUncertainties[entry] = None
groupCounts[entry] = None
# Store the fitted group values and uncertainties on the associated entries
for entry in groupEntries:
if groupValues[entry] is not None:
entry.data = KineticsData(Tdata=(Tdata,"K"), kdata=(groupValues[entry],kunits))
if not any(numpy.isnan(numpy.array(groupUncertainties[entry]))):
entry.data.kdata.uncertainties = numpy.array(groupUncertainties[entry])
entry.data.kdata.uncertaintyType = '*|/'
entry.shortDesc = "Group additive kinetics."
entry.longDesc = "Fitted to {0} rates.\n".format(groupCounts[entry])
entry.longDesc += "\n".join(groupComments[entry])
else:
entry.data = None
elif method == 'Arrhenius':
# Fit Arrhenius parameters (A, n, Ea) by training against k(T) data
Tdata = numpy.array([300,400,500,600,800,1000,1500,2000])
logTdata = numpy.log(Tdata)
Tinvdata = 1000. / (constants.R * Tdata)
A = []; b = []
kdata = []
for template, kinetics in trainingSet:
if isinstance(kinetics, (Arrhenius, KineticsData)):
kd = [kinetics.getRateCoefficient(T) for T in Tdata]
elif isinstance(kinetics, ArrheniusEP):
kd = [kinetics.getRateCoefficient(T, 0) for T in Tdata]
else:
raise Exception('Unexpected kinetics model of type {0} for template {1}.'.format(kinetics.__class__, template))
kdata.append(kd)
# Create every combination of each group and its ancestors with each other
combinations = []
for group in template:
groups = [group]; groups.extend(self.ancestors(group))
combinations.append(groups)
combinations = getAllCombinations(combinations)
# Add a row to the matrix for each combination at each temperature
for t, T in enumerate(Tdata):
logT = logTdata[t]
Tinv = Tinvdata[t]
for groups in combinations:
Arow = []
for group in groupList:
if group in groups:
Arow.extend([1,logT,-Tinv])
else:
Arow.extend([0,0,0])
Arow.extend([1,logT,-Tinv])
brow = math.log(kd[t])
A.append(Arow); b.append(brow)
if len(A) == 0:
logging.warning('Unable to fit kinetics groups for family "{0}"; no valid data found.'.format(self.label))
return
A = numpy.array(A)
b = numpy.array(b)
kdata = numpy.array(kdata)
x, residues, rank, s = numpy.linalg.lstsq(A, b)
# Store the results
self.top[0].data = Arrhenius(
A = (math.exp(x[-3]),kunits),
n = x[-2],
Ea = (x[-1],"kJ/mol"),
T0 = (1,"K"),
)
for i, group in enumerate(groupList):
group.data = Arrhenius(
A = (math.exp(x[3*i]),kunits),
n = x[3*i+1],
Ea = (x[3*i+2],"kJ/mol"),
T0 = (1,"K"),
)
elif method == 'Arrhenius2':
# Fit Arrhenius parameters (A, n, Ea) by training against (A, n, Ea) values
A = []; b = []
for template, kinetics in trainingSet:
# Create every combination of each group and its ancestors with each other
combinations = []
for group in template:
groups = [group]; groups.extend(self.ancestors(group))
combinations.append(groups)
combinations = getAllCombinations(combinations)
# Add a row to the matrix for each parameter
if isinstance(kinetics, Arrhenius) or (isinstance(kinetics, ArrheniusEP) and kinetics.alpha.value_si == 0):
for groups in combinations:
Arow = []
for group in groupList:
if group in groups:
Arow.append(1)
else:
Arow.append(0)
Arow.append(1)
Ea = kinetics.E0.value_si if isinstance(kinetics, ArrheniusEP) else kinetics.Ea.value_si
brow = [math.log(kinetics.A.value_si), kinetics.n.value_si, Ea / 1000.]
A.append(Arow); b.append(brow)
if len(A) == 0:
logging.warning('Unable to fit kinetics groups for family "{0}"; no valid data found.'.format(self.label))
return
A = numpy.array(A)
b = numpy.array(b)
x, residues, rank, s = numpy.linalg.lstsq(A, b)
# Store the results
self.top[0].data = Arrhenius(
A = (math.exp(x[-1,0]),kunits),
n = x[-1,1],
Ea = (x[-1,2],"kJ/mol"),
T0 = (1,"K"),
)
for i, group in enumerate(groupList):
group.data = Arrhenius(
A = (math.exp(x[i,0]),kunits),
n = x[i,1],
Ea = (x[i,2],"kJ/mol"),
T0 = (1,"K"),
)
# Add a note to the history of each changed item indicating that we've generated new group values
changed = False
for label, entry in self.entries.items():
            if entry.data is not None and label in old_entries:
if (isinstance(entry.data, KineticsData) and
isinstance(old_entries[label], KineticsData) and
len(entry.data.kdata.value_si) == len(old_entries[label].kdata.value_si) and
all(abs(entry.data.kdata.value_si / old_entries[label].kdata.value_si - 1) < 0.01)):
#print "New group values within 1% of old."
pass
elif (isinstance(entry.data, Arrhenius) and
isinstance(old_entries[label], Arrhenius) and
abs(entry.data.A.value_si / old_entries[label].A.value_si - 1) < 0.01 and
abs(entry.data.n.value_si / old_entries[label].n.value_si - 1) < 0.01 and
abs(entry.data.Ea.value_si / old_entries[label].Ea.value_si - 1) < 0.01 and
abs(entry.data.T0.value_si / old_entries[label].T0.value_si - 1) < 0.01):
#print "New group values within 1% of old."
pass
else:
changed = True
break
else:
changed = True
break
return changed
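# A standalone sanity check (illustrative, with invented numbers) of the
# Arrhenius product rule applied by __multiplyKineticsData, taking T0 = 1 K
# as the stored fits do:
#     k(T) = (A1*A2) * T**(n1+n2) * exp(-(Ea1+Ea2)/(R*T))
if __name__ == '__main__':
    A1, n1, Ea1 = 1.0e6, 0.5, 10000.0    # Ea in J/mol; values invented
    A2, n2, Ea2 = 2.0e6, -0.5, 5000.0
    R, T = 8.314, 1000.0
    k1 = A1 * T ** n1 * math.exp(-Ea1 / (R * T))
    k2 = A2 * T ** n2 * math.exp(-Ea2 / (R * T))
    k12 = (A1 * A2) * T ** (n1 + n2) * math.exp(-(Ea1 + Ea2) / (R * T))
    assert abs(k12 - k1 * k2) / (k1 * k2) < 1e-10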
|
chatelak/RMG-Py
|
rmgpy/data/kinetics/groups.py
|
Python
|
mit
| 26,065
|
#!/usr/bin/env python
import unittest, re
from unittest import TestCase
from plasTeX.TeX import TeX
from plasTeX import Command, Environment
class Document(TestCase):
def testParentage(self):
class myenv(Environment):
pass
class foo(Command):
args = 'self'
s = TeX()
s.ownerDocument.context.importMacros(locals())
s.input(r'\begin{myenv}\foo{TEST}\end{myenv}')
output = s.parse()
myenv = output[0]
assert myenv.nodeName == 'myenv', myenv.nodeName
foo = output[0][0]
assert foo.nodeName == 'foo', foo.nodeName
children = output[0][0].childNodes
fooself = output[0][0].attributes['self']
assert children is fooself
# Check parents
assert fooself[0].parentNode is fooself, fooself[0].parentNode
assert foo.parentNode is myenv, foo.parentNode
assert myenv.parentNode is output, myenv.parentNode
assert output.parentNode is None, output.parentNode
# Check owner document
assert fooself[0].ownerDocument is output
assert foo.ownerDocument is output
assert myenv.ownerDocument is output
assert output.ownerDocument is output
if __name__ == '__main__':
unittest.main()
|
dav94/plastex
|
unittests/Document.py
|
Python
|
mit
| 1,288
|
# -*- coding: utf-8 -*-
"""Tests for the services module."""
# These tests require pytest and mock. mock ships with Python 3.3+ (as
# unittest.mock) and has been backported for Python 2.7; the backport is
# available on PyPI.
from __future__ import unicode_literals
import pytest
from soco.exceptions import SoCoUPnPException
from soco.services import Service
try:
from unittest import mock
except ImportError:
import mock # TODO: add mock to requirements
# Dummy known-good errors/responses etc. These are not necessarily valid as
# actual commands, but are valid XML/UPnP. They also contain unicode characters
# to test unicode handling.
DUMMY_ERROR = "".join([
'<?xml version="1.0"?>',
'<s:Envelope ',
'xmlns:s="http://schemas.xmlsoap.org/soap/envelope/" ',
's:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">',
'<s:Body>',
'<s:Fault>',
'<faultcode>s:Client</faultcode>',
'<faultstring>UPnPError</faultstring>',
'<detail>',
'<UPnPError xmlns="urn:schemas-upnp-org:control-1-0">',
'<errorCode>607</errorCode>',
'<errorDescription>Oops μИⅠℂ☺ΔЄ💋</errorDescription>',
'</UPnPError>',
'</detail>',
'</s:Fault>',
'</s:Body>',
'</s:Envelope>']) # noqa PEP8
DUMMY_VALID_RESPONSE = "".join([
'<?xml version="1.0"?>',
'<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/"',
' s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">',
'<s:Body>',
'<u:GetLEDStateResponse ',
'xmlns:u="urn:schemas-upnp-org:service:DeviceProperties:1">',
'<CurrentLEDState>On</CurrentLEDState>',
'<Unicode>μИⅠℂ☺ΔЄ💋</Unicode>',
'</u:GetLEDStateResponse>',
'</s:Body>',
'</s:Envelope>']) # noqa PEP8
DUMMY_VALID_ACTION = "".join([
'<?xml version="1.0"?>',
'<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/"',
' s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">',
'<s:Body>',
'<u:SetAVTransportURI ',
'xmlns:u="urn:schemas-upnp-org:service:Service:1">',
'<InstanceID>0</InstanceID>',
'<CurrentURI>URI</CurrentURI>',
'<CurrentURIMetaData></CurrentURIMetaData>',
'<Unicode>μИⅠℂ☺ΔЄ💋</Unicode>'
'</u:SetAVTransportURI>',
'</s:Body>'
'</s:Envelope>']) # noqa PEP8
@pytest.fixture()
def service():
"""A mock Service, for use as a test fixture."""
mock_soco = mock.MagicMock()
mock_soco.ip_address = "192.168.1.101"
return Service(mock_soco)
def test_init_defaults(service):
"""Check default properties are set up correctly."""
assert service.service_type == "Service"
assert service.version == 1
assert service.service_id == "Service"
assert service.base_url == "http://192.168.1.101:1400"
assert service.control_url == "/Service/Control"
assert service.scpd_url == "/xml/Service1.xml"
assert service.event_subscription_url == "/Service/Event"
def test_method_dispatcher_function_creation(service):
"""Testing __getattr__ functionality."""
import inspect
# There should be no testing method
assert 'testing' not in service.__dict__.keys()
# but we should be able to inspect it
assert inspect.ismethod(service.testing)
# and then, having examined it, the method should be cached on the instance
assert 'testing' in service.__dict__.keys()
assert service.testing.__name__ == "testing"
# check that send_command is actually called when we invoke a method
service.send_command = lambda x, y: "Hello {0}".format(x)
assert service.testing(service) == "Hello testing"
def test_method_dispatcher_arg_count(service):
"""_dispatcher should pass its args to send_command."""
service.send_command = mock.Mock()
# http://bugs.python.org/issue7688
# __name__ must be a string in python 2
method = service.__getattr__(str('test'))
assert method('onearg')
service.send_command.assert_called_with('test', 'onearg')
assert method() # no args
service.send_command.assert_called_with('test')
assert method('one', cache_timeout=4) # one arg + cache_timeout
service.send_command.assert_called_with('test', 'one', cache_timeout=4)
def test_wrap(service):
"""wrapping args in XML properly."""
assert service.wrap_arguments([('first', 'one'), ('second', 2)]) == \
"<first>one</first><second>2</second>"
assert service.wrap_arguments() == ""
# Unicode
assert service.wrap_arguments([('unicode', "μИⅠℂ☺ΔЄ💋")]) == \
"<unicode>μИⅠℂ☺ΔЄ💋</unicode>"
# XML escaping - do we also need ' ?
assert service.wrap_arguments([('weird', '&<"2')]) == \
"<weird>&<"2</weird>"
def test_unwrap(service):
"""unwrapping args from XML."""
assert service.unwrap_arguments(DUMMY_VALID_RESPONSE) == {
"CurrentLEDState": "On",
"Unicode": "μИⅠℂ☺ΔЄ💋"}
def test_build_command(service):
"""Test creation of SOAP body and headers from a command."""
headers, body = service.build_command('SetAVTransportURI', [
('InstanceID', 0),
('CurrentURI', 'URI'),
('CurrentURIMetaData', ''),
('Unicode', 'μИⅠℂ☺ΔЄ💋')
])
assert body == DUMMY_VALID_ACTION
assert headers == {
'Content-Type': 'text/xml; charset="utf-8"',
'SOAPACTION':
'urn:schemas-upnp-org:service:Service:1#SetAVTransportURI'}
def test_send_command(service):
"""Calling a command should result in a http request, unless the cache is
hit."""
response = mock.MagicMock()
response.headers = {}
response.status_code = 200
response.text = DUMMY_VALID_RESPONSE
with mock.patch('requests.post', return_value=response) as fake_post:
result = service.send_command('SetAVTransportURI', [
('InstanceID', 0),
('CurrentURI', 'URI'),
('CurrentURIMetaData', ''),
('Unicode', 'μИⅠℂ☺ΔЄ💋')
], cache_timeout=2)
assert result == {'CurrentLEDState': 'On', 'Unicode': "μИⅠℂ☺ΔЄ💋"}
fake_post.assert_called_once_with(
'http://192.168.1.101:1400/Service/Control',
headers=mock.ANY, data=DUMMY_VALID_ACTION.encode('utf-8'))
# Now the cache should be primed, so try it again
fake_post.reset_mock()
result = service.send_command('SetAVTransportURI', [
('InstanceID', 0),
('CurrentURI', 'URI'),
('CurrentURIMetaData', ''),
('Unicode', 'μИⅠℂ☺ΔЄ💋')
], cache_timeout=0)
# The cache should be hit, so there should be no http request
assert not fake_post.called
        # but this should not affect a call with different params
fake_post.reset_mock()
result = service.send_command('SetAVTransportURI', [
('InstanceID', 1),
('CurrentURI', 'URI2'),
('CurrentURIMetaData', 'abcd'),
('Unicode', 'μИⅠℂ☺ΔЄ💋')
])
assert fake_post.called
# calling again after the time interval will avoid the cache
fake_post.reset_mock()
import time
time.sleep(2)
result = service.send_command('SetAVTransportURI', [
('InstanceID', 0),
('CurrentURI', 'URI'),
('CurrentURIMetaData', ''),
('Unicode', 'μИⅠℂ☺ΔЄ💋')
])
assert fake_post.called
def test_handle_upnp_error(service):
"""Check errors are extracted properly."""
with pytest.raises(SoCoUPnPException) as E:
service.handle_upnp_error(DUMMY_ERROR)
assert "UPnP Error 607 received: Signature Failure from 192.168.1.101" \
== E.value.message
assert E.value.error_code == '607'
assert E.value.error_description == 'Signature Failure'
# TODO: Try this with a None Error Code
# TODO: test iter_actions
|
dundeemt/SoCo
|
tests/test_services.py
|
Python
|
mit
| 8,183
|
#!/usr/bin/env python
"""
Test AMQP library.
Repeatedly receive messages from the demo_send.py
script, until it receives a message with 'quit' as the body.
2007-11-11 Barry Pederson <bp@barryp.org>
"""
from optparse import OptionParser
import amqplib.client_0_8 as amqp
def callback(msg):
for key, val in msg.properties.items():
print ('%s: %s' % (key, str(val)))
for key, val in msg.delivery_info.items():
print ('> %s: %s' % (key, str(val)))
print ('')
print (msg.body)
print ('-------')
msg.channel.basic_ack(msg.delivery_tag)
#
# Cancel this callback
#
if msg.body == 'quit':
msg.channel.basic_cancel(msg.consumer_tag)
def main():
parser = OptionParser()
parser.add_option('--host', dest='host',
help='AMQP server to connect to (default: %default)',
default='localhost')
parser.add_option('-u', '--userid', dest='userid',
help='userid to authenticate as (default: %default)',
default='guest')
parser.add_option('-p', '--password', dest='password',
help='password to authenticate with (default: %default)',
default='guest')
parser.add_option('--ssl', dest='ssl', action='store_true',
help='Enable SSL (default: not enabled)',
default=False)
options, args = parser.parse_args()
conn = amqp.Connection(options.host, userid=options.userid, password=options.password, ssl=options.ssl)
ch = conn.channel()
ch.access_request('/data', active=True, read=True)
ch.exchange_declare('myfan', 'fanout', auto_delete=True)
qname, _, _ = ch.queue_declare()
ch.queue_bind(qname, 'myfan')
ch.basic_consume(qname, callback=callback)
#
# Loop as long as the channel has callbacks registered
#
while ch.callbacks:
ch.wait()
ch.close()
conn.close()
if __name__ == '__main__':
main()
|
mzdaniel/oh-mainline
|
vendor/packages/amqplib/demo/demo_receive.py
|
Python
|
agpl-3.0
| 2,026
|
# Miro - an RSS based video player application
# Copyright 2009 - Participatory Culture Foundation
#
# This file is part of vidscraper.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
def search_string_from_terms(include_terms, exclude_terms=None):
marked_exclude_terms = ['-' + term for term in exclude_terms or []]
search_term_list = list(include_terms) + marked_exclude_terms
search_terms = ' '.join(search_term_list)
return search_terms
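# Illustrative usage with invented terms, showing the expected output format:
if __name__ == '__main__':
    assert search_string_from_terms(['dog', 'cat'], ['bird']) == 'dog cat -bird'
    assert search_string_from_terms(['news']) == 'news'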
|
norayr/unisubs
|
libs/vidscraper/metasearch/util.py
|
Python
|
agpl-3.0
| 1,646
|
from django.conf import settings
from typing import Text
from zerver.lib.utils import make_safe_digest
from zerver.models import UserProfile
import hashlib
def gravatar_hash(email: Text) -> Text:
"""Compute the Gravatar hash for an email address."""
# Non-ASCII characters aren't permitted by the currently active e-mail
# RFCs. However, the IETF has published https://tools.ietf.org/html/rfc4952,
    # outlining internationalization of email addresses. Regardless, if we
    # typo an address or someone manages to give us a non-ASCII address, let's
    # not error out on it.
return make_safe_digest(email.lower(), hashlib.md5)
def user_avatar_hash(uid: Text) -> Text:
# WARNING: If this method is changed, you may need to do a migration
# similar to zerver/migrations/0060_move_avatars_to_be_uid_based.py .
# The salt probably doesn't serve any purpose now. In the past we
# used a hash of the email address, not the user ID, and we salted
# it in order to make the hashing scheme different from Gravatar's.
user_key = uid + settings.AVATAR_SALT
return make_safe_digest(user_key, hashlib.sha1)
def user_avatar_path(user_profile: UserProfile) -> Text:
# WARNING: If this method is changed, you may need to do a migration
# similar to zerver/migrations/0060_move_avatars_to_be_uid_based.py .
return user_avatar_path_from_ids(user_profile.id, user_profile.realm_id)
def user_avatar_path_from_ids(user_profile_id: int, realm_id: int) -> Text:
user_id_hash = user_avatar_hash(str(user_profile_id))
return '%s/%s' % (str(realm_id), user_id_hash)
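# Illustration of the functions above (left as comments because running them
# assumes a configured Django/Zulip environment):
#   gravatar_hash('User@Example.com') == gravatar_hash('user@example.com')
# because the input is lower-cased before hashing, and
#   user_avatar_path_from_ids(17, 3)
# returns '3/<hash>', where <hash> is the salted SHA-1 digest of '17'.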
|
mahim97/zulip
|
zerver/lib/avatar_hash.py
|
Python
|
apache-2.0
| 1,623
|
# Copyright 2016 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Topographical Factor Analysis (TFA)
This implementation is based on the work in [Manning2014]_ and
[AndersonM2016]_.
.. [Manning2014] "Topographic factor analysis: a bayesian model for inferring
brain networks from neural data", J. R. Manning, R. Ranganath, K. A. Norman,
and D. M. Blei.PLoS One, vol. 9, no. 5, 2014.
.. [AndersonM2016] "Scaling Up Multi-Subject Neuroimaging Factor Analysis"
Michael J. Anderson, Mihai Capota, Javier S. Turek, Xia Zhu,
Theodore L. Willke, Yida Wang, Po-Hsuan Chen, Jeremy R. Manning,
Peter J. Ramadge, and Kenneth A. Norman
2016.
"""
# Authors: Xia Zhu (Intel Labs), Jeremy Manning (Dartmouth College) 2015~2016
from sklearn.base import BaseEstimator
from sklearn.metrics import mean_squared_error
from sklearn.cluster import KMeans
from scipy.optimize import least_squares
from scipy.optimize import linear_sum_assignment
from scipy.spatial import distance
from ..utils.utils import from_tri_2_sym, from_sym_2_tri
from . import tfa_extension # type: ignore
import numpy as np
import math
import gc
import logging
__all__ = [
"TFA",
]
logger = logging.getLogger(__name__)
class TFA(BaseEstimator):
"""Topographical Factor Analysis (TFA)
Given data from one subject, factorize it as a spatial factor F and
a weight matrix W.
Parameters
----------
max_iter : int, default: 10
Number of iterations to run the algorithm.
threshold : float, default: 1.0
Tolerance for terminating the parameter estimation
K : int, default: 50
Number of factors to compute
nlss_method : {'trf', 'dogbox', 'lm'}, default: 'trf'
        Non-Linear Least Square (NLSS) algorithm used by scipy.least_squares to
perform minimization. More information at
http://docs.scipy.org/doc/scipy-0.17.0/reference/generated/scipy.optimize.least_squares.html
    nlss_loss: str or callable, default: 'soft_l1'
Loss function used by scipy.least_squares.
More information at
http://docs.scipy.org/doc/scipy-0.17.0/reference/generated/scipy.optimize.least_squares.html
jac : {'2-point', '3-point', 'cs', callable}, default: '2-point'
Method of computing the Jacobian matrix.
More information at
http://docs.scipy.org/doc/scipy-0.17.0/reference/generated/scipy.optimize.least_squares.html
x_scale : float or array_like or 'jac', default: 1.0
        Characteristic scale of each variable for scipy.least_squares.
More information at
http://docs.scipy.org/doc/scipy-0.17.0/reference/generated/scipy.optimize.least_squares.html
tr_solver: {None, 'exact', 'lsmr'}, default: None
Method for solving trust-region subproblems, relevant only for 'trf'
and 'dogbox' methods.
More information at
http://docs.scipy.org/doc/scipy-0.17.0/reference/generated/scipy.optimize.least_squares.html
weight_method : {'rr','ols'}, default: 'rr'
Method for estimating weight matrix W given X and F.
'rr' means ridge regression, 'ols' means ordinary least square.
upper_ratio : float, default: 1.8
The upper bound of the ratio between factor's width
and maximum sigma of scanner coordinates.
lower_ratio : float, default: 0.02
The lower bound of the ratio between factor's width
and maximum sigma of scanner coordinates.
max_num_voxel : int, default: 5000
The maximum number of voxels to subsample.
max_num_tr : int, default: 500
The maximum number of TRs to subsample.
seed : int, default: 100
Seed for subsampling voxels and trs.
verbose : boolean, default: False
Verbose mode flag.
Attributes
----------
local_posterior_ : 1D array
Local posterior on subject's centers and widths
F_ : 2D array, in shape [n_voxel, K]
Latent factors of the subject
W_ : 2D array, in shape [K, n_tr]
Weight matrix of the subject
"""
def __init__(
self,
max_iter=10,
threshold=1.0,
K=50,
nlss_method='trf',
nlss_loss='soft_l1',
jac='2-point',
x_scale='jac',
tr_solver=None,
weight_method='rr',
upper_ratio=1.8,
lower_ratio=0.02,
max_num_tr=500,
max_num_voxel=5000,
seed=100,
verbose=False):
self.miter = max_iter
self.threshold = threshold
self.K = K
self.nlss_method = nlss_method
self.nlss_loss = nlss_loss
self.jac = jac
self.x_scale = x_scale
self.tr_solver = tr_solver
self.weight_method = weight_method
self.upper_ratio = upper_ratio
self.lower_ratio = lower_ratio
self.max_num_tr = max_num_tr
self.max_num_voxel = max_num_voxel
self.seed = seed
self.verbose = verbose
def set_K(self, K):
"""set K for the subject
Parameters
----------
K : integer
            Number of latent factors.
Returns
-------
TFA
Returns the instance itself.
"""
self.K = K
return self
def set_prior(self, prior):
"""set prior for the subject
Parameters
----------
prior : 1D array, with K*(n_dim+1) elements
Subject prior of centers and widths.
Returns
-------
TFA
Returns the instance itself.
"""
self.local_prior = prior
return self
def set_seed(self, seed):
"""set seed for the subject
Parameters
----------
seed : int
Seed for subsampling voxels and trs
Returns
-------
TFA
Returns the instance itself.
"""
self.seed = seed
return self
def init_prior(self, R):
"""initialize prior for the subject
Returns
-------
TFA
Returns the instance itself.
"""
centers, widths = self.init_centers_widths(R)
# update prior
prior = np.zeros(self.K * (self.n_dim + 1))
self.set_centers(prior, centers)
self.set_widths(prior, widths)
self.set_prior(prior)
return self
def _assign_posterior(self):
"""assign posterior to prior based on Hungarian algorithm
Returns
-------
TFA
Returns the instance itself.
"""
prior_centers = self.get_centers(self.local_prior)
posterior_centers = self.get_centers(self.local_posterior_)
posterior_widths = self.get_widths(self.local_posterior_)
# linear assignment on centers
cost = distance.cdist(prior_centers, posterior_centers, 'euclidean')
_, col_ind = linear_sum_assignment(cost)
# reorder centers/widths based on cost assignment
self.set_centers(self.local_posterior_, posterior_centers[col_ind])
self.set_widths(self.local_posterior_, posterior_widths[col_ind])
return self
def _converged(self):
"""Check convergence based on maximum absolute difference
Returns
-------
converged : boolean
Whether the parameter estimation converged.
max_diff : float
Maximum absolute difference between prior and posterior.
"""
diff = self.local_prior - self.local_posterior_
max_diff = np.max(np.fabs(diff))
if self.verbose:
_, mse = self._mse_converged()
diff_ratio = np.sum(diff ** 2) / np.sum(self.local_posterior_ ** 2)
logger.info(
'tfa prior posterior max diff %f mse %f diff_ratio %f' %
((max_diff, mse, diff_ratio)))
if max_diff > self.threshold:
return False, max_diff
else:
return True, max_diff
def _mse_converged(self):
"""Check convergence based on mean squared error
Returns
-------
converged : boolean
Whether the parameter estimation converged.
mse : float
Mean squared error between prior and posterior.
"""
mse = mean_squared_error(self.local_prior, self.local_posterior_,
multioutput='uniform_average')
if mse > self.threshold:
return False, mse
else:
return True, mse
def get_map_offset(self):
"""Compute offset of prior/posterior
Returns
-------
        map_offset : 1D array
            The offsets of the different fields in the prior/posterior
"""
nfield = 4
self.map_offset = np.zeros(nfield).astype(int)
field_size = self.K * np.array([self.n_dim, 1, self.cov_vec_size, 1])
for i in np.arange(nfield - 1) + 1:
self.map_offset[i] = self.map_offset[i - 1] + field_size[i - 1]
return self.map_offset
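    # Illustration: with K=2 and n_dim=3 (so that, assuming the triangular
    # packing implied by from_sym_2_tri, cov_vec_size equals
    # n_dim * (n_dim + 1) // 2 = 6), field_size is [6, 2, 12, 2] and
    # map_offset comes out [0, 6, 8, 20]: centers occupy [0:6], widths
    # [6:8], centers' mean cov [8:20], and widths' mean var [20:22].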
def init_centers_widths(self, R):
"""Initialize prior of centers and widths
Returns
-------
centers : 2D array, with shape [K, n_dim]
Prior of factors' centers.
widths : 1D array, with shape [K, 1]
Prior of factors' widths.
"""
kmeans = KMeans(
init='k-means++',
n_clusters=self.K,
n_init=10,
random_state=100)
kmeans.fit(R)
centers = kmeans.cluster_centers_
widths = self._get_max_sigma(R) * np.ones((self.K, 1))
return centers, widths
def get_template(self, R):
"""Compute a template on latent factors
Parameters
----------
R : 2D array, in format [n_voxel, n_dim]
The scanner coordinate matrix of one subject's fMRI data
Returns
-------
template_prior : 1D array
The template prior.
template_centers_cov: 2D array, in shape [n_dim, n_dim]
The template on centers' covariance.
template_widths_var: float
The template on widths' variance
"""
centers, widths = self.init_centers_widths(R)
template_prior =\
np.zeros(self.K * (self.n_dim + 2 + self.cov_vec_size))
# template centers cov and widths var are const
template_centers_cov = np.cov(R.T) * math.pow(self.K, -2 / 3.0)
template_widths_var = self._get_max_sigma(R)
centers_cov_all = np.tile(from_sym_2_tri(template_centers_cov), self.K)
widths_var_all = np.tile(template_widths_var, self.K)
# initial mean of centers' mean
self.set_centers(template_prior, centers)
self.set_widths(template_prior, widths)
self.set_centers_mean_cov(template_prior, centers_cov_all)
self.set_widths_mean_var(template_prior, widths_var_all)
return template_prior, template_centers_cov, template_widths_var
def set_centers(self, estimation, centers):
"""Set estimation on centers
Parameters
----------
        estimation : 1D array
Either prior or posterior estimation
centers : 2D array, in shape [K, n_dim]
Estimation on centers
"""
estimation[0:self.map_offset[1]] = centers.ravel()
def set_widths(self, estimation, widths):
"""Set estimation on widths
Parameters
----------
        estimation : 1D array
            Either prior or posterior estimation
widths : 2D array, in shape [K, 1]
Estimation on widths
"""
estimation[self.map_offset[1]:self.map_offset[2]] = widths.ravel()
def set_centers_mean_cov(self, estimation, centers_mean_cov):
"""Set estimation on centers
Parameters
----------
estimation : 1D arrary
Either prior of posterior estimation
centers : 2D array, in shape [K, n_dim]
Estimation on centers
"""
estimation[self.map_offset[2]:self.map_offset[3]] =\
centers_mean_cov.ravel()
def set_widths_mean_var(self, estimation, widths_mean_var):
"""Set estimation on centers
Parameters
----------
estimation : 1D arrary
Either prior of posterior estimation
centers : 2D array, in shape [K, n_dim]
Estimation on centers
"""
estimation[self.map_offset[3]:] = widths_mean_var.ravel()
def get_centers(self, estimation):
"""Get estimation on centers
Parameters
----------
        estimation : 1D array
            Either prior or posterior estimation
Returns
-------
centers : 2D array, in shape [K, n_dim]
Estimation on centers
"""
centers = estimation[0:self.map_offset[1]]\
.reshape(self.K, self.n_dim)
return centers
def get_widths(self, estimation):
"""Get estimation on widths
Parameters
----------
        estimation : 1D array
            Either prior or posterior estimation
        Returns
        -------
        widths : 2D array, in shape [K, 1]
            Estimation on widths
"""
widths = estimation[self.map_offset[1]:self.map_offset[2]]\
.reshape(self.K, 1)
return widths
def get_centers_mean_cov(self, estimation):
"""Get estimation on the covariance of centers' mean
Parameters
----------
        estimation : 1D array
            Either prior or posterior estimation
Returns
-------
centers_mean_cov : 2D array, in shape [K, cov_vec_size]
Estimation of the covariance of centers' mean
"""
centers_mean_cov = estimation[self.map_offset[2]:self.map_offset[3]]\
.reshape(self.K, self.cov_vec_size)
return centers_mean_cov
def get_widths_mean_var(self, estimation):
"""Get estimation on the variance of widths' mean
Parameters
----------
        estimation : 1D array
            Either prior or posterior estimation
Returns
-------
widths_mean_var : 2D array, in shape [K, 1]
Estimation on variance of widths' mean
"""
widths_mean_var = \
estimation[self.map_offset[3]:].reshape(self.K, 1)
return widths_mean_var
def get_factors(self, unique_R, inds, centers, widths):
"""Calculate factors based on centers and widths
Parameters
----------
unique_R : a list of array,
Each element contains unique value in one dimension of
scanner coordinate matrix R.
inds : a list of array,
Each element contains the indices to reconstruct one
            dimension of the original coordinate matrix from the unique
array.
centers : 2D array, with shape [K, n_dim]
The centers of factors.
widths : 1D array, with shape [K, 1]
The widths of factors.
Returns
-------
F : 2D array, with shape [n_voxel,self.K]
The latent factors from fMRI data.
"""
F = np.zeros((len(inds[0]), self.K))
tfa_extension.factor(
F,
centers,
widths,
unique_R[0],
unique_R[1],
unique_R[2],
inds[0],
inds[1],
inds[2])
return F
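    # tfa_extension.factor is compiled code; a rough NumPy equivalent (an
    # assumption about the kernel it evaluates, a Gaussian RBF per factor)
    # would be:
    #   coords = np.stack([unique_R[d][inds[d]] for d in range(self.n_dim)],
    #                     axis=1)
    #   for k in range(self.K):
    #       F[:, k] = np.exp(-np.sum((coords - centers[k]) ** 2, axis=1)
    #                        / widths[k, 0])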
def get_weights(self, data, F):
"""Calculate weight matrix based on fMRI data and factors
Parameters
----------
data : 2D array, with shape [n_voxel, n_tr]
fMRI data from one subject
F : 2D array, with shape [n_voxel,self.K]
The latent factors from fMRI data.
Returns
-------
W : 2D array, with shape [K, n_tr]
The weight matrix from fMRI data.
"""
beta = np.var(data)
trans_F = F.T.copy()
W = np.zeros((self.K, data.shape[1]))
if self.weight_method == 'rr':
W = np.linalg.solve(trans_F.dot(F) + beta * np.identity(self.K),
trans_F.dot(data))
else:
W = np.linalg.solve(trans_F.dot(F), trans_F.dot(data))
return W
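    # With weight_method 'rr' this is the closed-form ridge solution
    # (F'F + beta*I) W = F'X, regularized by beta = var(data); 'ols' solves
    # the unregularized normal equations. Shape check:
    #   F: [n_voxel, K], data: [n_voxel, n_tr]  ->  W: [K, n_tr]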
def _get_max_sigma(self, R):
"""Calculate maximum sigma of scanner RAS coordinates
Parameters
----------
R : 2D array, with shape [n_voxel, n_dim]
The coordinate matrix of fMRI data from one subject
Returns
-------
max_sigma : float
The maximum sigma of scanner coordinates.
"""
max_sigma = 2.0 * math.pow(np.nanmax(np.std(R, axis=0)), 2)
return max_sigma
def get_bounds(self, R):
"""Calculate lower and upper bounds for centers and widths
Parameters
----------
R : 2D array, with shape [n_voxel, n_dim]
The coordinate matrix of fMRI data from one subject
Returns
-------
        bounds : 2-tuple of array_like
The lower and upper bounds on factor's centers and widths.
"""
max_sigma = self._get_max_sigma(R)
final_lower = np.zeros(self.K * (self.n_dim + 1))
final_lower[0:self.K * self.n_dim] =\
np.tile(np.nanmin(R, axis=0), self.K)
final_lower[self.K * self.n_dim:] =\
np.repeat(self.lower_ratio * max_sigma, self.K)
final_upper = np.zeros(self.K * (self.n_dim + 1))
final_upper[0:self.K * self.n_dim] =\
np.tile(np.nanmax(R, axis=0), self.K)
final_upper[self.K * self.n_dim:] =\
np.repeat(self.upper_ratio * max_sigma, self.K)
bounds = (final_lower, final_upper)
return bounds
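    # The bound vectors use the same flattening as the estimate: the first
    # K * n_dim entries bound each center coordinate by the min/max of R,
    # and the last K entries bound the widths by
    # [lower_ratio, upper_ratio] * max_sigma, e.g.
    #   assert bounds[0].shape == (self.K * (self.n_dim + 1),)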
def _residual_multivariate(
self,
estimate,
unique_R,
inds,
X,
W,
template_centers,
template_centers_mean_cov,
template_widths,
template_widths_mean_var_reci,
data_sigma):
"""Residual function for estimating centers and widths
Parameters
----------
estimate : 1D array
            Current estimate of both centers and widths
unique_R : a list of array,
Each element contains unique value in one dimension of
coordinate matrix R.
inds : a list of array,
Each element contains the indices to reconstruct one
            dimension of the original coordinate matrix from the unique
array.
X : 2D array, with shape [n_voxel, n_tr]
fMRI data from one subject.
W : 2D array, with shape [K, n_tr]
The weight matrix.
template_centers: 2D array, with shape [K, n_dim]
The template prior on centers
template_centers_mean_cov: 2D array, with shape [K, cov_size]
The template prior on covariance of centers' mean
template_widths: 1D array
The template prior on widths
template_widths_mean_var_reci: 1D array
The reciprocal of template prior on variance of widths' mean
data_sigma: float
The variance of X.
Returns
-------
final_err : 1D array
            The residuals for estimating centers and widths.
"""
centers = self.get_centers(estimate)
widths = self.get_widths(estimate)
recon = X.size
other_err = 0 if template_centers is None else (2 * self.K)
final_err = np.zeros(recon + other_err)
F = self.get_factors(unique_R, inds, centers, widths)
sigma = np.zeros((1,))
sigma[0] = data_sigma
tfa_extension.recon(final_err[0:recon], X, F, W, sigma)
if other_err > 0:
# center error
for k in np.arange(self.K):
diff = (centers[k] - template_centers[k])
cov = from_tri_2_sym(template_centers_mean_cov[k], self.n_dim)
final_err[recon + k] = math.sqrt(
self.sample_scaling *
diff.dot(np.linalg.solve(cov, diff.T)))
# width error
base = recon + self.K
dist = template_widths_mean_var_reci *\
(widths - template_widths) ** 2
final_err[base:] = np.sqrt(self.sample_scaling * dist).ravel()
return final_err
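    # The center penalty appended above is a per-factor Mahalanobis
    # distance: sqrt(sample_scaling * d' Sigma_k^{-1} d) with
    # d = centers[k] - template_centers[k], computed via np.linalg.solve
    # rather than an explicit inverse; the width penalty is the analogous
    # variance-normalized, scaled squared difference.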
def _estimate_centers_widths(
self,
unique_R,
inds,
X,
W,
init_centers,
init_widths,
template_centers,
template_widths,
template_centers_mean_cov,
template_widths_mean_var_reci):
"""Estimate centers and widths
Parameters
----------
unique_R : a list of array,
Each element contains unique value in one dimension of
coordinate matrix R.
inds : a list of array,
Each element contains the indices to reconstruct one
            dimension of the original coordinate matrix from the unique
array.
X : 2D array, with shape [n_voxel, n_tr]
fMRI data from one subject.
W : 2D array, with shape [K, n_tr]
The weight matrix.
init_centers : 2D array, with shape [K, n_dim]
The initial values of centers.
init_widths : 1D array
The initial values of widths.
        template_centers: 2D array, with shape [K, n_dim]
            The template prior on centers
        template_widths: 1D array
            The template prior on widths
        template_centers_mean_cov: 2D array, with shape [K, cov_size]
            The template prior on covariance of centers' mean
template_widths_mean_var_reci: 1D array
The reciprocal of template prior on variance of widths' mean
Returns
-------
final_estimate.x: 1D array
The newly estimated centers and widths.
final_estimate.cost: float
The cost value.
"""
# least_squares only accept x in 1D format
        init_estimate = np.hstack(
            (init_centers.ravel(), init_widths.ravel()))
data_sigma = 1.0 / math.sqrt(2.0) * np.std(X)
final_estimate = least_squares(
self._residual_multivariate,
init_estimate,
args=(
unique_R,
inds,
X,
W,
template_centers,
template_widths,
template_centers_mean_cov,
template_widths_mean_var_reci,
data_sigma),
method=self.nlss_method,
loss=self.nlss_loss,
bounds=self.bounds,
verbose=0,
x_scale=self.x_scale,
tr_solver=self.tr_solver)
return final_estimate.x, final_estimate.cost
def _fit_tfa(self, data, R, template_prior=None):
"""TFA main algorithm
Parameters
----------
data: 2D array, in shape [n_voxel, n_tr]
The fMRI data from one subject.
R : 2D array, in shape [n_voxel, n_dim]
The voxel coordinate matrix of fMRI data
template_prior : 1D array,
The template prior on centers and widths.
Returns
-------
TFA
Returns the instance itself.
"""
if template_prior is None:
template_centers = None
template_widths = None
template_centers_mean_cov = None
template_widths_mean_var_reci = None
else:
template_centers = self.get_centers(template_prior)
template_widths = self.get_widths(template_prior)
template_centers_mean_cov =\
self.get_centers_mean_cov(template_prior)
template_widths_mean_var_reci = 1.0 /\
self.get_widths_mean_var(template_prior)
inner_converged = False
np.random.seed(self.seed)
n = 0
while n < self.miter and not inner_converged:
self._fit_tfa_inner(
data,
R,
template_centers,
template_widths,
template_centers_mean_cov,
template_widths_mean_var_reci)
self._assign_posterior()
inner_converged, _ = self._converged()
if not inner_converged:
self.local_prior = self.local_posterior_
else:
logger.info("TFA converged at %d iteration." % (n))
n += 1
gc.collect()
return self
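    # Each outer iteration alternates a stochastic inner fit on subsampled
    # voxels/TRs with a Hungarian re-matching of posterior factors to the
    # prior, so convergence is judged on permutation-aligned parameters.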
def get_unique_R(self, R):
"""Get unique vlaues from coordinate matrix
Parameters
----------
R : 2D array
The coordinate matrix of a subject's fMRI data
        Returns
        -------
unique_R : a list of array,
Each element contains unique value in one dimension of
coordinate matrix R.
inds : a list of array,
Each element contains the indices to reconstruct one
            dimension of the original coordinate matrix from the unique
array.
"""
unique_R = []
inds = []
for d in np.arange(self.n_dim):
tmp_unique, tmp_inds = np.unique(R[:, d], return_inverse=True)
unique_R.append(tmp_unique)
inds.append(tmp_inds)
return unique_R, inds
def _fit_tfa_inner(
self,
data,
R,
template_centers,
template_widths,
template_centers_mean_cov,
template_widths_mean_var_reci):
"""Fit TFA model, the inner loop part
Parameters
----------
data: 2D array, in shape [n_voxel, n_tr]
The fMRI data of a subject
R : 2D array, in shape [n_voxel, n_dim]
The voxel coordinate matrix of fMRI data
        template_centers: 2D array, with shape [K, n_dim]
            The template prior on centers
template_widths: 1D array
The template prior on widths
template_centers_mean_cov: 2D array, with shape [K, cov_size]
The template prior on covariance of centers' mean
template_widths_mean_var_reci: 1D array
The reciprocal of template prior on variance of widths' mean
Returns
-------
TFA
Returns the instance itself.
"""
nfeature = data.shape[0]
nsample = data.shape[1]
        # subsample voxels and TRs to bound the cost of one inner iteration
        feature_indices =\
            np.random.choice(nfeature, self.max_num_voxel, replace=False)
        samples_indices =\
            np.random.choice(nsample, self.max_num_tr, replace=False)
        curr_data = data[feature_indices][:, samples_indices].copy()
curr_R = R[feature_indices].copy()
centers = self.get_centers(self.local_prior)
widths = self.get_widths(self.local_prior)
unique_R, inds = self.get_unique_R(curr_R)
F = self.get_factors(unique_R, inds, centers, widths)
W = self.get_weights(curr_data, F)
self.local_posterior_, self.total_cost = self._estimate_centers_widths(
unique_R, inds, curr_data, W, centers, widths,
template_centers, template_centers_mean_cov,
template_widths, template_widths_mean_var_reci)
return self
def fit(self, X, R, template_prior=None):
""" Topographical Factor Analysis (TFA)[Manning2014]
Parameters
----------
X : 2D array, in shape [n_voxel, n_sample]
The fMRI data of one subject
R : 2D array, in shape [n_voxel, n_dim]
The voxel coordinate matrix of fMRI data
template_prior : None or 1D array
The template prior as an extra constraint
None when fitting TFA alone
"""
if self.verbose:
logger.info('Start to fit TFA ')
if not isinstance(X, np.ndarray):
raise TypeError("Input data should be an array")
if X.ndim != 2:
raise TypeError("Input data should be 2D array")
if not isinstance(R, np.ndarray):
raise TypeError("Input coordinate matrix should be an array")
if R.ndim != 2:
raise TypeError("Input coordinate matrix should be 2D array")
if X.shape[0] != R.shape[0]:
raise TypeError(
"The number of voxels should be the same in X and R!")
if self.weight_method != 'rr' and self.weight_method != 'ols':
raise ValueError(
"only 'rr' and 'ols' are accepted as weight_method!")
# main algorithm
self.n_dim = R.shape[1]
self.cov_vec_size = np.sum(np.arange(self.n_dim) + 1)
self.map_offset = self.get_map_offset()
self.bounds = self.get_bounds(R)
n_voxel = X.shape[0]
n_tr = X.shape[1]
self.sample_scaling = 0.5 * float(
self.max_num_voxel * self.max_num_tr) / float(n_voxel * n_tr)
if template_prior is None:
self.init_prior(R)
else:
self.local_prior = template_prior[0: self.map_offset[2]]
self._fit_tfa(X, R, template_prior)
if template_prior is None:
centers = self.get_centers(self.local_posterior_)
widths = self.get_widths(self.local_posterior_)
unique_R, inds = self.get_unique_R(R)
self.F_ = self.get_factors(unique_R, inds, centers, widths)
self.W_ = self.get_weights(X, self.F_)
return self
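# A minimal usage sketch (hypothetical shapes; assumes the constructor,
# defined earlier in this file, accepts the number of factors K):
#   X = np.random.randn(1000, 200)            # [n_voxel, n_tr] data
#   R = np.random.randint(0, 64, (1000, 3))   # [n_voxel, n_dim] coordinates
#   tfa = TFA(K=10).fit(X, R)
#   X_hat = tfa.F_.dot(tfa.W_)                # rank-K reconstruction of X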
|
lcnature/brainiak
|
brainiak/factoranalysis/tfa.py
|
Python
|
apache-2.0
| 30,560
|
# Copyright (c) 2012 OpenStack Foundation.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import netaddr
from oslo_config import cfg
import six
from neutron.api import extensions
from neutron.api.v2 import attributes as attr
from neutron.api.v2 import base
from neutron.common import constants as const
from neutron.common import exceptions as nexception
from neutron import manager
from neutron.openstack.common import uuidutils
from neutron import quota
# Security group Exceptions
class SecurityGroupInvalidPortRange(nexception.InvalidInput):
message = _("For TCP/UDP protocols, port_range_min must be "
"<= port_range_max")
class SecurityGroupInvalidPortValue(nexception.InvalidInput):
message = _("Invalid value for port %(port)s")
class SecurityGroupInvalidIcmpValue(nexception.InvalidInput):
message = _("Invalid value for ICMP %(field)s (%(attr)s) "
"%(value)s. It must be 0 to 255.")
class SecurityGroupMissingIcmpType(nexception.InvalidInput):
message = _("ICMP code (port-range-max) %(value)s is provided"
" but ICMP type (port-range-min) is missing.")
class SecurityGroupInUse(nexception.InUse):
message = _("Security Group %(id)s %(reason)s.")
def __init__(self, **kwargs):
if 'reason' not in kwargs:
kwargs['reason'] = _("in use")
super(SecurityGroupInUse, self).__init__(**kwargs)
class SecurityGroupCannotRemoveDefault(nexception.InUse):
message = _("Insufficient rights for removing default security group.")
class SecurityGroupCannotUpdateDefault(nexception.InUse):
message = _("Updating default security group not allowed.")
class SecurityGroupDefaultAlreadyExists(nexception.InUse):
message = _("Default security group already exists.")
class SecurityGroupRuleInvalidProtocol(nexception.InvalidInput):
message = _("Security group rule protocol %(protocol)s not supported. "
"Only protocol values %(values)s and integer representations "
"[0 to 255] are supported.")
class SecurityGroupRulesNotSingleTenant(nexception.InvalidInput):
message = _("Multiple tenant_ids in bulk security group rule create"
" not allowed")
class SecurityGroupRemoteGroupAndRemoteIpPrefix(nexception.InvalidInput):
message = _("Only remote_ip_prefix or remote_group_id may "
"be provided.")
class SecurityGroupProtocolRequiredWithPorts(nexception.InvalidInput):
message = _("Must also specifiy protocol if port range is given.")
class SecurityGroupNotSingleGroupRules(nexception.InvalidInput):
message = _("Only allowed to update rules for "
"one security profile at a time")
class SecurityGroupNotFound(nexception.NotFound):
message = _("Security group %(id)s does not exist")
class SecurityGroupRuleNotFound(nexception.NotFound):
message = _("Security group rule %(id)s does not exist")
class DuplicateSecurityGroupRuleInPost(nexception.InUse):
message = _("Duplicate Security Group Rule in POST.")
class SecurityGroupRuleExists(nexception.InUse):
message = _("Security group rule already exists. Rule id is %(id)s.")
class SecurityGroupRuleInUse(nexception.InUse):
message = _("Security Group Rule %(id)s %(reason)s.")
def __init__(self, **kwargs):
if 'reason' not in kwargs:
kwargs['reason'] = _("in use")
super(SecurityGroupRuleInUse, self).__init__(**kwargs)
class SecurityGroupRuleParameterConflict(nexception.InvalidInput):
message = _("Conflicting value ethertype %(ethertype)s for CIDR %(cidr)s")
class SecurityGroupConflict(nexception.Conflict):
message = _("Error %(reason)s while attempting the operation.")
def convert_protocol(value):
if value is None:
return
try:
val = int(value)
if val >= 0 and val <= 255:
# Set value of protocol number to string due to bug 1381379,
# PostgreSQL fails when it tries to compare integer with string,
# that exists in db.
return str(value)
raise SecurityGroupRuleInvalidProtocol(
protocol=value, values=sg_supported_protocols)
except (ValueError, TypeError):
if value.lower() in sg_supported_protocols:
return value.lower()
raise SecurityGroupRuleInvalidProtocol(
protocol=value, values=sg_supported_protocols)
except AttributeError:
raise SecurityGroupRuleInvalidProtocol(
protocol=value, values=sg_supported_protocols)
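# Sketch of the conversion behavior (assuming the usual lowercase
# const.PROTO_NAME_* values):
#   convert_protocol('TCP')   -> 'tcp'
#   convert_protocol('6')     -> '6'    # numeric protocols kept as strings
#   convert_protocol(None)    -> None
#   convert_protocol('bogus') -> raises SecurityGroupRuleInvalidProtocol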
def convert_ethertype_to_case_insensitive(value):
    if isinstance(value, six.string_types):
for ethertype in sg_supported_ethertypes:
if ethertype.lower() == value.lower():
return ethertype
def convert_validate_port_value(port):
if port is None:
return port
try:
val = int(port)
except (ValueError, TypeError):
raise SecurityGroupInvalidPortValue(port=port)
if val >= 0 and val <= 65535:
return val
else:
raise SecurityGroupInvalidPortValue(port=port)
def convert_to_uuid_list_or_none(value_list):
if value_list is None:
return
for sg_id in value_list:
if not uuidutils.is_uuid_like(sg_id):
msg = _("'%s' is not an integer or uuid") % sg_id
raise nexception.InvalidInput(error_message=msg)
return value_list
def convert_ip_prefix_to_cidr(ip_prefix):
if not ip_prefix:
return
try:
cidr = netaddr.IPNetwork(ip_prefix)
return str(cidr)
except (ValueError, TypeError, netaddr.AddrFormatError):
raise nexception.InvalidCIDR(input=ip_prefix)
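# Example conversions (netaddr normalizes a bare address to a host route):
#   convert_ip_prefix_to_cidr('10.0.0.0/8') -> '10.0.0.0/8'
#   convert_ip_prefix_to_cidr('10.0.0.1')   -> '10.0.0.1/32'
#   convert_ip_prefix_to_cidr('bogus')      -> raises nexception.InvalidCIDR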
def _validate_name_not_default(data, valid_values=None):
if data.lower() == "default":
raise SecurityGroupDefaultAlreadyExists()
attr.validators['type:name_not_default'] = _validate_name_not_default
sg_supported_protocols = [None, const.PROTO_NAME_TCP, const.PROTO_NAME_UDP,
const.PROTO_NAME_ICMP, const.PROTO_NAME_ICMP_V6]
sg_supported_ethertypes = ['IPv4', 'IPv6']
# Attribute Map
RESOURCE_ATTRIBUTE_MAP = {
'security_groups': {
'id': {'allow_post': False, 'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
'primary_key': True},
'name': {'allow_post': True, 'allow_put': True,
'is_visible': True, 'default': '',
'validate': {'type:name_not_default': attr.NAME_MAX_LEN}},
'description': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': attr.DESCRIPTION_MAX_LEN},
'is_visible': True, 'default': ''},
'tenant_id': {'allow_post': True, 'allow_put': False,
'required_by_policy': True,
'is_visible': True},
'security_group_rules': {'allow_post': False, 'allow_put': False,
'is_visible': True},
},
'security_group_rules': {
'id': {'allow_post': False, 'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
'primary_key': True},
'security_group_id': {'allow_post': True, 'allow_put': False,
'is_visible': True, 'required_by_policy': True},
'remote_group_id': {'allow_post': True, 'allow_put': False,
'default': None, 'is_visible': True},
'direction': {'allow_post': True, 'allow_put': True,
'is_visible': True,
'validate': {'type:values': ['ingress', 'egress']}},
'protocol': {'allow_post': True, 'allow_put': False,
'is_visible': True, 'default': None,
'convert_to': convert_protocol},
'port_range_min': {'allow_post': True, 'allow_put': False,
'convert_to': convert_validate_port_value,
'default': None, 'is_visible': True},
'port_range_max': {'allow_post': True, 'allow_put': False,
'convert_to': convert_validate_port_value,
'default': None, 'is_visible': True},
'ethertype': {'allow_post': True, 'allow_put': False,
'is_visible': True, 'default': 'IPv4',
'convert_to': convert_ethertype_to_case_insensitive,
'validate': {'type:values': sg_supported_ethertypes}},
'remote_ip_prefix': {'allow_post': True, 'allow_put': False,
'default': None, 'is_visible': True,
'convert_to': convert_ip_prefix_to_cidr},
'tenant_id': {'allow_post': True, 'allow_put': False,
'required_by_policy': True,
'is_visible': True},
}
}
SECURITYGROUPS = 'security_groups'
EXTENDED_ATTRIBUTES_2_0 = {
'ports': {SECURITYGROUPS: {'allow_post': True,
'allow_put': True,
'is_visible': True,
'convert_to': convert_to_uuid_list_or_none,
'default': attr.ATTR_NOT_SPECIFIED}}}
security_group_quota_opts = [
cfg.IntOpt('quota_security_group',
default=10,
help=_('Number of security groups allowed per tenant. '
'A negative value means unlimited.')),
cfg.IntOpt('quota_security_group_rule',
default=100,
help=_('Number of security rules allowed per tenant. '
'A negative value means unlimited.')),
]
cfg.CONF.register_opts(security_group_quota_opts, 'QUOTAS')
class Securitygroup(extensions.ExtensionDescriptor):
"""Security group extension."""
@classmethod
def get_name(cls):
return "security-group"
@classmethod
def get_alias(cls):
return "security-group"
@classmethod
def get_description(cls):
return "The security groups extension."
@classmethod
def get_namespace(cls):
# todo
return "http://docs.openstack.org/ext/securitygroups/api/v2.0"
@classmethod
def get_updated(cls):
return "2012-10-05T10:00:00-00:00"
@classmethod
def get_resources(cls):
"""Returns Ext Resources."""
my_plurals = [(key, key[:-1]) for key in RESOURCE_ATTRIBUTE_MAP.keys()]
attr.PLURALS.update(dict(my_plurals))
exts = []
plugin = manager.NeutronManager.get_plugin()
for resource_name in ['security_group', 'security_group_rule']:
collection_name = resource_name.replace('_', '-') + "s"
params = RESOURCE_ATTRIBUTE_MAP.get(resource_name + "s", dict())
quota.QUOTAS.register_resource_by_name(resource_name)
controller = base.create_resource(collection_name,
resource_name,
plugin, params, allow_bulk=True,
allow_pagination=True,
allow_sorting=True)
ex = extensions.ResourceExtension(collection_name,
controller,
attr_map=params)
exts.append(ex)
return exts
def get_extended_resources(self, version):
if version == "2.0":
return dict(EXTENDED_ATTRIBUTES_2_0.items() +
RESOURCE_ATTRIBUTE_MAP.items())
else:
return {}
@six.add_metaclass(abc.ABCMeta)
class SecurityGroupPluginBase(object):
@abc.abstractmethod
def create_security_group(self, context, security_group):
pass
@abc.abstractmethod
def update_security_group(self, context, id, security_group):
pass
@abc.abstractmethod
def delete_security_group(self, context, id):
pass
@abc.abstractmethod
def get_security_groups(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
pass
@abc.abstractmethod
def get_security_group(self, context, id, fields=None):
pass
@abc.abstractmethod
def create_security_group_rule(self, context, security_group_rule):
pass
@abc.abstractmethod
def delete_security_group_rule(self, context, id):
pass
@abc.abstractmethod
def get_security_group_rules(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
pass
@abc.abstractmethod
def get_security_group_rule(self, context, id, fields=None):
pass
|
yuewko/neutron
|
neutron/extensions/securitygroup.py
|
Python
|
apache-2.0
| 13,464
|
from django.conf import settings
from django.db.models import Sum
from django.db.models.query import F
from django.db.models.functions import Length
from zerver.models import BotConfigData, UserProfile
from typing import List, Dict, Optional
from collections import defaultdict
import os
import configparser
import importlib
class ConfigError(Exception):
pass
def get_bot_config(bot_profile: UserProfile) -> Dict[str, str]:
entries = BotConfigData.objects.filter(bot_profile=bot_profile)
if not entries:
raise ConfigError("No config data available.")
return {entry.key: entry.value for entry in entries}
def get_bot_configs(bot_profile_ids: List[int]) -> Dict[int, Dict[str, str]]:
if not bot_profile_ids:
return {}
entries = BotConfigData.objects.filter(bot_profile_id__in=bot_profile_ids)
entries_by_uid = defaultdict(dict) # type: Dict[int, Dict[str, str]]
for entry in entries:
entries_by_uid[entry.bot_profile_id].update({entry.key: entry.value})
return entries_by_uid
def get_bot_config_size(bot_profile: UserProfile, key: Optional[str]=None) -> int:
if key is None:
return BotConfigData.objects.filter(bot_profile=bot_profile) \
.annotate(key_size=Length('key'), value_size=Length('value')) \
.aggregate(sum=Sum(F('key_size')+F('value_size')))['sum'] or 0
else:
try:
return len(key) + len(BotConfigData.objects.get(bot_profile=bot_profile, key=key).value)
except BotConfigData.DoesNotExist:
return 0
def set_bot_config(bot_profile: UserProfile, key: str, value: str) -> None:
config_size_limit = settings.BOT_CONFIG_SIZE_LIMIT
old_entry_size = get_bot_config_size(bot_profile, key)
new_entry_size = len(key) + len(value)
old_config_size = get_bot_config_size(bot_profile)
new_config_size = old_config_size + (new_entry_size - old_entry_size)
if new_config_size > config_size_limit:
raise ConfigError("Cannot store configuration. Request would require {} characters. "
"The current configuration size limit is {} characters.".format(new_config_size,
config_size_limit))
obj, created = BotConfigData.objects.get_or_create(bot_profile=bot_profile, key=key,
defaults={'value': value})
if not created:
obj.value = value
obj.save()
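# A worked example of the size accounting above (hypothetical limit of 100
# characters): storing key='api_key' with a 50-character value uses
# 7 + 50 = 57 characters; replacing that value with a 90-character one
# later only needs the delta (7 + 90) - 57 = 40 extra characters to fit.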
def load_bot_config_template(bot: str) -> Dict[str, str]:
bot_module_name = 'zulip_bots.bots.{}'.format(bot)
bot_module = importlib.import_module(bot_module_name)
bot_module_path = os.path.dirname(bot_module.__file__)
config_path = os.path.join(bot_module_path, '{}.conf'.format(bot))
if os.path.isfile(config_path):
config = configparser.ConfigParser()
with open(config_path) as conf:
            config.read_file(conf)
return dict(config.items(bot))
else:
return dict()
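# The template is plain configparser INI keyed by the bot's name; a
# hypothetical zulip_bots/bots/followup/followup.conf might contain:
#   [followup]
#   stream = followup
# in which case this function returns {'stream': 'followup'}.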
|
jackrzhang/zulip
|
zerver/lib/bot_config.py
|
Python
|
apache-2.0
| 3,149
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""nvp_portmap
Revision ID: 38335592a0dc
Revises: 49332180ca96
Create Date: 2013-01-15 06:04:56.328991
"""
# revision identifiers, used by Alembic.
revision = '38335592a0dc'
down_revision = '49332180ca96'
# Change to ['*'] if this migration applies to all plugins
migration_for_plugins = [
'neutron.plugins.nicira.NeutronPlugin.NvpPluginV2'
]
from alembic import op
import sqlalchemy as sa
from neutron.db import migration
def upgrade(active_plugins=None, options=None):
if not migration.should_run(active_plugins, migration_for_plugins):
return
op.create_table(
'neutron_nvp_port_mapping',
sa.Column('neutron_id', sa.String(length=36), nullable=False),
sa.Column('nvp_id', sa.String(length=36), nullable=True),
sa.ForeignKeyConstraint(['neutron_id'], ['ports.id'],
ondelete='CASCADE'),
sa.PrimaryKeyConstraint('neutron_id'))
def downgrade(active_plugins=None, options=None):
if not migration.should_run(active_plugins, migration_for_plugins):
return
op.drop_table('neutron_nvp_port_mapping')
|
ntt-sic/neutron
|
neutron/db/migration/alembic_migrations/versions/38335592a0dc_nvp_portmap.py
|
Python
|
apache-2.0
| 1,775
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Helper script to apply pull requests from github.com to local hg working copy.
Example:
cd jython-hg-workdir
apply-github-pull-request.py 4
'''
import argparse
import getpass
import os
import requests
import subprocess
import sys
import tempfile
def get_pull_url(repo, pr_id):
return 'https://github.com/{}/pull/{}'.format(repo, pr_id)
def get_added_files(diff):
'''hacky approach to extract added files from github diff output'''
prefix = '+++ b/'
lastline = None
for line in diff.splitlines():
line = line.strip()
if line.startswith(prefix) and lastline and lastline == '--- /dev/null':
yield line[len(prefix):]
lastline = line
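# Illustration: for a diff hunk introducing a new file,
#   --- /dev/null
#   +++ b/Lib/new_module.py
# the generator yields 'Lib/new_module.py'; hunks whose '--- ' line names
# an existing path (plain modifications) are skipped.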
def main(args):
if not os.path.exists('.hg'):
print 'ERROR: No .hg folder found.'
print 'Please make sure you run this script from within your local hg.python.org/jython checkout.'
return 1
password = args.github_password
if password and password.startswith('@'):
# if the command line password starts with "@", we read it from a file
# (this prevents exposing the password on the command line / bash history)
with open(password[1:]) as fd:
password = fd.read().strip()
if not password:
password = getpass.getpass('{}@github.com: '.format(args.github_user))
from requests.auth import HTTPBasicAuth
auth = HTTPBasicAuth(args.github_user, password)
r = requests.get('https://api.github.com/repos/{}/pulls/{}'.format(args.github_repo, args.pull_request_id),
auth=auth)
if r.status_code != 200:
print 'ERROR:'
print r.json()
return 1
data = r.json()
r = requests.get('https://api.github.com/users/{}'.format(data['user']['login']), auth=auth)
if r.status_code != 200:
print 'ERROR:'
print r.json()
return 1
user_data = r.json()
    committer = '{} <{}>'.format(user_data['name'], user_data['email'])
    print 'Pull Request {} by {}:'.format(data['number'], committer)
print data['title']
print data['body']
print '-' * 40
r = requests.get('{}.diff'.format(get_pull_url(args.github_repo, args.pull_request_id)))
patch_contents = r.text
print patch_contents
added_files = set(get_added_files(patch_contents))
with tempfile.NamedTemporaryFile(suffix='-github-patch-{}'.format(args.pull_request_id)) as fd:
fd.write(patch_contents)
fd.flush()
cmd = [
'patch',
'-p1',
'--batch',
'--forward',
'-i',
fd.name,
]
if args.dry_run:
cmd.append('--dry-run')
p = subprocess.Popen(cmd)
p.communicate()
print '-' * 40
print 'Applied pull request {} into your current working directory.'.format(args.pull_request_id)
print 'Please check the changes (run unit tests) and commit...'
print '-' * 40
for fn in sorted(added_files):
print 'hg add {}'.format(fn)
    print 'hg ci -u "{}" -m "{} ({})"'.format(committer, data['title'],
                                              get_pull_url(args.github_repo, args.pull_request_id))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--github-repo', default='jythontools/jython')
parser.add_argument('-u', '--github-user', default=getpass.getuser())
parser.add_argument('-p', '--github-password',
help='Your github password (you can use "@" to read the password from a file)')
parser.add_argument('--dry-run', action='store_true', help='Dry-run mode: only show what would be done')
parser.add_argument('pull_request_id', help='Pull request ID on github.com')
args = parser.parse_args()
sys.exit(main(args))
|
p4datasystems/CarnotKE
|
jyhton/Misc/apply-github-pull-request.py
|
Python
|
apache-2.0
| 3,868
|
#! /usr/bin/env python
import os
import sys
from barf.barf import BARF
if __name__ == "__main__":
#
# Open file
#
try:
filename = os.path.abspath("../../samples/toy/x86/branch4")
barf = BARF(filename)
except Exception as err:
print err
print "[-] Error opening file : %s" % filename
sys.exit(1)
#
# Translate to REIL
#
print("[+] Translating x86 to REIL...")
for addr, asm_instr, reil_instrs in barf.translate():
print("0x{0:08x} : {1}".format(addr, asm_instr))
for reil_instr in reil_instrs:
print("{0:14}{1}".format("", reil_instr))
|
ignaeche/barf-project
|
barf/examples/x86/translate_reil.py
|
Python
|
bsd-2-clause
| 651
|
# -*- encoding: utf-8 -*-
"""
Tests for django.core.servers.
"""
from __future__ import unicode_literals
import os
import socket
from django.core.exceptions import ImproperlyConfigured
from django.test import LiveServerTestCase
from django.test import override_settings
from django.utils.http import urlencode
from django.utils.six.moves.urllib.error import HTTPError
from django.utils.six.moves.urllib.request import urlopen
from django.utils._os import upath
from .models import Person
TEST_ROOT = os.path.dirname(upath(__file__))
TEST_SETTINGS = {
'MEDIA_URL': '/media/',
'MEDIA_ROOT': os.path.join(TEST_ROOT, 'media'),
'STATIC_URL': '/static/',
'STATIC_ROOT': os.path.join(TEST_ROOT, 'static'),
}
class LiveServerBase(LiveServerTestCase):
available_apps = [
'servers',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
]
fixtures = ['testdata.json']
urls = 'servers.urls'
@classmethod
def setUpClass(cls):
# Override settings
cls.settings_override = override_settings(**TEST_SETTINGS)
cls.settings_override.enable()
super(LiveServerBase, cls).setUpClass()
@classmethod
def tearDownClass(cls):
# Restore original settings
cls.settings_override.disable()
super(LiveServerBase, cls).tearDownClass()
def urlopen(self, url):
return urlopen(self.live_server_url + url)
class LiveServerAddress(LiveServerBase):
"""
Ensure that the address set in the environment variable is valid.
Refs #2879.
"""
@classmethod
def setUpClass(cls):
# Backup original environment variable
address_predefined = 'DJANGO_LIVE_TEST_SERVER_ADDRESS' in os.environ
old_address = os.environ.get('DJANGO_LIVE_TEST_SERVER_ADDRESS')
# Just the host is not accepted
cls.raises_exception('localhost', ImproperlyConfigured)
# The host must be valid
cls.raises_exception('blahblahblah:8081', socket.error)
# The list of ports must be in a valid format
cls.raises_exception('localhost:8081,', ImproperlyConfigured)
cls.raises_exception('localhost:8081,blah', ImproperlyConfigured)
cls.raises_exception('localhost:8081-', ImproperlyConfigured)
cls.raises_exception('localhost:8081-blah', ImproperlyConfigured)
cls.raises_exception('localhost:8081-8082-8083', ImproperlyConfigured)
# Restore original environment variable
if address_predefined:
os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = old_address
else:
del os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS']
@classmethod
def tearDownClass(cls):
# skip it, as setUpClass doesn't call its parent either
pass
@classmethod
def raises_exception(cls, address, exception):
os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = address
try:
super(LiveServerAddress, cls).setUpClass()
raise Exception("The line above should have raised an exception")
except exception:
pass
finally:
super(LiveServerAddress, cls).tearDownClass()
def test_test_test(self):
# Intentionally empty method so that the test is picked up by the
# test runner and the overridden setUpClass() method is executed.
pass
class LiveServerViews(LiveServerBase):
def test_404(self):
"""
Ensure that the LiveServerTestCase serves 404s.
Refs #2879.
"""
try:
self.urlopen('/')
except HTTPError as err:
self.assertEqual(err.code, 404, 'Expected 404 response')
else:
self.fail('Expected 404 response')
def test_view(self):
"""
Ensure that the LiveServerTestCase serves views.
Refs #2879.
"""
f = self.urlopen('/example_view/')
self.assertEqual(f.read(), b'example view')
def test_static_files(self):
"""
Ensure that the LiveServerTestCase serves static files.
Refs #2879.
"""
f = self.urlopen('/static/example_static_file.txt')
self.assertEqual(f.read().rstrip(b'\r\n'), b'example static file')
def test_no_collectstatic_emulation(self):
"""
Test that LiveServerTestCase reports a 404 status code when HTTP client
        tries to access a static file that isn't explicitly put under
STATIC_ROOT.
"""
try:
self.urlopen('/static/another_app/another_app_static_file.txt')
except HTTPError as err:
self.assertEqual(err.code, 404, 'Expected 404 response')
else:
            self.fail('Expected 404 response')
def test_media_files(self):
"""
Ensure that the LiveServerTestCase serves media files.
Refs #2879.
"""
f = self.urlopen('/media/example_media_file.txt')
self.assertEqual(f.read().rstrip(b'\r\n'), b'example media file')
def test_environ(self):
f = self.urlopen('/environ_view/?%s' % urlencode({'q': 'тест'}))
self.assertIn(b"QUERY_STRING: 'q=%D1%82%D0%B5%D1%81%D1%82'", f.read())
class LiveServerDatabase(LiveServerBase):
def test_fixtures_loaded(self):
"""
Ensure that fixtures are properly loaded and visible to the
live server thread.
Refs #2879.
"""
f = self.urlopen('/model_view/')
self.assertEqual(f.read().splitlines(), [b'jane', b'robert'])
def test_database_writes(self):
"""
Ensure that data written to the database by a view can be read.
Refs #2879.
"""
self.urlopen('/create_model_instance/')
self.assertQuerysetEqual(
Person.objects.all().order_by('pk'),
['jane', 'robert', 'emily'],
lambda b: b.name
)
|
deployed/django
|
tests/servers/tests.py
|
Python
|
bsd-3-clause
| 5,943
|
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
#import numpy as np
#import Tango
#from base_plots import gpplot, x_frame1D, x_frame2D
from . import plotting_library as pl
def plot_optimizer(optimizer, **kwargs):
    if optimizer.trace is None:
print("No trace present so I can't plot it. Please check that the optimizer actually supplies a trace.")
else:
canvas, kwargs = pl().new_canvas(**kwargs)
        plots = dict(trace=pl().plot(canvas, range(len(optimizer.trace)), optimizer.trace))
return pl().add_to_canvas(canvas, plots, xlabel='Iteration', ylabel='f(x)')
def plot_sgd_traces(optimizer):
figure = pl().figure(2,1)
canvas, _ = pl().new_canvas(figure, 1, 1, title="Parameters")
plots = dict(lines=[])
for k in optimizer.param_traces.keys():
plots['lines'].append(pl().plot(canvas, range(len(optimizer.param_traces[k])), optimizer.param_traces[k], label=k))
pl().add_to_canvas(canvas, legend=True)
canvas, _ = pl().new_canvas(figure, 1, 2, title="Objective function")
pl().plot(canvas, range(len(optimizer.fopt_trace)), optimizer.fopt_trace)
return pl().add_to_canvas(canvas, plots, legend=True)
|
befelix/GPy
|
GPy/plotting/gpy_plot/inference_plots.py
|
Python
|
bsd-3-clause
| 1,233
|
#!/usr/bin/python2.4
# Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
''' Base class for postprocessing of RC files.
'''
import sys
class PostProcessor(object):
''' Base class for postprocessing of the RC file data before being
output through the RC2GRD tool. You should implement this class if
you want GRIT to do specific things to the RC files after it has
converted the data into GRD format, i.e. change the content of the
RC file, and put it into a P4 changelist, etc.'''
def Process(self, rctext, rcpath, grdnode):
''' Processes the data in rctext and grdnode.
Args:
rctext: string containing the contents of the RC file being processed.
rcpath: the path used to access the file.
      grdnode: the root node of the grd xml data generated by
the rc2grd tool.
Return:
The root node of the processed GRD tree.
'''
raise NotImplementedError()
|
meego-tablet-ux/meego-app-browser
|
tools/grit/grit/tool/postprocess_interface.py
|
Python
|
bsd-3-clause
| 1,033
|
import ocl as cam
import camvtk
import time
import vtk
import math
import datetime
red= (1,0,0)
green= (0,1,0)
blue= (0,0,1)
cyan= (0,1,1)
yellow= (1,1,0)
pink = ( float(255)/255,float(192)/255,float(203)/255)
grey = ( float(127)/255,float(127)/255,float(127)/255)
orange = ( float(255)/255,float(165)/255,float(0)/255)
def ccColor2(cc):
""" this function returns a different color depending on the type of
the CC-point. Useful for visualizing CL or CC points """
if cc.type==cam.CCType.FACET:
col = (1,0,1)
elif cc.type == cam.CCType.VERTEX:
col = (1,1,0)
elif cc.type == cam.CCType.EDGE:
col = (0,1,1)
elif cc.type == cam.CCType.NONE:
col = (1,1,1)
elif cc.type == cam.CCType.ERROR:
col = (0,0.5,1)
return col
class OffsetEllipse():
"""
// (s, t) where: s^2 + t^2 = 1
// point of ellipse is: ecen + j s + n t
// tangent at point is: -j t + n s
// normal at point is: j (s / eccen) + n (t * eccen)
// point on offset-ellipse: point on ellipse + offrad*normal
"""
def __init__(self, ecen, a, b, ofs):
self.a = a
self.b = b
self.ecen = ecen
self.ofs = ofs
def ePoint(self,epos):
# return a point on the ellipse
p = cam.Point()
p.x = self.ecen.x + self.a*epos.s
p.y = self.ecen.y + self.b*epos.t
return p
def oePoint(self,epos):
# return point on offset-ellipse
p = self.ePoint(epos)
normal = self.eNorm(epos)
p.x = p.x + self.ofs*normal.x
p.y = p.y + self.ofs*normal.y
return p
def eNorm(self, epos):
normal = cam.Point( self.b*epos.s, self.a*epos.t, 0)
normal.normalize()
return normal
def eTang(self, epos):
p = cam.Point(-self.a*epos.t, self.b*epos.s, 0)
p.normalize()
return p
def error(self,epos,cl):
p1 = self.oePoint(epos)
p2 = cl
dx = p1.x - cl.x
dy = p1.y - cl.y
#return dx*dx+dy*dy
return dy
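    # Note: error() measures only the y-distance between the offset-ellipse
    # point and cl (the squared distance is left commented out), so the
    # Newton iteration in __main__ below drives that single coordinate to
    # zero.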
class EPos():
"""
// (s, t) where: s^2 + t^2 = 1
// point of ellipse is: ecen + j s + n t
// tangent at point is: -j t + n s
// normal at point is: j (s / eccen) + n (t * eccen)
// point on offset-ellipse: point on ellipse + offrad*normal
"""
def __init__(self):
self.t = 1
self.s = 0
self.sett(self.t,1)
def sets(self, s, side):
if s > 1.0:
s = 1.0
if s < -1.0:
s = -1.0
self.s = s
ssq = s*s
tsq = 1 - ssq
if side == 1:
self.t = 1 * math.sqrt(tsq)
else:
self.t = -1 * math.sqrt(tsq)
def sett(self, t, side):
if t > 1.0:
t = 1.0
if t < -1.0:
t = -1.0
self.t = t
tsq = t*t
ssq = 1 - tsq
if side == 1:
self.s = 1 * math.sqrt(ssq)
else:
self.s = -1* math.sqrt(ssq)
def stepTang(self, ellipse, delta):
        tang = ellipse.eTang(self)
#print " epos= (", self.s," , ", self.t , " )"
#print "steptang tang=", tang.str()
if abs(tang.x) > abs(tang.y):
#print "s-dir step"
news = self.s + delta*tang.x
if self.t > 0:
self.sets(news,1)
else:
self.sets(news,0)
else:
#print "t-dir step"
newt = self.t + delta*tang.y
if self.s>0:
self.sett( newt,1)
else:
self.sett( newt,0)
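# The __main__ block below wraps stepTang in a numerical Newton-Raphson
# loop: the error is sampled at the current (s, t) and at a small
# tangential offset dt, the derivative is approximated as
# (e(p + dt) - e(p)) / dt, and the step taken is -e / e'.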
if __name__ == "__main__":
myscreen = camvtk.VTKScreen()
myscreen.camera.SetPosition(5, 3, 2)
myscreen.camera.SetFocalPoint(1.38,1, 0)
a=cam.Point(3,2,-2)
b=cam.Point(-1,2,3)
myscreen.addActor(camvtk.Point(center=(a.x,a.y,a.z), color=(1,0,1)));
myscreen.addActor(camvtk.Point(center=(b.x,b.y,b.z), color=(1,0,1)));
#c=cam.Point(0,0,0.3)
myscreen.addActor( camvtk.Line(p1=(a.x,a.y,a.z),p2=(b.x,b.y,b.z)) )
#t = cam.Triangle(a,b,c)
cutter = cam.BullCutter(1,0.2)
print cutter.str()
xar = camvtk.Arrow(color=red, rotXYZ=(0,0,0))
myscreen.addActor(xar)
yar = camvtk.Arrow(color=green, rotXYZ=(0,0,90))
myscreen.addActor(yar)
zar = camvtk.Arrow(color=blue, rotXYZ=(0,-90,0))
myscreen.addActor(zar)
cl = cam.Point(2.193, 1, 0)
radius1=1
radius2=0.25
tor = camvtk.Toroid(r1=radius1, r2=radius2, center=(cl.x, cl.y, cl.z),rotXYZ=(0,0,0))
#tor.SetWireframe()
#myscreen.addActor(tor)
cyl = camvtk.Cylinder(center=(cl.x,cl.y,cl.z) , radius=radius1, height=2, color=(0,1,1),
rotXYZ=(90,0,0), resolution=50 )
#myscreen.addActor(cyl)
tube = camvtk.Tube(p1=(a.x,a.y,a.z),p2=(b.x,b.y,b.z),color=(1,1,0))
tube.SetOpacity(0.2)
#myscreen.addActor(tube)
cir= camvtk.Circle(radius=radius1, center=(cl.x,cl.y,cl.z), color=yellow)
myscreen.addActor(cir)
clp = camvtk.Point(center=(cl.x,cl.y,cl.z))
myscreen.addActor(clp)
# short axis of ellipse = radius2
# long axis of ellipse = radius2/sin(theta)
# where theta is the slope of the line
dx = b.x - a.x
dz = b.z - a.z
#print "dx=", dx
#print "dz=", dz
theta = math.atan(dz/dx)
#print "theta=",theta
a = abs( radius2/math.sin(theta) )
#print "a=", a, " = ", a/radius2,"* radius2"
# ellipse
#a=2
b=radius2
#print "b= ", b
ecen_tmp=cam.Point(1.38,2,0)
resolution=50
for n in xrange(0,resolution):
angle1= (float(n)/float(resolution))*2*math.pi
angle2= (float(n+1)/float(resolution))*2*math.pi
x=ecen_tmp.x + a*math.cos(angle1)
y=ecen_tmp.y + b*math.sin(angle1)
x2=ecen_tmp.x + a*math.cos(angle2)
y2=ecen_tmp.y + b*math.sin(angle2)
#myscreen.addActor(camvtk.Point(center=(x,y,0), color=(1,0,1)))
#myscreen.addActor( camvtk.Line(p1=(x,y,0),p2=(x2,y2,0)) )
oe = OffsetEllipse(ecen_tmp, a, b, radius1)
nmax=20
delta=0.05
td = 1
epos1 = EPos()
epos2 = EPos()
epos3 = EPos()
epos4 = EPos()
epos5 = EPos()
for n in xrange(0,nmax):
s = float(n)/float(nmax-1) * float(2)/math.sqrt(2) - float(1)/math.sqrt(2)
t = float(n)/float(nmax-1) * float(2)/math.sqrt(2) - float(1)/math.sqrt(2)
epos1.sets(s,1)
epos2.sets(s,0)
epos3.sett(t,1)
epos4.sett(t,0)
p1 = oe.ePoint(epos1)
p2 = oe.ePoint(epos2)
p3 = oe.ePoint(epos3)
p4 = oe.ePoint(epos4)
p5 = oe.ePoint(epos5)
p1o = oe.oePoint(epos1)
p2o = oe.oePoint(epos2)
#print "s=", s, "t=", t," epoint=", p1.str()
myscreen.addActor(camvtk.Point(center=(p1.x,p1.y,0), color=green)) # green steps along s (side=1)
myscreen.addActor(camvtk.Point(center=(p2.x,p2.y,0), color=red)) # red steps along s (side=0)
myscreen.addActor(camvtk.Point(center=(p3.x,p3.y,0), color=orange)) # orange steps along t (side=1)
myscreen.addActor(camvtk.Point(center=(p4.x,p4.y,0), color=pink)) # pink steps along t (side=0)
#myscreen.addActor(camvtk.Sphere(center=(p5.x,p5.y,0), radius=0.02, color=red))
#myscreen.addActor(camvtk.Point(center=(p1o.x,p1o.y,0), color=green))
#myscreen.addActor(camvtk.Point(center=(p2o.x,p2o.y,0), color=red))
"""
tx = float(n)/float(nmax-1) * 2 - 1
sx = oe.seval(tx, 1)
sx2 = oe.seval(tx, 0)
p3 = oe.ePoint(sx,tx)
p4 = oe.ePoint(sx2,tx)
#myscreen.addActor(camvtk.Point(center=(p3.x,p3.y,0), color=orange))
#myscreen.addActor(camvtk.Point(center=(p4.x,p4.y,0), color=pink))
sd = oe.seval(td,1)
p5 = oe.ePoint(td,sd)
myscreen.addActor(camvtk.Point(center=(p5.x,p5.y,0), color=orange))
"""
myscreen.render()
#time.sleep(0.05)
t = camvtk.Text()
t.SetPos( (myscreen.width-450, myscreen.height-30) )
myscreen.addActor( t)
t2 = camvtk.Text()
t2.SetPos( (50, myscreen.height-150) )
myscreen.addActor( t2)
epos5.sets(0.5,1)
Nsteps=10
endcondition = 0
n = 1
NRStep=0.1
w2if = vtk.vtkWindowToImageFilter()
w2if.SetInput(myscreen.renWin)
lwr = vtk.vtkPNGWriter()
lwr.SetInput( w2if.GetOutput() )
while not endcondition:
#for n in xrange(0,Nsteps):
t.SetText("OpenCAMLib 10.03-beta, " + datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
current_error = oe.error(epos5, cl)
print "current error=", current_error
epos_tmp = EPos()
epos_tmp.s = epos5.s
epos_tmp.t = epos5.t
        # take a small step, to determine the derivative:
dt = 0.2*NRStep
epos_tmp.stepTang(oe,dt)
new_error = oe.error(epos_tmp, cl)
print "new_error=", new_error
deriv = (new_error-current_error)/dt
print "derivative = ", deriv
paramtext = "(s, t) = (%3.3f, %3.3f)\n NR iteration # = %i \n error= %3.9f\n de=%3.3f" % (epos5.s, epos5.t, n, current_error,deriv)
t2.SetText(paramtext)
        # take a Newton-Raphson step
NRStep = (-current_error/deriv)
print " step=", NRStep
#NRStep=0.05 # debug/demo
epos5.stepTang(oe, NRStep)
p5 = oe.ePoint(epos5)
esphere = camvtk.Sphere(center=(p5.x,p5.y,0), radius=0.02, color=red)
myscreen.addActor(esphere)
p6 = oe.eTang(epos5)
p7 = oe.oePoint(epos5)
oesphere = camvtk.Sphere(center=(p7.x,p7.y,p7.z), radius=0.02, color=green)
tangline = camvtk.Line(p1=(p5.x,p5.y,p5.z),p2=(p5.x+p6.x,p5.y+p6.y,p5.z+p6.z))
normline = camvtk.Line(p1=(p5.x,p5.y,p5.z),p2=(p7.x,p7.y,p7.z), color=yellow)
myscreen.addActor( tangline )
myscreen.addActor( normline )
myscreen.addActor( oesphere )
myscreen.render()
time.sleep(0.5)
if abs(current_error) < 1e-8:
endcondition=1
if n>125:
endcondition=1
if not endcondition:
myscreen.removeActor(esphere)
myscreen.removeActor(tangline)
myscreen.removeActor(normline)
myscreen.removeActor(oesphere)
w2if.Modified()
#lwr.SetFileName("5_all.png")
"""
for i in xrange(0,10):
lwr.SetFileName("frames/oe_nrx"+ ('%05d%02d' % (n,i))+".png")
lwr.Write()
"""
n=n+1
print "rendering...",
#for cl,cc in zip(clpoints,ccpoints):
# myscreen.addActor( camvtk.Point(center=(cl.x,cl.y,cl.z) , color=ccColor(cc) ) )
# if cc.type != cam.CCType.NONE: # only render interesting cc-points
# myscreen.addActor( camvtk.Point(center=(cc.x,cc.y,cc.z) , color=ccColor2(cc) ) )
print "done."
myscreen.render()
w2if = vtk.vtkWindowToImageFilter()
w2if.SetInput(myscreen.renWin)
lwr = vtk.vtkPNGWriter()
lwr.SetInput( w2if.GetOutput() )
w2if.Modified()
lwr.SetFileName("5_all.png")
#lwr.Write()
myscreen.iren.Start()
#raw_input("Press Enter to terminate")
|
AlanZatarain/opencamlib
|
scripts/offset-ellipse/oellipse2_tst.py
|
Python
|
gpl-3.0
| 11,549
|
import binascii
import json
import warnings
try:
    from collections.abc import Mapping
except ImportError:  # Python 2
    from collections import Mapping
from .algorithms import (
Algorithm, get_default_algorithms, has_crypto, requires_cryptography # NOQA
)
from .compat import binary_type, string_types, text_type
from .exceptions import (
DecodeError, InvalidAlgorithmError, InvalidSignatureError,
InvalidTokenError
)
from .utils import base64url_decode, base64url_encode, force_bytes, merge_dict
class PyJWS(object):
header_typ = 'JWT'
def __init__(self, algorithms=None, options=None):
self._algorithms = get_default_algorithms()
self._valid_algs = (set(algorithms) if algorithms is not None
else set(self._algorithms))
# Remove algorithms that aren't on the whitelist
for key in list(self._algorithms.keys()):
if key not in self._valid_algs:
del self._algorithms[key]
if not options:
options = {}
self.options = merge_dict(self._get_default_options(), options)
@staticmethod
def _get_default_options():
return {
'verify_signature': True
}
def register_algorithm(self, alg_id, alg_obj):
"""
Registers a new Algorithm for use when creating and verifying tokens.
"""
if alg_id in self._algorithms:
raise ValueError('Algorithm already has a handler.')
if not isinstance(alg_obj, Algorithm):
raise TypeError('Object is not of type `Algorithm`')
self._algorithms[alg_id] = alg_obj
self._valid_algs.add(alg_id)
def unregister_algorithm(self, alg_id):
"""
Unregisters an Algorithm for use when creating and verifying tokens
Throws KeyError if algorithm is not registered.
"""
if alg_id not in self._algorithms:
raise KeyError('The specified algorithm could not be removed'
' because it is not registered.')
del self._algorithms[alg_id]
self._valid_algs.remove(alg_id)
def get_algorithms(self):
"""
Returns a list of supported values for the 'alg' parameter.
"""
return list(self._valid_algs)
def encode(self, payload, key, algorithm='HS256', headers=None,
json_encoder=None):
segments = []
if algorithm is None:
algorithm = 'none'
        # an algorithm outside the whitelist is not rejected here; the
        # lookup in self._algorithms below raises KeyError, which is
        # re-raised as NotImplementedError
# Header
header = {'typ': self.header_typ, 'alg': algorithm}
if headers:
self._validate_headers(headers)
header.update(headers)
json_header = force_bytes(
json.dumps(
header,
separators=(',', ':'),
cls=json_encoder
)
)
segments.append(base64url_encode(json_header))
segments.append(base64url_encode(payload))
# Segments
signing_input = b'.'.join(segments)
try:
alg_obj = self._algorithms[algorithm]
key = alg_obj.prepare_key(key)
signature = alg_obj.sign(signing_input, key)
except KeyError:
if not has_crypto and algorithm in requires_cryptography:
raise NotImplementedError(
"Algorithm '%s' could not be found. Do you have cryptography "
"installed?" % algorithm
)
else:
raise NotImplementedError('Algorithm not supported')
segments.append(base64url_encode(signature))
return b'.'.join(segments)
def decode(self, jws, key='', verify=True, algorithms=None, options=None,
**kwargs):
merged_options = merge_dict(self.options, options)
verify_signature = merged_options['verify_signature']
if verify_signature and not algorithms:
warnings.warn(
'It is strongly recommended that you pass in a ' +
'value for the "algorithms" argument when calling decode(). ' +
'This argument will be mandatory in a future version.',
DeprecationWarning
)
payload, signing_input, header, signature = self._load(jws)
if not verify:
warnings.warn('The verify parameter is deprecated. '
'Please use verify_signature in options instead.',
DeprecationWarning, stacklevel=2)
elif verify_signature:
self._verify_signature(payload, signing_input, header, signature,
key, algorithms)
return payload
def get_unverified_header(self, jwt):
"""Returns back the JWT header parameters as a dict()
Note: The signature is not verified so the header parameters
should not be fully trusted until signature verification is complete
"""
headers = self._load(jwt)[2]
self._validate_headers(headers)
return headers
def _load(self, jwt):
if isinstance(jwt, text_type):
jwt = jwt.encode('utf-8')
if not issubclass(type(jwt), binary_type):
raise DecodeError("Invalid token type. Token must be a {0}".format(
binary_type))
try:
signing_input, crypto_segment = jwt.rsplit(b'.', 1)
header_segment, payload_segment = signing_input.split(b'.', 1)
except ValueError:
raise DecodeError('Not enough segments')
try:
header_data = base64url_decode(header_segment)
except (TypeError, binascii.Error):
raise DecodeError('Invalid header padding')
try:
header = json.loads(header_data.decode('utf-8'))
except ValueError as e:
raise DecodeError('Invalid header string: %s' % e)
if not isinstance(header, Mapping):
raise DecodeError('Invalid header string: must be a json object')
try:
payload = base64url_decode(payload_segment)
except (TypeError, binascii.Error):
raise DecodeError('Invalid payload padding')
try:
signature = base64url_decode(crypto_segment)
except (TypeError, binascii.Error):
raise DecodeError('Invalid crypto padding')
return (payload, signing_input, header, signature)
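    # A compact JWS is three base64url segments, header.payload.signature;
    # _load peels the signature off the right, then splits the remaining
    # signing input into header and payload, e.g.:
    #   b'xxx.yyy.zzz'.rsplit(b'.', 1) -> (b'xxx.yyy', b'zzz')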
def _verify_signature(self, payload, signing_input, header, signature,
key='', algorithms=None):
alg = header.get('alg')
if algorithms is not None and alg not in algorithms:
raise InvalidAlgorithmError('The specified alg value is not allowed')
try:
alg_obj = self._algorithms[alg]
key = alg_obj.prepare_key(key)
if not alg_obj.verify(signing_input, key, signature):
raise InvalidSignatureError('Signature verification failed')
except KeyError:
raise InvalidAlgorithmError('Algorithm not supported')
def _validate_headers(self, headers):
if 'kid' in headers:
self._validate_kid(headers['kid'])
def _validate_kid(self, kid):
if not isinstance(kid, string_types):
raise InvalidTokenError('Key ID header parameter must be a string')
_jws_global_obj = PyJWS()
encode = _jws_global_obj.encode
decode = _jws_global_obj.decode
register_algorithm = _jws_global_obj.register_algorithm
unregister_algorithm = _jws_global_obj.unregister_algorithm
get_unverified_header = _jws_global_obj.get_unverified_header
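# Minimal round-trip sketch using the module-level helpers (note the
# payload must already be bytes, since PyJWS signs raw segments):
#   token = encode(b'{"sub": "user"}', 'secret', algorithm='HS256')
#   payload = decode(token, 'secret', algorithms=['HS256'])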
|
dfalt974/SickRage
|
lib/jwt/api_jws.py
|
Python
|
gpl-3.0
| 7,599
|
"""Make sure that Mysa Living is enumerated properly."""
from homeassistant.helpers import device_registry as dr, entity_registry as er
from tests.components.homekit_controller.common import (
Helper,
setup_accessories_from_file,
setup_test_accessories,
)
async def test_mysa_living_setup(hass):
"""Test that the accessory can be correctly setup in HA."""
accessories = await setup_accessories_from_file(hass, "mysa_living.json")
config_entry, pairing = await setup_test_accessories(hass, accessories)
entity_registry = er.async_get(hass)
device_registry = dr.async_get(hass)
    # Check that the humidity sensor is handled correctly
entry = entity_registry.async_get("sensor.mysa_85dda9_current_humidity")
assert entry.unique_id == "homekit-AAAAAAA000-aid:1-sid:20-cid:27"
helper = Helper(
hass,
"sensor.mysa_85dda9_current_humidity",
pairing,
accessories[0],
config_entry,
)
state = await helper.poll_and_get_state()
assert state.attributes["friendly_name"] == "Mysa-85dda9 - Current Humidity"
device = device_registry.async_get(entry.device_id)
assert device.manufacturer == "Empowered Homes Inc."
assert device.name == "Mysa-85dda9"
assert device.model == "v1"
assert device.sw_version == "2.8.1"
assert device.via_device_id is None
# Assert the temperature sensor is detected
entry = entity_registry.async_get("sensor.mysa_85dda9_current_temperature")
assert entry.unique_id == "homekit-AAAAAAA000-aid:1-sid:20-cid:25"
helper = Helper(
hass,
"sensor.mysa_85dda9_current_temperature",
pairing,
accessories[0],
config_entry,
)
state = await helper.poll_and_get_state()
assert state.attributes["friendly_name"] == "Mysa-85dda9 - Current Temperature"
# The sensor should be part of the same device
assert entry.device_id == device.id
# Assert the light is detected
entry = entity_registry.async_get("light.mysa_85dda9")
assert entry.unique_id == "homekit-AAAAAAA000-40"
helper = Helper(
hass,
"light.mysa_85dda9",
pairing,
accessories[0],
config_entry,
)
state = await helper.poll_and_get_state()
assert state.attributes["friendly_name"] == "Mysa-85dda9"
# The light should be part of the same device
assert entry.device_id == device.id
# Assert the climate entity is detected
entry = entity_registry.async_get("climate.mysa_85dda9")
assert entry.unique_id == "homekit-AAAAAAA000-20"
helper = Helper(
hass,
"climate.mysa_85dda9",
pairing,
accessories[0],
config_entry,
)
state = await helper.poll_and_get_state()
assert state.attributes["friendly_name"] == "Mysa-85dda9"
# The climate entity should be part of the same device
assert entry.device_id == device.id
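# Hypothetical helper mirroring the unique_id convention the assertions
# above rely on (serial, accessory id, service id, characteristic id);
# it is not part of Home Assistant's actual code, just an illustration.
def homekit_unique_id(serial, aid, sid, cid):
    return "homekit-{0}-aid:{1}-sid:{2}-cid:{3}".format(serial, aid, sid, cid)

assert homekit_unique_id("AAAAAAA000", 1, 20, 27) == "homekit-AAAAAAA000-aid:1-sid:20-cid:27"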
|
lukas-hetzenecker/home-assistant
|
tests/components/homekit_controller/specific_devices/test_mysa_living.py
|
Python
|
apache-2.0
| 2,910
|
from __future__ import unicode_literals
import time
import warnings
from datetime import datetime, timedelta
from StringIO import StringIO
from django.conf import settings
from django.core.handlers.wsgi import WSGIRequest, LimitedStream
from django.http import HttpRequest, HttpResponse, parse_cookie, build_request_repr, UnreadablePostError
from django.test.utils import str_prefix
from django.utils import unittest
from django.utils.http import cookie_date
from django.utils.timezone import utc
class RequestsTests(unittest.TestCase):
def test_httprequest(self):
request = HttpRequest()
self.assertEqual(request.GET.keys(), [])
self.assertEqual(request.POST.keys(), [])
self.assertEqual(request.COOKIES.keys(), [])
self.assertEqual(request.META.keys(), [])
def test_httprequest_repr(self):
request = HttpRequest()
request.path = '/somepath/'
request.GET = {'get-key': 'get-value'}
request.POST = {'post-key': 'post-value'}
request.COOKIES = {'post-key': 'post-value'}
request.META = {'post-key': 'post-value'}
self.assertEqual(repr(request), str_prefix("<HttpRequest\npath:/somepath/,\nGET:{%(_)s'get-key': %(_)s'get-value'},\nPOST:{%(_)s'post-key': %(_)s'post-value'},\nCOOKIES:{%(_)s'post-key': %(_)s'post-value'},\nMETA:{%(_)s'post-key': %(_)s'post-value'}>"))
self.assertEqual(build_request_repr(request), repr(request))
self.assertEqual(build_request_repr(request, path_override='/otherpath/', GET_override={'a': 'b'}, POST_override={'c': 'd'}, COOKIES_override={'e': 'f'}, META_override={'g': 'h'}),
str_prefix("<HttpRequest\npath:/otherpath/,\nGET:{%(_)s'a': %(_)s'b'},\nPOST:{%(_)s'c': %(_)s'd'},\nCOOKIES:{%(_)s'e': %(_)s'f'},\nMETA:{%(_)s'g': %(_)s'h'}>"))
def test_wsgirequest(self):
request = WSGIRequest({'PATH_INFO': 'bogus', 'REQUEST_METHOD': 'bogus', 'wsgi.input': StringIO('')})
self.assertEqual(request.GET.keys(), [])
self.assertEqual(request.POST.keys(), [])
self.assertEqual(request.COOKIES.keys(), [])
self.assertEqual(set(request.META.keys()), set(['PATH_INFO', 'REQUEST_METHOD', 'SCRIPT_NAME', 'wsgi.input']))
self.assertEqual(request.META['PATH_INFO'], 'bogus')
self.assertEqual(request.META['REQUEST_METHOD'], 'bogus')
self.assertEqual(request.META['SCRIPT_NAME'], '')
def test_wsgirequest_repr(self):
request = WSGIRequest({'PATH_INFO': '/somepath/', 'REQUEST_METHOD': 'get', 'wsgi.input': StringIO('')})
request.GET = {'get-key': 'get-value'}
request.POST = {'post-key': 'post-value'}
request.COOKIES = {'post-key': 'post-value'}
request.META = {'post-key': 'post-value'}
self.assertEqual(repr(request), str_prefix("<WSGIRequest\npath:/somepath/,\nGET:{%(_)s'get-key': %(_)s'get-value'},\nPOST:{%(_)s'post-key': %(_)s'post-value'},\nCOOKIES:{%(_)s'post-key': %(_)s'post-value'},\nMETA:{%(_)s'post-key': %(_)s'post-value'}>"))
self.assertEqual(build_request_repr(request), repr(request))
self.assertEqual(build_request_repr(request, path_override='/otherpath/', GET_override={'a': 'b'}, POST_override={'c': 'd'}, COOKIES_override={'e': 'f'}, META_override={'g': 'h'}),
str_prefix("<WSGIRequest\npath:/otherpath/,\nGET:{%(_)s'a': %(_)s'b'},\nPOST:{%(_)s'c': %(_)s'd'},\nCOOKIES:{%(_)s'e': %(_)s'f'},\nMETA:{%(_)s'g': %(_)s'h'}>"))
def test_parse_cookie(self):
self.assertEqual(parse_cookie('invalid:key=true'), {})
def test_httprequest_location(self):
request = HttpRequest()
self.assertEqual(request.build_absolute_uri(location="https://www.example.com/asdf"),
'https://www.example.com/asdf')
request.get_host = lambda: 'www.example.com'
request.path = ''
self.assertEqual(request.build_absolute_uri(location="/path/with:colons"),
'http://www.example.com/path/with:colons')
def test_http_get_host(self):
old_USE_X_FORWARDED_HOST = settings.USE_X_FORWARDED_HOST
try:
settings.USE_X_FORWARDED_HOST = False
# Check if X_FORWARDED_HOST is provided.
request = HttpRequest()
request.META = {
'HTTP_X_FORWARDED_HOST': 'forward.com',
'HTTP_HOST': 'example.com',
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 80,
}
# X_FORWARDED_HOST is ignored.
self.assertEqual(request.get_host(), 'example.com')
# Check if X_FORWARDED_HOST isn't provided.
request = HttpRequest()
request.META = {
'HTTP_HOST': 'example.com',
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 80,
}
self.assertEqual(request.get_host(), 'example.com')
# Check if HTTP_HOST isn't provided.
request = HttpRequest()
request.META = {
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 80,
}
self.assertEqual(request.get_host(), 'internal.com')
# Check if HTTP_HOST isn't provided, and we're on a nonstandard port
request = HttpRequest()
request.META = {
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 8042,
}
self.assertEqual(request.get_host(), 'internal.com:8042')
finally:
settings.USE_X_FORWARDED_HOST = old_USE_X_FORWARDED_HOST
def test_http_get_host_with_x_forwarded_host(self):
old_USE_X_FORWARDED_HOST = settings.USE_X_FORWARDED_HOST
try:
settings.USE_X_FORWARDED_HOST = True
# Check if X_FORWARDED_HOST is provided.
request = HttpRequest()
request.META = {
'HTTP_X_FORWARDED_HOST': 'forward.com',
'HTTP_HOST': 'example.com',
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 80,
}
# X_FORWARDED_HOST is obeyed.
self.assertEqual(request.get_host(), 'forward.com')
# Check if X_FORWARDED_HOST isn't provided.
request = HttpRequest()
request.META = {
'HTTP_HOST': 'example.com',
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 80,
}
self.assertEqual(request.get_host(), 'example.com')
# Check if HTTP_HOST isn't provided.
request = HttpRequest()
request.META = {
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 80,
}
self.assertEqual(request.get_host(), 'internal.com')
# Check if HTTP_HOST isn't provided, and we're on a nonstandard port
request = HttpRequest()
request.META = {
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 8042,
}
self.assertEqual(request.get_host(), 'internal.com:8042')
finally:
settings.USE_X_FORWARDED_HOST = old_USE_X_FORWARDED_HOST
def test_near_expiration(self):
"Cookie will expire when an near expiration time is provided"
response = HttpResponse()
# There is a timing weakness in this test; the
# expected result for max-age requires that there be
# a very slight difference between the evaluated expiration
# time, and the time evaluated in set_cookie(). If this
# difference doesn't exist, the cookie time will be
# 1 second larger. To avoid the problem, put in a quick sleep,
# which guarantees that there will be a time difference.
expires = datetime.utcnow() + timedelta(seconds=10)
time.sleep(0.001)
response.set_cookie('datetime', expires=expires)
datetime_cookie = response.cookies['datetime']
self.assertEqual(datetime_cookie['max-age'], 10)
def test_aware_expiration(self):
"Cookie accepts an aware datetime as expiration time"
response = HttpResponse()
expires = (datetime.utcnow() + timedelta(seconds=10)).replace(tzinfo=utc)
time.sleep(0.001)
response.set_cookie('datetime', expires=expires)
datetime_cookie = response.cookies['datetime']
self.assertEqual(datetime_cookie['max-age'], 10)
def test_far_expiration(self):
"Cookie will expire when an distant expiration time is provided"
response = HttpResponse()
response.set_cookie('datetime', expires=datetime(2028, 1, 1, 4, 5, 6))
datetime_cookie = response.cookies['datetime']
self.assertEqual(datetime_cookie['expires'], 'Sat, 01-Jan-2028 04:05:06 GMT')
def test_max_age_expiration(self):
"Cookie will expire if max_age is provided"
response = HttpResponse()
response.set_cookie('max_age', max_age=10)
max_age_cookie = response.cookies['max_age']
self.assertEqual(max_age_cookie['max-age'], 10)
self.assertEqual(max_age_cookie['expires'], cookie_date(time.time()+10))
def test_httponly_cookie(self):
response = HttpResponse()
response.set_cookie('example', httponly=True)
example_cookie = response.cookies['example']
# A compat cookie may be in use -- check that it has worked
# both as an output string, and using the cookie attributes
self.assertTrue('; httponly' in str(example_cookie))
self.assertTrue(example_cookie['httponly'])
def test_limited_stream(self):
# Read all of a limited stream
stream = LimitedStream(StringIO(b'test'), 2)
self.assertEqual(stream.read(), b'te')
# Reading again returns nothing.
self.assertEqual(stream.read(), b'')
# Read a number of characters greater than the stream has to offer
stream = LimitedStream(StringIO(b'test'), 2)
self.assertEqual(stream.read(5), b'te')
# Reading again returns nothing.
self.assertEqual(stream.readline(5), b'')
# Read sequentially from a stream
stream = LimitedStream(StringIO(b'12345678'), 8)
self.assertEqual(stream.read(5), b'12345')
self.assertEqual(stream.read(5), b'678')
# Reading again returns nothing.
self.assertEqual(stream.readline(5), b'')
# Read lines from a stream
stream = LimitedStream(StringIO(b'1234\n5678\nabcd\nefgh\nijkl'), 24)
# Read a full line, unconditionally
self.assertEqual(stream.readline(), b'1234\n')
# Read a number of characters less than a line
self.assertEqual(stream.readline(2), b'56')
# Read the rest of the partial line
self.assertEqual(stream.readline(), b'78\n')
# Read a full line, with a character limit greater than the line length
self.assertEqual(stream.readline(6), b'abcd\n')
# Read the next line, deliberately terminated at the line end
self.assertEqual(stream.readline(4), b'efgh')
# Read the next line... just the line end
self.assertEqual(stream.readline(), b'\n')
# Read everything else.
self.assertEqual(stream.readline(), b'ijkl')
# Regression for #15018
# If a stream contains a newline, but the provided length
# is less than the number of provided characters, the newline
# doesn't reset the available character count
stream = LimitedStream(StringIO(b'1234\nabcdef'), 9)
self.assertEqual(stream.readline(10), b'1234\n')
self.assertEqual(stream.readline(3), b'abc')
# Now expire the available characters
self.assertEqual(stream.readline(3), b'd')
# Reading again returns nothing.
self.assertEqual(stream.readline(2), b'')
# Same test, but with read, not readline.
stream = LimitedStream(StringIO(b'1234\nabcdef'), 9)
self.assertEqual(stream.read(6), b'1234\na')
self.assertEqual(stream.read(2), b'bc')
self.assertEqual(stream.read(2), b'd')
self.assertEqual(stream.read(2), b'')
self.assertEqual(stream.read(), b'')
def test_stream(self):
payload = b'name=value'
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_LENGTH': len(payload),
'wsgi.input': StringIO(payload)})
self.assertEqual(request.read(), b'name=value')
def test_read_after_value(self):
"""
Reading from request is allowed after accessing request contents as
POST or body.
"""
payload = b'name=value'
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_LENGTH': len(payload),
'wsgi.input': StringIO(payload)})
self.assertEqual(request.POST, {'name': ['value']})
self.assertEqual(request.body, b'name=value')
self.assertEqual(request.read(), b'name=value')
def test_value_after_read(self):
"""
Construction of POST or body is not allowed after reading
from request.
"""
payload = b'name=value'
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_LENGTH': len(payload),
'wsgi.input': StringIO(payload)})
self.assertEqual(request.read(2), b'na')
self.assertRaises(Exception, lambda: request.body)
self.assertEqual(request.POST, {})
def test_body_after_POST_multipart(self):
"""
Reading body after parsing multipart is not allowed
"""
# Because multipart is used for large amounts of data, i.e. file uploads,
# we don't want the data held in memory twice, and we don't want to
# silence the error by setting body = '' either.
payload = "\r\n".join([
'--boundary',
'Content-Disposition: form-data; name="name"',
'',
'value',
'--boundary--'
'']).encode('utf-8')
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
'CONTENT_LENGTH': len(payload),
'wsgi.input': StringIO(payload)})
self.assertEqual(request.POST, {'name': ['value']})
self.assertRaises(Exception, lambda: request.body)
def test_POST_multipart_with_content_length_zero(self):
"""
Multipart POST requests with Content-Length >= 0 are valid and need to be handled.
"""
# According to:
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.13
# Every request.POST with Content-Length >= 0 is a valid request,
# this test ensures that we handle Content-Length == 0.
payload = "\r\n".join([
'--boundary',
'Content-Disposition: form-data; name="name"',
'',
'value',
'--boundary--'
'']).encode('utf-8')
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
'CONTENT_LENGTH': 0,
'wsgi.input': StringIO(payload)})
self.assertEqual(request.POST, {})
def test_read_by_lines(self):
payload = b'name=value'
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_LENGTH': len(payload),
'wsgi.input': StringIO(payload)})
self.assertEqual(list(request), [b'name=value'])
def test_POST_after_body_read(self):
"""
POST should be populated even if body is read first
"""
payload = b'name=value'
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_LENGTH': len(payload),
'wsgi.input': StringIO(payload)})
raw_data = request.body
self.assertEqual(request.POST, {'name': ['value']})
def test_POST_after_body_read_and_stream_read(self):
"""
POST should be populated even if body is read first, and then
the stream is read second.
"""
payload = b'name=value'
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_LENGTH': len(payload),
'wsgi.input': StringIO(payload)})
raw_data = request.body
self.assertEqual(request.read(1), b'n')
self.assertEqual(request.POST, {'name': ['value']})
def test_POST_after_body_read_and_stream_read_multipart(self):
"""
POST should be populated even if body is read first, and then
the stream is read second. Using multipart/form-data instead of urlencoded.
"""
payload = "\r\n".join([
'--boundary',
'Content-Disposition: form-data; name="name"',
'',
'value',
'--boundary--'
'']).encode('utf-8')
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
'CONTENT_LENGTH': len(payload),
'wsgi.input': StringIO(payload)})
raw_data = request.body
# Consume enough data to mess up the parsing:
self.assertEqual(request.read(13), b'--boundary\r\nC')
self.assertEqual(request.POST, {'name': ['value']})
def test_raw_post_data_returns_body(self):
"""
HttpRequest.raw_post_data should be the same as HttpRequest.body
"""
payload = b'Hello There!'
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_LENGTH': len(payload),
'wsgi.input': StringIO(payload)
})
with warnings.catch_warnings(record=True):
self.assertEqual(request.body, request.raw_post_data)
def test_POST_connection_error(self):
"""
If wsgi.input.read() raises an exception while trying to read() the
POST, the exception should be identifiable (not a generic IOError).
"""
class ExplodingStringIO(StringIO):
def read(self, len=0):
raise IOError("kaboom!")
payload = b'name=value'
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_LENGTH': len(payload),
'wsgi.input': ExplodingStringIO(payload)})
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
with self.assertRaises(UnreadablePostError):
request.raw_post_data
self.assertEqual(len(w), 1)
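# Toy stand-in illustrating the bounded-read behaviour the LimitedStream
# tests above exercise; a sketch only, not Django's implementation.
class BoundedReader(object):
    def __init__(self, stream, limit):
        self.stream = stream
        self.remaining = limit

    def read(self, size=None):
        # Never hand out more bytes than the declared limit allows.
        n = self.remaining if size is None else min(size, self.remaining)
        data = self.stream.read(n)
        self.remaining -= len(data)
        return data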
|
cobalys/django
|
tests/regressiontests/requests/tests.py
|
Python
|
bsd-3-clause
| 19,041
|
from __future__ import unicode_literals
from reviewboard.signals import initializing
def connect_signals(**kwargs):
"""
Listens to the ``initializing`` signal and tells other modules to
connect their signals. This is done to guarantee that Django is
loaded first.
"""
from reviewboard.notifications import email, webhooks
email.connect_signals()
webhooks.connect_signals()
initializing.connect(connect_signals)
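# Hedged sketch of the pattern the docstring above describes: each
# participating module exposes its own connect_signals() and wires up
# Django signals only once Django is initialized. The handler below is
# illustrative, not Review Board's actual notifications code.
def example_connect_signals():
    from django.db.models.signals import post_save

    def on_save(sender, **kwargs):
        pass  # e.g. queue an e-mail or webhook notification

    # weak=False keeps the locally-defined handler alive.
    post_save.connect(on_save, weak=False)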
|
chipx86/reviewboard
|
reviewboard/notifications/__init__.py
|
Python
|
mit
| 455
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class RetentionPolicyParameters(Model):
"""Parameters that define the retention policy for flow log.
:param days: Number of days to retain flow log records. Default value: 0.
:type days: int
:param enabled: Flag to enable/disable retention. Default value: False.
:type enabled: bool
"""
_attribute_map = {
'days': {'key': 'days', 'type': 'int'},
'enabled': {'key': 'enabled', 'type': 'bool'},
}
def __init__(self, **kwargs):
super(RetentionPolicyParameters, self).__init__(**kwargs)
self.days = kwargs.get('days', 0)
self.enabled = kwargs.get('enabled', False)
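# A minimal construction sketch for the model above; serialize() is
# inherited from the msrest Model base class (assumed default msrest
# behaviour), and the values here are illustrative only.
policy = RetentionPolicyParameters(days=30, enabled=True)
body = policy.serialize()  # e.g. {'days': 30, 'enabled': True}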
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-network/azure/mgmt/network/v2018_01_01/models/retention_policy_parameters.py
|
Python
|
mit
| 1,157
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Google
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file at
# https://www.github.com/GoogleCloudPlatform/magic-modules
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
__metaclass__ = type
################################################################################
# Documentation
################################################################################
ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gcp_compute_target_https_proxy
description:
- Represents a TargetHttpsProxy resource, which is used by one or more global forwarding
rules to route incoming HTTPS requests to a URL map.
short_description: Creates a GCP TargetHttpsProxy
version_added: 2.6
author: Google Inc. (@googlecloudplatform)
requirements:
- python >= 2.6
- requests >= 2.18.4
- google-auth >= 1.3.0
options:
state:
description:
- Whether the given object should exist in GCP
choices:
- present
- absent
default: present
description:
description:
- An optional description of this resource.
required: false
name:
description:
- Name of the resource. Provided by the client when the resource is created. The
name must be 1-63 characters long, and comply with RFC1035. Specifically, the
name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`
which means the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last character,
which cannot be a dash.
required: true
quic_override:
description:
- Specifies the QUIC override policy for this resource. This determines whether
the load balancer will attempt to negotiate QUIC with clients or not. Can specify
one of NONE, ENABLE, or DISABLE. If NONE is specified, uses the QUIC policy
with no user overrides, which is equivalent to DISABLE. Not specifying this
field is equivalent to specifying NONE.
required: false
version_added: 2.7
choices:
- NONE
- ENABLE
- DISABLE
ssl_certificates:
description:
- A list of SslCertificate resources that are used to authenticate connections
between users and the load balancer. Currently, exactly one SSL certificate
must be specified.
required: true
ssl_policy:
description:
- A reference to the SslPolicy resource that will be associated with the TargetHttpsProxy
resource. If not set, the TargetHttpsProxy resource will not have any SSL policy
configured.
- 'This field represents a link to a SslPolicy resource in GCP. It can be specified
in two ways. First, you can place the selfLink of the resource here as a
string. Alternatively, you can add `register: name-of-resource` to a gcp_compute_ssl_policy
task and then set this ssl_policy field to "{{ name-of-resource }}"'
required: false
version_added: 2.8
url_map:
description:
- A reference to the UrlMap resource that defines the mapping from URL to the
BackendService.
- 'This field represents a link to a UrlMap resource in GCP. It can be specified
in two ways. First, you can place the selfLink of the resource here as a
string. Alternatively, you can add `register: name-of-resource` to a gcp_compute_url_map
task and then set this url_map field to "{{ name-of-resource }}"'
required: true
extends_documentation_fragment: gcp
notes:
- 'API Reference: U(https://cloud.google.com/compute/docs/reference/latest/targetHttpsProxies)'
- 'Official Documentation: U(https://cloud.google.com/compute/docs/load-balancing/http/target-proxies)'
'''
EXAMPLES = '''
- name: create an instance group
gcp_compute_instance_group:
name: "instancegroup-targethttpsproxy"
zone: us-central1-a
project: "{{ gcp_project }}"
auth_kind: "{{ gcp_cred_kind }}"
service_account_file: "{{ gcp_cred_file }}"
state: present
register: instancegroup
- name: create an HTTP health check
gcp_compute_http_health_check:
name: "httphealthcheck-targethttpsproxy"
healthy_threshold: 10
port: 8080
timeout_sec: 2
unhealthy_threshold: 5
project: "{{ gcp_project }}"
auth_kind: "{{ gcp_cred_kind }}"
service_account_file: "{{ gcp_cred_file }}"
state: present
register: healthcheck
- name: create a backend service
gcp_compute_backend_service:
name: "backendservice-targethttpsproxy"
backends:
- group: "{{ instancegroup }}"
health_checks:
- "{{ healthcheck.selfLink }}"
enable_cdn: true
project: "{{ gcp_project }}"
auth_kind: "{{ gcp_cred_kind }}"
service_account_file: "{{ gcp_cred_file }}"
state: present
register: backendservice
- name: create a URL map
gcp_compute_url_map:
name: "urlmap-targethttpsproxy"
default_service: "{{ backendservice }}"
project: "{{ gcp_project }}"
auth_kind: "{{ gcp_cred_kind }}"
service_account_file: "{{ gcp_cred_file }}"
state: present
register: urlmap
- name: create an SSL certificate
gcp_compute_ssl_certificate:
name: "sslcert-targethttpsproxy"
description: A certificate for testing. Do not use this certificate in production
certificate: |
-----BEGIN CERTIFICATE-----
MIICqjCCAk+gAwIBAgIJAIuJ+0352Kq4MAoGCCqGSM49BAMCMIGwMQswCQYDVQQG
EwJVUzETMBEGA1UECAwKV2FzaGluZ3RvbjERMA8GA1UEBwwIS2lya2xhbmQxFTAT
BgNVBAoMDEdvb2dsZSwgSW5jLjEeMBwGA1UECwwVR29vZ2xlIENsb3VkIFBsYXRm
b3JtMR8wHQYDVQQDDBZ3d3cubXktc2VjdXJlLXNpdGUuY29tMSEwHwYJKoZIhvcN
AQkBFhJuZWxzb25hQGdvb2dsZS5jb20wHhcNMTcwNjI4MDQ1NjI2WhcNMjcwNjI2
MDQ1NjI2WjCBsDELMAkGA1UEBhMCVVMxEzARBgNVBAgMCldhc2hpbmd0b24xETAP
BgNVBAcMCEtpcmtsYW5kMRUwEwYDVQQKDAxHb29nbGUsIEluYy4xHjAcBgNVBAsM
FUdvb2dsZSBDbG91ZCBQbGF0Zm9ybTEfMB0GA1UEAwwWd3d3Lm15LXNlY3VyZS1z
aXRlLmNvbTEhMB8GCSqGSIb3DQEJARYSbmVsc29uYUBnb29nbGUuY29tMFkwEwYH
KoZIzj0CAQYIKoZIzj0DAQcDQgAEHGzpcRJ4XzfBJCCPMQeXQpTXwlblimODQCuQ
4mzkzTv0dXyB750fOGN02HtkpBOZzzvUARTR10JQoSe2/5PIwaNQME4wHQYDVR0O
BBYEFKIQC3A2SDpxcdfn0YLKineDNq/BMB8GA1UdIwQYMBaAFKIQC3A2SDpxcdfn
0YLKineDNq/BMAwGA1UdEwQFMAMBAf8wCgYIKoZIzj0EAwIDSQAwRgIhALs4vy+O
M3jcqgA4fSW/oKw6UJxp+M6a+nGMX+UJR3YgAiEAvvl39QRVAiv84hdoCuyON0lJ
zqGNhIPGq2ULqXKK8BY=
-----END CERTIFICATE-----
private_key: |
-----BEGIN EC PRIVATE KEY-----
MHcCAQEEIObtRo8tkUqoMjeHhsOh2ouPpXCgBcP+EDxZCB/tws15oAoGCCqGSM49
AwEHoUQDQgAEHGzpcRJ4XzfBJCCPMQeXQpTXwlblimODQCuQ4mzkzTv0dXyB750f
OGN02HtkpBOZzzvUARTR10JQoSe2/5PIwQ==
-----END EC PRIVATE KEY-----
project: "{{ gcp_project }}"
auth_kind: "{{ gcp_cred_kind }}"
service_account_file: "{{ gcp_cred_file }}"
state: present
register: sslcert
- name: create a target HTTPS proxy
gcp_compute_target_https_proxy:
name: "test_object"
ssl_certificates:
- "{{ sslcert }}"
url_map: "{{ urlmap }}"
project: "test_project"
auth_kind: "serviceaccount"
service_account_file: "/tmp/auth.pem"
state: present
'''
RETURN = '''
creationTimestamp:
description:
- Creation timestamp in RFC3339 text format.
returned: success
type: str
description:
description:
- An optional description of this resource.
returned: success
type: str
id:
description:
- The unique identifier for the resource.
returned: success
type: int
name:
description:
- Name of the resource. Provided by the client when the resource is created. The
name must be 1-63 characters long, and comply with RFC1035. Specifically, the
name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`
which means the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last character,
which cannot be a dash.
returned: success
type: str
quicOverride:
description:
- Specifies the QUIC override policy for this resource. This determines whether
the load balancer will attempt to negotiate QUIC with clients or not. Can specify
one of NONE, ENABLE, or DISABLE. If NONE is specified, uses the QUIC policy with
no user overrides, which is equivalent to DISABLE. Not specifying this field is
equivalent to specifying NONE.
returned: success
type: str
sslCertificates:
description:
- A list of SslCertificate resources that are used to authenticate connections between
users and the load balancer. Currently, exactly one SSL certificate must be specified.
returned: success
type: list
sslPolicy:
description:
- A reference to the SslPolicy resource that will be associated with the TargetHttpsProxy
resource. If not set, the TargetHttpsProxy resource will not have any SSL policy
configured.
returned: success
type: str
urlMap:
description:
- A reference to the UrlMap resource that defines the mapping from URL to the BackendService.
returned: success
type: str
'''
################################################################################
# Imports
################################################################################
from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest, replace_resource_dict
import json
import time
################################################################################
# Main
################################################################################
def main():
"""Main function"""
module = GcpModule(
argument_spec=dict(
state=dict(default='present', choices=['present', 'absent'], type='str'),
description=dict(type='str'),
name=dict(required=True, type='str'),
quic_override=dict(type='str', choices=['NONE', 'ENABLE', 'DISABLE']),
ssl_certificates=dict(required=True, type='list'),
ssl_policy=dict(),
url_map=dict(required=True),
)
)
if not module.params['scopes']:
module.params['scopes'] = ['https://www.googleapis.com/auth/compute']
state = module.params['state']
kind = 'compute#targetHttpsProxy'
fetch = fetch_resource(module, self_link(module), kind)
changed = False
if fetch:
if state == 'present':
if is_different(module, fetch):
update(module, self_link(module), kind, fetch)
fetch = fetch_resource(module, self_link(module), kind)
changed = True
else:
delete(module, self_link(module), kind)
fetch = {}
changed = True
else:
if state == 'present':
fetch = create(module, collection(module), kind)
changed = True
else:
fetch = {}
fetch.update({'changed': changed})
module.exit_json(**fetch)
def create(module, link, kind):
auth = GcpSession(module, 'compute')
return wait_for_operation(module, auth.post(link, resource_to_request(module)))
def update(module, link, kind, fetch):
update_fields(module, resource_to_request(module), response_to_hash(module, fetch))
return fetch_resource(module, self_link(module), kind)
def update_fields(module, request, response):
if response.get('quicOverride') != request.get('quicOverride'):
quic_override_update(module, request, response)
if response.get('sslCertificates') != request.get('sslCertificates'):
ssl_certificates_update(module, request, response)
if response.get('sslPolicy') != request.get('sslPolicy'):
ssl_policy_update(module, request, response)
if response.get('urlMap') != request.get('urlMap'):
url_map_update(module, request, response)
def quic_override_update(module, request, response):
auth = GcpSession(module, 'compute')
auth.post(
''.join(["https://www.googleapis.com/compute/v1/", "projects/{project}/global/targetHttpsProxies/{name}/setQuicOverride"]).format(**module.params),
{u'quicOverride': module.params.get('quic_override')},
)
def ssl_certificates_update(module, request, response):
auth = GcpSession(module, 'compute')
auth.post(
''.join(["https://www.googleapis.com/compute/v1/", "projects/{project}/targetHttpsProxies/{name}/setSslCertificates"]).format(**module.params),
{u'sslCertificates': replace_resource_dict(module.params.get('ssl_certificates', []), 'selfLink')},
)
def ssl_policy_update(module, request, response):
auth = GcpSession(module, 'compute')
auth.post(
''.join(["https://www.googleapis.com/compute/v1/", "projects/{project}/global/targetHttpsProxies/{name}/setSslPolicy"]).format(**module.params),
{u'sslPolicy': replace_resource_dict(module.params.get(u'ssl_policy', {}), 'selfLink')},
)
def url_map_update(module, request, response):
auth = GcpSession(module, 'compute')
auth.post(
''.join(["https://www.googleapis.com/compute/v1/", "projects/{project}/targetHttpsProxies/{name}/setUrlMap"]).format(**module.params),
{u'urlMap': replace_resource_dict(module.params.get(u'url_map', {}), 'selfLink')},
)
def delete(module, link, kind):
auth = GcpSession(module, 'compute')
return wait_for_operation(module, auth.delete(link))
def resource_to_request(module):
request = {
u'kind': 'compute#targetHttpsProxy',
u'description': module.params.get('description'),
u'name': module.params.get('name'),
u'quicOverride': module.params.get('quic_override'),
u'sslCertificates': replace_resource_dict(module.params.get('ssl_certificates', []), 'selfLink'),
u'sslPolicy': replace_resource_dict(module.params.get(u'ssl_policy', {}), 'selfLink'),
u'urlMap': replace_resource_dict(module.params.get(u'url_map', {}), 'selfLink'),
}
return_vals = {}
for k, v in request.items():
if v or v is False:
return_vals[k] = v
return return_vals
def fetch_resource(module, link, kind, allow_not_found=True):
auth = GcpSession(module, 'compute')
return return_if_object(module, auth.get(link), kind, allow_not_found)
def self_link(module):
return "https://www.googleapis.com/compute/v1/projects/{project}/global/targetHttpsProxies/{name}".format(**module.params)
def collection(module):
return "https://www.googleapis.com/compute/v1/projects/{project}/global/targetHttpsProxies".format(**module.params)
def return_if_object(module, response, kind, allow_not_found=False):
# If not found, return nothing.
if allow_not_found and response.status_code == 404:
return None
# If no content, return nothing.
if response.status_code == 204:
return None
try:
module.raise_for_status(response)
result = response.json()
except getattr(json.decoder, 'JSONDecodeError', ValueError):
module.fail_json(msg="Invalid JSON response with error: %s" % response.text)
if navigate_hash(result, ['error', 'errors']):
module.fail_json(msg=navigate_hash(result, ['error', 'errors']))
return result
def is_different(module, response):
request = resource_to_request(module)
response = response_to_hash(module, response)
# Remove all output-only from response.
response_vals = {}
for k, v in response.items():
if k in request:
response_vals[k] = v
request_vals = {}
for k, v in request.items():
if k in response:
request_vals[k] = v
return GcpRequest(request_vals) != GcpRequest(response_vals)
# Remove unnecessary properties from the response.
# This is for doing comparisons with Ansible's current parameters.
def response_to_hash(module, response):
return {
u'creationTimestamp': response.get(u'creationTimestamp'),
u'description': module.params.get('description'),
u'id': response.get(u'id'),
u'name': module.params.get('name'),
u'quicOverride': response.get(u'quicOverride'),
u'sslCertificates': response.get(u'sslCertificates'),
u'sslPolicy': response.get(u'sslPolicy'),
u'urlMap': response.get(u'urlMap'),
}
def async_op_url(module, extra_data=None):
if extra_data is None:
extra_data = {}
url = "https://www.googleapis.com/compute/v1/projects/{project}/global/operations/{op_id}"
combined = extra_data.copy()
combined.update(module.params)
return url.format(**combined)
def wait_for_operation(module, response):
op_result = return_if_object(module, response, 'compute#operation')
if op_result is None:
return {}
status = navigate_hash(op_result, ['status'])
wait_done = wait_for_completion(status, op_result, module)
return fetch_resource(module, navigate_hash(wait_done, ['targetLink']), 'compute#targetHttpsProxy')
def wait_for_completion(status, op_result, module):
op_id = navigate_hash(op_result, ['name'])
op_uri = async_op_url(module, {'op_id': op_id})
while status != 'DONE':
raise_if_errors(op_result, ['error', 'errors'], module)
time.sleep(1.0)
op_result = fetch_resource(module, op_uri, 'compute#operation', False)
status = navigate_hash(op_result, ['status'])
return op_result
def raise_if_errors(response, err_path, module):
errors = navigate_hash(response, err_path)
if errors is not None:
module.fail_json(msg=errors)
if __name__ == '__main__':
main()
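# Hedged illustration of the comparison idea in is_different() above,
# using plain dicts in place of GcpRequest; the values are illustrative.
_request = {'name': 'proxy-1', 'quicOverride': 'ENABLE'}
_response = {'name': 'proxy-1', 'quicOverride': 'NONE', 'id': 12345}
_shared = set(_request) & set(_response)
assert any(_request[k] != _response[k] for k in _shared)  # quicOverride differs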
|
andmos/ansible
|
lib/ansible/modules/cloud/google/gcp_compute_target_https_proxy.py
|
Python
|
gpl-3.0
| 18,291
|
#!/usr/bin/python
#
# Cppcheck - A tool for static C/C++ code analysis
# Copyright (C) 2007-2016 Cppcheck team.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Extract test case information from a Cppcheck test
file
"""
import os
import sys
import re
class Extract:
"""
Read Cppcheck test file and create data
representation
"""
# array that stores all the test cases
nodes = []
def parseFile(self, filename):
"""
parse test file and add info to the nodes
variable
"""
name = '[0-9a-zA-Z_]+'
string = '\\"(.+)\\"'
testclass = None
functionName = None
code = None
fin = open(filename, 'r')
for line in fin:
# testclass starts
res = re.match('class (' + name + ')', line)
if res is not None:
testclass = res.group(1)
# end of testclass
if re.match('};', line) is not None:
testclass = None
# function start
res = re.match('\\s+void (' + name + ')\\(\\)', line)
if res is not None:
functionName = res.group(1)
elif re.match('\\s+}', line) is not None:
functionName = None
if functionName is None:
continue
# check
res = re.match('\\s+check.*\\(' + string, line)
if res is not None:
code = res.group(1)
# code..
if code is not None:
res = re.match('\\s+' + string, line)
if res is not None:
code = code + res.group(1)
# assert
res = re.match('\\s+ASSERT_EQUALS\\(\\"([^"]*)\\",', line)
if res is not None and code is not None:
node = {'testclass': testclass,
'functionName': functionName,
'code': code,
'expected': res.group(1)}
self.nodes.append(node)
code = None
# close test file
fin.close()
def strtoxml(s):
"""Convert string to xml/html format"""
return s.replace('&', '&amp;').replace('"', '&quot;').replace('<', '&lt;').replace('>', '&gt;')
def trimname(name):
"""Trim test name. Trailing underscore and digits are removed"""
while name[-1].isdigit():
name = name[:-1]
if name[-1] == '_':
name = name[:-1]
return name
def writeHtmlFile(nodes, functionName, filename, errorsOnly):
"""Write html file for a function name"""
fout = open(filename, 'w')
fout.write('<html>\n')
fout.write('<head>\n')
fout.write(' <style type="text/css">\n')
fout.write(' body { font-size: 0.8em }\n')
fout.write(
' th { background-color: #A3C159; text-transform: uppercase }\n')
fout.write(' td { background-color: white; vertical-align: text-top }\n')
fout.write(' pre { background-color: #EEEEEE }\n')
fout.write(' </style>\n')
fout.write('</head>\n')
fout.write('<body>\n')
fout.write('<a href="index.htm">Home</a> -- ')
if errorsOnly:
fout.write('<a href="all-' + functionName + '.htm">All test cases</a>')
else:
fout.write(
'<a href="errors-' + functionName + '.htm">Error test cases</a>')
fout.write('<br><br>')
testclass = None
num = 0
for node in nodes:
if errorsOnly and node['expected'] == '':
continue
if trimname(node['functionName']) == functionName:
num = num + 1
if not testclass:
testclass = node['testclass']
fout.write(
'<h1>' + node['testclass'] + '::' + functionName + '</h1>')
fout.write('<table border="0" cellspacing="0">\n')
fout.write(
' <tr><th>Nr</th><th>Code</th><th>Expected</th></tr>\n')
fout.write(' <tr><td>' + str(num) + '</td>')
fout.write('<td><pre>' + strtoxml(
node['code']).replace('\\n', '\n') + '</pre></td>')
fout.write(
'<td>' + strtoxml(node['expected']).replace('\\n', '<br>') + '</td>')
fout.write('</tr>\n')
if testclass is not None:
fout.write('</table>\n')
fout.write('</body></html>\n')
fout.close()
if len(sys.argv) <= 1 or '--help' in sys.argv:
print ('Extract test cases from test file')
print (
'Syntax: extracttests.py [--html=folder] [--xml] [--code=folder] path/testfile.cpp')
sys.exit(0)
# parse command line
xml = False
filename = None
htmldir = None
codedir = None
for arg in sys.argv[1:]:
if arg == '--xml':
xml = True
elif arg.startswith('--html='):
htmldir = arg[7:]
elif arg.startswith('--code='):
codedir = arg[7:]
elif arg.endswith('.cpp'):
filename = arg
else:
print ('Invalid option: ' + arg)
sys.exit(1)
# extract test cases
if filename is not None:
# parse test file
e = Extract()
e.parseFile(filename)
# generate output
if xml:
print ('<?xml version="1.0"?>')
print ('<tree>')
count = 0
for node in e.nodes:
s = ' <node'
s += ' function="' + node['functionName'] + '"'
s += ' code="' + strtoxml(node['code']) + '"'
s += ' expected="' + strtoxml(node['expected']) + '"'
s += '/>'
print (s)
print ('</tree>')
elif htmldir is not None:
if not htmldir.endswith('/'):
htmldir += '/'
if not os.path.exists(htmldir):
os.mkdir(htmldir)
findex = open(htmldir + 'index.htm', 'w')
findex.write('<html>\n')
findex.write('<head>\n')
findex.write(' <style type="text/css">\n')
findex.write(' table { font-size: 0.8em }\n')
findex.write(
' th { background-color: #A3C159; text-transform: uppercase }\n')
findex.write(
' td { background-color: #F0FFE0; vertical-align: text-top }\n')
findex.write(' A:link { text-decoration: none }\n')
findex.write(' A:visited { text-decoration: none }\n')
findex.write(' A:active { text-decoration: none }\n')
findex.write(' A:hover { text-decoration: underline; color: blue }\n')
findex.write(' </style>\n')
findex.write('</head>\n')
findex.write('<body>\n')
findex.write('<h1>' + filename + '</h1>\n')
functionNames = []
for node in e.nodes:
functionname = trimname(node['functionName'])
if functionname not in functionNames:
functionNames.append(functionname)
functionNames.sort()
findex.write('<table border="0" cellspacing="0">\n')
findex.write(' <tr><th>Name</th><th>Errors</th><th>All</th></tr>\n')
for functionname in functionNames:
findex.write(' <tr><td>' + functionname + '</td>')
numall = 0
numerr = 0
for node in e.nodes:
if trimname(node['functionName']) == functionname:
numall = numall + 1
if node['expected'] != '':
numerr = numerr + 1
if numerr == 0:
findex.write('<td><div align="right">0</div></td>')
else:
findex.write('<td><a href="errors-' + functionname +
'.htm"><div align="right">' + str(numerr) + '</div></a></td>')
findex.write('<td><a href="all-' + functionname +
'.htm"><div align="right">' + str(numall) + '</div></a></td>')
findex.write('</tr>\n')
findex.write('</table>\n')
findex.write('</body></html>')
findex.close()
# create files for each functionName
for functionName in functionNames:
writeHtmlFile(e.nodes,
functionName,
htmldir + 'errors-' + functionName + '.htm',
True)
writeHtmlFile(e.nodes,
functionName,
htmldir + 'all-' + functionName + '.htm',
False)
elif codedir:
testnum = 0
if not codedir.endswith('/'):
codedir = codedir + '/'
if not os.path.exists(codedir):
os.mkdir(codedir)
errors = open(codedir + 'errors.txt', 'w')
for node in e.nodes:
testnum = testnum + 1
functionName = node['functionName']
code = node['code']
code = code.replace('\\n', '\n')
code = code.replace('\\"', '"')
expected = node['expected']
filename = '0000' + str(testnum) + '-'
filename = filename[-4:]
filename += functionName + '.cpp'
# source code
fout = open(codedir + filename, 'w')
fout.write(code)
fout.close()
# write 'expected' to errors.txt
if expected != '':
expected = expected.replace('\\n', '\n')
expected = expected.replace('\\"', '"')
expected = re.sub(
'\\[test.cp?p?:', '[' + filename + ':', expected)
errors.write(expected)
errors.close()
else:
for node in e.nodes:
print (node['functionName'])
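# Quick checks of trimname() above: trailing digits, then a trailing
# underscore, are stripped so numbered test variants group together.
assert trimname('array_index_7') == 'array_index'
assert trimname('uninitvar2') == 'uninitvar'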
|
Blubbz0r/cppcheck
|
tools/extracttests.py
|
Python
|
gpl-3.0
| 10,133
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2015 Cristian van Ee <cristian at cvee.org>
# Copyright 2015 Igor Gnatenko <i.gnatenko.brain@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: dnf
version_added: 1.9
short_description: Manages packages with the I(dnf) package manager
description:
- Installs, upgrades, removes, and lists packages and groups with the I(dnf) package manager.
options:
name:
description:
- "Package name, or package specifier with version, like C(name-1.0). When using state=latest, this can be '*' which means run: dnf -y update. You can also pass a url or a local path to a rpm file."
required: true
default: null
aliases: []
list:
description:
- Various (non-idempotent) commands for usage with C(/usr/bin/ansible) and I(not) playbooks. See examples.
required: false
default: null
state:
description:
- Whether to install (C(present), C(latest)), or remove (C(absent)) a package.
required: false
choices: [ "present", "latest", "absent" ]
default: "present"
enablerepo:
description:
- I(Repoid) of repositories to enable for the install/update operation.
These repos will not persist beyond the transaction.
When specifying multiple repos, separate them with a ",".
required: false
default: null
aliases: []
disablerepo:
description:
- I(Repoid) of repositories to disable for the install/update operation.
These repos will not persist beyond the transaction.
When specifying multiple repos, separate them with a ",".
required: false
default: null
aliases: []
conf_file:
description:
- The remote dnf configuration file to use for the transaction.
required: false
default: null
aliases: []
disable_gpg_check:
description:
- Whether to disable the GPG checking of signatures of packages being
installed. Has an effect only if state is I(present) or I(latest).
required: false
default: "no"
choices: ["yes", "no"]
aliases: []
notes: []
# informational: requirements for nodes
requirements:
- "python >= 2.6"
- python-dnf
author:
- '"Igor Gnatenko (@ignatenkobrain)" <i.gnatenko.brain@gmail.com>'
- '"Cristian van Ee (@DJMuggs)" <cristian at cvee.org>'
'''
EXAMPLES = '''
- name: install the latest version of Apache
dnf: name=httpd state=latest
- name: remove the Apache package
dnf: name=httpd state=absent
- name: install the latest version of Apache from the testing repo
dnf: name=httpd enablerepo=testing state=present
- name: upgrade all packages
dnf: name=* state=latest
- name: install the nginx rpm from a remote repo
dnf: name=http://nginx.org/packages/centos/6/noarch/RPMS/nginx-release-centos-6-0.el6.ngx.noarch.rpm state=present
- name: install nginx rpm from a local file
dnf: name=/usr/local/src/nginx-release-centos-6-0.el6.ngx.noarch.rpm state=present
- name: install the 'Development tools' package group
dnf: name="@Development tools" state=present
'''
import os
try:
import dnf
import dnf.cli
import dnf.const
import dnf.exceptions
import dnf.subject
import dnf.util
HAS_DNF = True
except ImportError:
HAS_DNF = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import PY2
def _ensure_dnf(module):
if not HAS_DNF:
if PY2:
package = 'python2-dnf'
else:
package = 'python3-dnf'
if module.check_mode:
module.fail_json(msg="`{0}` is not installed, but it is required"
" for the Ansible dnf module.".format(package))
module.run_command(['dnf', 'install', '-y', package], check_rc=True)
global dnf
try:
import dnf
import dnf.cli
import dnf.const
import dnf.exceptions
import dnf.subject
import dnf.util
except ImportError:
module.fail_json(msg="Could not import the dnf python module."
" Please install `{0}` package.".format(package))
def _configure_base(module, base, conf_file, disable_gpg_check):
"""Configure the dnf Base object."""
conf = base.conf
# Turn off debug messages in the output
conf.debuglevel = 0
# Set whether to check gpg signatures
conf.gpgcheck = not disable_gpg_check
# Don't prompt for user confirmations
conf.assumeyes = True
# Change the configuration file path if provided
if conf_file:
# Fail if we can't read the configuration file.
if not os.access(conf_file, os.R_OK):
module.fail_json(
msg="cannot read configuration file", conf_file=conf_file)
else:
conf.config_file_path = conf_file
# Read the configuration file
conf.read()
def _specify_repositories(base, disablerepo, enablerepo):
"""Enable and disable repositories matching the provided patterns."""
base.read_all_repos()
repos = base.repos
# Disable repositories
for repo_pattern in disablerepo:
for repo in repos.get_matching(repo_pattern):
repo.disable()
# Enable repositories
for repo_pattern in enablerepo:
for repo in repos.get_matching(repo_pattern):
repo.enable()
def _base(module, conf_file, disable_gpg_check, disablerepo, enablerepo):
"""Return a fully configured dnf Base object."""
base = dnf.Base()
_configure_base(module, base, conf_file, disable_gpg_check)
_specify_repositories(base, disablerepo, enablerepo)
base.fill_sack(load_system_repo='auto')
return base
def _package_dict(package):
"""Return a dictionary of information for the package."""
# NOTE: This no longer contains the 'dnfstate' field because it is
# already known based on the query type.
result = {
'name': package.name,
'arch': package.arch,
'epoch': str(package.epoch),
'release': package.release,
'version': package.version,
'repo': package.repoid}
result['nevra'] = '{epoch}:{name}-{version}-{release}.{arch}'.format(
**result)
return result
def list_items(module, base, command):
"""List package info based on the command."""
# Rename updates to upgrades
if command == 'updates':
command = 'upgrades'
# Return the corresponding packages
if command in ['installed', 'upgrades', 'available']:
results = [
_package_dict(package)
for package in getattr(base.sack.query(), command)()]
# Return the enabled repository ids
elif command in ['repos', 'repositories']:
results = [
{'repoid': repo.id, 'state': 'enabled'}
for repo in base.repos.iter_enabled()]
# Return any matching packages
else:
packages = dnf.subject.Subject(command).get_best_query(base.sack)
results = [_package_dict(package) for package in packages]
module.exit_json(results=results)
def _mark_package_install(module, base, pkg_spec):
"""Mark the package for install."""
try:
base.install(pkg_spec)
except dnf.exceptions.MarkingError:
module.fail_json(msg="No package {} available.".format(pkg_spec))
def _parse_spec_group_file(names):
pkg_specs, grp_specs, filenames = [], [], []
for name in names:
if name.endswith(".rpm"):
filenames.append(name)
elif name.startswith("@"):
grp_specs.append(name[1:])
else:
pkg_specs.append(name)
return pkg_specs, grp_specs, filenames
def _install_remote_rpms(base, filenames):
if int(dnf.__version__.split(".")[0]) >= 2:
pkgs = list(sorted(base.add_remote_rpms(list(filenames)), reverse=True))
else:
pkgs = []
for filename in filenames:
pkgs.append(base.add_remote_rpm(filename))
for pkg in pkgs:
base.package_install(pkg)
def ensure(module, base, state, names):
# Accumulate failures. Package management modules install what they can
# and fail with a message about what they can't.
failures = []
allow_erasing = False
if names == ['*'] and state == 'latest':
base.upgrade_all()
else:
pkg_specs, group_specs, filenames = _parse_spec_group_file(names)
if group_specs:
base.read_comps()
pkg_specs = [p.strip() for p in pkg_specs]
filenames = [f.strip() for f in filenames]
groups = []
environments = []
for group_spec in (g.strip() for g in group_specs):
group = base.comps.group_by_pattern(group_spec)
if group:
groups.append(group)
else:
environment = base.comps.environment_by_pattern(group_spec)
if environment:
environments.append(environment.id)
else:
module.fail_json(
msg="No group {} available.".format(group_spec))
if state in ['installed', 'present']:
# Install files.
_install_remote_rpms(base, filenames)
# Install groups.
for group in groups:
try:
base.group_install(group, dnf.const.GROUP_PACKAGE_TYPES)
except dnf.exceptions.Error as e:
# In dnf 2.0 if all the mandatory packages in a group do
# not install, an error is raised. We want to capture
# this but still install as much as possible.
failures.append((group, e))
for environment in environments:
try:
base.environment_install(environment, dnf.const.GROUP_PACKAGE_TYPES)
except dnf.exceptions.Error as e:
failures.append((environment, e))
# Install packages.
for pkg_spec in pkg_specs:
_mark_package_install(module, base, pkg_spec)
elif state == 'latest':
# "latest" is same as "installed" for filenames.
_install_remote_rpms(base, filenames)
for group in groups:
try:
try:
base.group_upgrade(group)
except dnf.exceptions.CompsError:
# If not already installed, try to install.
base.group_install(group, dnf.const.GROUP_PACKAGE_TYPES)
except dnf.exceptions.Error as e:
failures.append((group, e))
for environment in environments:
try:
try:
base.environment_upgrade(environment)
except dnf.exceptions.CompsError:
# If not already installed, try to install.
base.environment_install(environment, dnf.const.GROUP_PACKAGE_TYPES)
except dnf.exceptions.Error as e:
failures.append((environment, e))
for pkg_spec in pkg_specs:
# best=True causes the latest package to be installed
# even if it was not previously installed
base.conf.best = True
base.install(pkg_spec)
else:
# state == absent
if filenames:
module.fail_json(
msg="Cannot remove paths -- please specify package name.")
for group in groups:
try:
base.group_remove(group)
except dnf.exceptions.CompsError:
# Group is already uninstalled.
pass
for environment in environments:
try:
base.environment_remove(environment)
except dnf.exceptions.CompsError:
# Environment is already uninstalled.
pass
installed = base.sack.query().installed()
for pkg_spec in pkg_specs:
if installed.filter(name=pkg_spec):
base.remove(pkg_spec)
# Like the dnf CLI we want to allow recursive removal of dependent
# packages
allow_erasing = True
if not base.resolve(allow_erasing=allow_erasing):
if failures:
module.fail_json(msg='Failed to install some of the specified packages',
failures=failures)
module.exit_json(msg="Nothing to do")
else:
if module.check_mode:
if failures:
module.fail_json(msg='Failed to install some of the specified packages',
failures=failures)
module.exit_json(changed=True)
base.download_packages(base.transaction.install_set)
base.do_transaction()
response = {'changed': True, 'results': []}
for package in base.transaction.install_set:
response['results'].append("Installed: {0}".format(package))
for package in base.transaction.remove_set:
response['results'].append("Removed: {0}".format(package))
if failures:
module.fail_json(msg='Failed to install some of the specified packages',
failures=failures)
module.exit_json(**response)
def main():
"""The main function."""
module = AnsibleModule(
argument_spec=dict(
name=dict(aliases=['pkg'], type='list'),
state=dict(
default='installed',
choices=[
'absent', 'present', 'installed', 'removed', 'latest']),
enablerepo=dict(type='list', default=[]),
disablerepo=dict(type='list', default=[]),
list=dict(),
conf_file=dict(default=None, type='path'),
disable_gpg_check=dict(default=False, type='bool'),
),
required_one_of=[['name', 'list']],
mutually_exclusive=[['name', 'list']],
supports_check_mode=True)
params = module.params
_ensure_dnf(module)
if params['list']:
base = _base(
module, params['conf_file'], params['disable_gpg_check'],
params['disablerepo'], params['enablerepo'])
list_items(module, base, params['list'])
else:
# Note: base takes a long time to run so we want to check for failure
# before running it.
if not dnf.util.am_i_root():
module.fail_json(msg="This command has to be run under the root user.")
base = _base(
module, params['conf_file'], params['disable_gpg_check'],
params['disablerepo'], params['enablerepo'])
ensure(module, base, params['state'], params['name'])
if __name__ == '__main__':
main()
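# Illustration of _parse_spec_group_file() above on a mixed name list;
# the inputs are illustrative only, not part of the module proper.
_pkgs, _groups, _files = _parse_spec_group_file(
    ['httpd', '@Development tools', '/tmp/nginx.rpm'])
assert _pkgs == ['httpd']
assert _groups == ['Development tools']
assert _files == ['/tmp/nginx.rpm']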
|
dimid/ansible-modules-extras
|
packaging/os/dnf.py
|
Python
|
gpl-3.0
| 15,510
|
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests AsyncIO version of grpcio-health-checking."""
import asyncio
import logging
import random
import time
import unittest
import grpc
from grpc.experimental import aio
from grpc_health.v1 import health
from grpc_health.v1 import health_pb2
from grpc_health.v1 import health_pb2_grpc
from tests.unit.framework.common import test_constants
from tests_aio.unit._test_base import AioTestBase
_SERVING_SERVICE = 'grpc.test.TestServiceServing'
_UNKNOWN_SERVICE = 'grpc.test.TestServiceUnknown'
_NOT_SERVING_SERVICE = 'grpc.test.TestServiceNotServing'
_WATCH_SERVICE = 'grpc.test.WatchService'
_LARGE_NUMBER_OF_STATUS_CHANGES = 1000
async def _pipe_to_queue(call, queue):
async for response in call:
await queue.put(response)
class HealthServicerTest(AioTestBase):
async def setUp(self):
self._servicer = health.aio.HealthServicer()
await self._servicer.set(_SERVING_SERVICE,
health_pb2.HealthCheckResponse.SERVING)
await self._servicer.set(_UNKNOWN_SERVICE,
health_pb2.HealthCheckResponse.UNKNOWN)
await self._servicer.set(_NOT_SERVING_SERVICE,
health_pb2.HealthCheckResponse.NOT_SERVING)
self._server = aio.server()
port = self._server.add_insecure_port('[::]:0')
health_pb2_grpc.add_HealthServicer_to_server(self._servicer,
self._server)
await self._server.start()
self._channel = aio.insecure_channel('localhost:%d' % port)
self._stub = health_pb2_grpc.HealthStub(self._channel)
async def tearDown(self):
await self._channel.close()
await self._server.stop(None)
async def test_check_empty_service(self):
request = health_pb2.HealthCheckRequest()
resp = await self._stub.Check(request)
self.assertEqual(health_pb2.HealthCheckResponse.SERVING, resp.status)
async def test_check_serving_service(self):
request = health_pb2.HealthCheckRequest(service=_SERVING_SERVICE)
resp = await self._stub.Check(request)
self.assertEqual(health_pb2.HealthCheckResponse.SERVING, resp.status)
async def test_check_unknown_service(self):
request = health_pb2.HealthCheckRequest(service=_UNKNOWN_SERVICE)
resp = await self._stub.Check(request)
self.assertEqual(health_pb2.HealthCheckResponse.UNKNOWN, resp.status)
async def test_check_not_serving_service(self):
request = health_pb2.HealthCheckRequest(service=_NOT_SERVING_SERVICE)
resp = await self._stub.Check(request)
self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING,
resp.status)
async def test_check_not_found_service(self):
request = health_pb2.HealthCheckRequest(service='not-found')
with self.assertRaises(aio.AioRpcError) as context:
await self._stub.Check(request)
self.assertEqual(grpc.StatusCode.NOT_FOUND, context.exception.code())
async def test_health_service_name(self):
self.assertEqual(health.SERVICE_NAME, 'grpc.health.v1.Health')
async def test_watch_empty_service(self):
request = health_pb2.HealthCheckRequest(service=health.OVERALL_HEALTH)
call = self._stub.Watch(request)
queue = asyncio.Queue()
task = self.loop.create_task(_pipe_to_queue(call, queue))
self.assertEqual(health_pb2.HealthCheckResponse.SERVING,
(await queue.get()).status)
call.cancel()
with self.assertRaises(asyncio.CancelledError):
await task
self.assertTrue(queue.empty())
async def test_watch_new_service(self):
request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
call = self._stub.Watch(request)
queue = asyncio.Queue()
task = self.loop.create_task(_pipe_to_queue(call, queue))
self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
(await queue.get()).status)
await self._servicer.set(_WATCH_SERVICE,
health_pb2.HealthCheckResponse.SERVING)
self.assertEqual(health_pb2.HealthCheckResponse.SERVING,
(await queue.get()).status)
await self._servicer.set(_WATCH_SERVICE,
health_pb2.HealthCheckResponse.NOT_SERVING)
self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING,
(await queue.get()).status)
call.cancel()
with self.assertRaises(asyncio.CancelledError):
await task
self.assertTrue(queue.empty())
async def test_watch_service_isolation(self):
request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
call = self._stub.Watch(request)
queue = asyncio.Queue()
task = self.loop.create_task(_pipe_to_queue(call, queue))
self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
(await queue.get()).status)
await self._servicer.set('some-other-service',
health_pb2.HealthCheckResponse.SERVING)
        # A health status change in another service should be isolated.
        # Hence, no additional notification should be observed.
with self.assertRaises(asyncio.TimeoutError):
await asyncio.wait_for(queue.get(), test_constants.SHORT_TIMEOUT)
call.cancel()
with self.assertRaises(asyncio.CancelledError):
await task
self.assertTrue(queue.empty())
async def test_two_watchers(self):
request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
queue1 = asyncio.Queue()
queue2 = asyncio.Queue()
call1 = self._stub.Watch(request)
call2 = self._stub.Watch(request)
task1 = self.loop.create_task(_pipe_to_queue(call1, queue1))
task2 = self.loop.create_task(_pipe_to_queue(call2, queue2))
self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
(await queue1.get()).status)
self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
(await queue2.get()).status)
await self._servicer.set(_WATCH_SERVICE,
health_pb2.HealthCheckResponse.SERVING)
self.assertEqual(health_pb2.HealthCheckResponse.SERVING,
(await queue1.get()).status)
self.assertEqual(health_pb2.HealthCheckResponse.SERVING,
(await queue2.get()).status)
call1.cancel()
call2.cancel()
with self.assertRaises(asyncio.CancelledError):
await task1
with self.assertRaises(asyncio.CancelledError):
await task2
self.assertTrue(queue1.empty())
self.assertTrue(queue2.empty())
async def test_cancelled_watch_removed_from_watch_list(self):
request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
call = self._stub.Watch(request)
queue = asyncio.Queue()
task = self.loop.create_task(_pipe_to_queue(call, queue))
self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
(await queue.get()).status)
call.cancel()
await self._servicer.set(_WATCH_SERVICE,
health_pb2.HealthCheckResponse.SERVING)
with self.assertRaises(asyncio.CancelledError):
await task
# Wait for the serving coroutine to process client cancellation.
timeout = time.monotonic() + test_constants.TIME_ALLOWANCE
        while time.monotonic() < timeout and self._servicer._server_watchers:
await asyncio.sleep(1)
self.assertFalse(self._servicer._server_watchers,
'There should not be any watcher left')
self.assertTrue(queue.empty())
async def test_graceful_shutdown(self):
request = health_pb2.HealthCheckRequest(service=health.OVERALL_HEALTH)
call = self._stub.Watch(request)
queue = asyncio.Queue()
task = self.loop.create_task(_pipe_to_queue(call, queue))
self.assertEqual(health_pb2.HealthCheckResponse.SERVING,
(await queue.get()).status)
await self._servicer.enter_graceful_shutdown()
self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING,
(await queue.get()).status)
# This should be a no-op.
await self._servicer.set(health.OVERALL_HEALTH,
health_pb2.HealthCheckResponse.SERVING)
resp = await self._stub.Check(request)
self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING,
resp.status)
call.cancel()
with self.assertRaises(asyncio.CancelledError):
await task
self.assertTrue(queue.empty())
async def test_no_duplicate_status(self):
request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
call = self._stub.Watch(request)
queue = asyncio.Queue()
task = self.loop.create_task(_pipe_to_queue(call, queue))
self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
(await queue.get()).status)
last_status = health_pb2.HealthCheckResponse.SERVICE_UNKNOWN
for _ in range(_LARGE_NUMBER_OF_STATUS_CHANGES):
if random.randint(0, 1) == 0:
status = health_pb2.HealthCheckResponse.SERVING
else:
status = health_pb2.HealthCheckResponse.NOT_SERVING
await self._servicer.set(_WATCH_SERVICE, status)
if status != last_status:
self.assertEqual(status, (await queue.get()).status)
last_status = status
call.cancel()
with self.assertRaises(asyncio.CancelledError):
await task
self.assertTrue(queue.empty())
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
unittest.main(verbosity=2)
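# Beyond the test harness, a production server would be wired the same way as
# setUp() above (a minimal sketch; 'my.package.MyService' is a placeholder):
#
#   servicer = health.aio.HealthServicer()
#   health_pb2_grpc.add_HealthServicer_to_server(servicer, server)
#   await servicer.set('my.package.MyService',
#                      health_pb2.HealthCheckResponse.SERVING)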
|
ctiller/grpc
|
src/python/grpcio_tests/tests_aio/health_check/health_servicer_test.py
|
Python
|
apache-2.0
| 10,755
|
# Natural Language Toolkit: Rude Chatbot
#
# Copyright (C) 2001-2008 University of Pennsylvania
# Author: Peter Spiller <pspiller@csse.unimelb.edu.au>
# URL: <http://nltk.sf.net>
# For license information, see LICENSE.TXT
from util import *
pairs = (
(r'We (.*)',
("What do you mean, 'we'?",
"Don't include me in that!",
"I wouldn't be so sure about that.")),
(r'You should (.*)',
("Don't tell me what to do, buddy.",
"Really? I should, should I?")),
(r'You\'re(.*)',
("More like YOU'RE %1!",
"Hah! Look who's talking.",
"Come over here and tell me I'm %1.")),
(r'You are(.*)',
("More like YOU'RE %1!",
"Hah! Look who's talking.",
"Come over here and tell me I'm %1.")),
(r'I can\'t(.*)',
("You do sound like the type who can't %1.",
"Hear that splashing sound? That's my heart bleeding for you.",
"Tell somebody who might actually care.")),
(r'I think (.*)',
("I wouldn't think too hard if I were you.",
"You actually think? I'd never have guessed...")),
(r'I (.*)',
("I'm getting a bit tired of hearing about you.",
"How about we talk about me instead?",
"Me, me, me... Frankly, I don't care.")),
(r'How (.*)',
("How do you think?",
"Take a wild guess.",
"I'm not even going to dignify that with an answer.")),
(r'What (.*)',
("Do I look like an encylopedia?",
"Figure it out yourself.")),
(r'Why (.*)',
("Why not?",
"That's so obvious I thought even you'd have already figured it out.")),
(r'(.*)shut up(.*)',
("Make me.",
"Getting angry at a feeble NLP assignment? Somebody's losing it.",
"Say that again, I dare you.")),
(r'Shut up(.*)',
("Make me.",
"Getting angry at a feeble NLP assignment? Somebody's losing it.",
"Say that again, I dare you.")),
(r'Hello(.*)',
("Oh good, somebody else to talk to. Joy.",
"'Hello'? How original...")),
(r'(.*)',
("I'm getting bored here. Become more interesting.",
"Either become more thrilling or get lost, buddy.",
"Change the subject before I die of fatal boredom."))
)
rude_chatbot = Chat(pairs, reflections)
def rude_chat():
print "Talk to the program by typing in plain English, using normal upper-"
print 'and lower-case letters and punctuation. Enter "quit" when done.'
print '='*72
print "I suppose I should say hello."
rude_chatbot.converse()
def demo():
rude_chat()
if __name__ == "__main__":
demo()
|
hectormartinez/rougexstem
|
taln2016/icsisumm-primary-sys34_v1/nltk/nltk-0.9.2/nltk/chat/rude.py
|
Python
|
apache-2.0
| 2,684
|
from __future__ import unicode_literals
from django.conf import settings
from django.contrib import admin
if 'django.contrib.auth' in settings.INSTALLED_APPS:
from tastypie.models import ApiKey
class ApiKeyInline(admin.StackedInline):
model = ApiKey
extra = 0
class ApiKeyAdmin(admin.ModelAdmin):
search_fields = ['key', 'user__username']
raw_id_fields = ['user']
list_display = ['id', 'user', 'key', 'created']
ABSTRACT_APIKEY = getattr(settings, 'TASTYPIE_ABSTRACT_APIKEY', False)
if ABSTRACT_APIKEY and not isinstance(ABSTRACT_APIKEY, bool):
raise TypeError("'TASTYPIE_ABSTRACT_APIKEY' must be either 'True' "
"or 'False'.")
if not ABSTRACT_APIKEY:
admin.site.register(ApiKey, ApiKeyAdmin)
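# Illustrative settings.py snippet (assumed, not part of this file): a project
# that swaps in its own concrete subclass of an abstract ApiKey model opts out
# of the registration above with
#
#   TASTYPIE_ABSTRACT_APIKEY = True
#
# and registers its concrete model with the admin itself.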
|
Perkville/django-tastypie
|
tastypie/admin.py
|
Python
|
bsd-3-clause
| 804
|
# -*- coding: utf-8 -*-
"""
pygments.formatters.latex
~~~~~~~~~~~~~~~~~~~~~~~~~
Formatter for LaTeX fancyvrb output.
:copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.formatter import Formatter
from pygments.token import Token, STANDARD_TYPES
from pygments.util import get_bool_opt, get_int_opt, StringIO
__all__ = ['LatexFormatter']
def escape_tex(text, commandprefix):
return text.replace('\\', '\x00'). \
replace('{', '\x01'). \
replace('}', '\x02'). \
replace('\x00', r'\%sZbs{}' % commandprefix). \
replace('\x01', r'\%sZob{}' % commandprefix). \
replace('\x02', r'\%sZcb{}' % commandprefix). \
replace('^', r'\%sZca{}' % commandprefix). \
replace('_', r'\%sZus{}' % commandprefix). \
replace('&', r'\%sZam{}' % commandprefix). \
replace('<', r'\%sZlt{}' % commandprefix). \
replace('>', r'\%sZgt{}' % commandprefix). \
replace('#', r'\%sZsh{}' % commandprefix). \
replace('%', r'\%sZpc{}' % commandprefix). \
replace('$', r'\%sZdl{}' % commandprefix). \
replace('-', r'\%sZhy{}' % commandprefix). \
replace("'", r'\%sZsq{}' % commandprefix). \
replace('"', r'\%sZdq{}' % commandprefix). \
replace('~', r'\%sZti{}' % commandprefix)
DOC_TEMPLATE = r'''
\documentclass{%(docclass)s}
\usepackage{fancyvrb}
\usepackage{color}
\usepackage[%(encoding)s]{inputenc}
%(preamble)s
%(styledefs)s
\begin{document}
\section*{%(title)s}
%(code)s
\end{document}
'''
## Small explanation of the mess below :)
#
# The previous version of the LaTeX formatter just assigned a command to
# each token type defined in the current style. That obviously is
# problematic if the highlighted code is produced for a different style
# than the style commands themselves.
#
# This version works much like the HTML formatter which assigns multiple
# CSS classes to each <span> tag, from the most specific to the least
# specific token type, thus falling back to the parent token type if one
# is not defined. Here, the classes are there too and use the same short
# forms given in token.STANDARD_TYPES.
#
# Highlighted code now only uses one custom command, which by default is
# \PY and selectable by the commandprefix option (and in addition the
# escapes \PYZat, \PYZlb and \PYZrb which haven't been renamed for
# backwards compatibility purposes).
#
# \PY has two arguments: the classes, separated by +, and the text to
# render in that style. The classes are resolved into the respective
# style commands by magic, which serves to ignore unknown classes.
#
# The magic macros are:
# * \PY@it, \PY@bf, etc. are unconditionally wrapped around the text
# to render in \PY@do. Their definition determines the style.
# * \PY@reset resets \PY@it etc. to do nothing.
# * \PY@toks parses the list of classes, using magic inspired by the
# keyval package (but modified to use plusses instead of commas
# because fancyvrb redefines commas inside its environments).
# * \PY@tok processes one class, calling the \PY@tok@classname command
# if it exists.
# * \PY@tok@classname sets the \PY@it etc. to reflect the chosen style
# for its class.
# * \PY resets the style, parses the classnames and then calls \PY@do.
#
# Tip: to read this code, print it out in substituted form using e.g.
# >>> print STYLE_TEMPLATE % {'cp': 'PY'}
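#
# A concrete illustration (ours, not from the original author): with the
# default prefix a Name.Function token is emitted as
#
#     \PY{n+nf}{foo}
#
# \PY@reset first neutralises every style macro, \PY@toks then walks the
# class list left to right ('n', then 'nf'), so the more specific
# \PY@tok@nf is applied last and overrides \PY@tok@n where they conflict;
# a class with no \PY@tok@... definition expands to \relax and is silently
# ignored, and \PY@do finally typesets 'foo' with whatever macros are set.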
STYLE_TEMPLATE = r'''
\makeatletter
\def\%(cp)s@reset{\let\%(cp)s@it=\relax \let\%(cp)s@bf=\relax%%
\let\%(cp)s@ul=\relax \let\%(cp)s@tc=\relax%%
\let\%(cp)s@bc=\relax \let\%(cp)s@ff=\relax}
\def\%(cp)s@tok#1{\csname %(cp)s@tok@#1\endcsname}
\def\%(cp)s@toks#1+{\ifx\relax#1\empty\else%%
\%(cp)s@tok{#1}\expandafter\%(cp)s@toks\fi}
\def\%(cp)s@do#1{\%(cp)s@bc{\%(cp)s@tc{\%(cp)s@ul{%%
\%(cp)s@it{\%(cp)s@bf{\%(cp)s@ff{#1}}}}}}}
\def\%(cp)s#1#2{\%(cp)s@reset\%(cp)s@toks#1+\relax+\%(cp)s@do{#2}}
%(styles)s
\def\%(cp)sZbs{\char`\\}
\def\%(cp)sZus{\char`\_}
\def\%(cp)sZob{\char`\{}
\def\%(cp)sZcb{\char`\}}
\def\%(cp)sZca{\char`\^}
\def\%(cp)sZam{\char`\&}
\def\%(cp)sZlt{\char`\<}
\def\%(cp)sZgt{\char`\>}
\def\%(cp)sZsh{\char`\#}
\def\%(cp)sZpc{\char`\%%}
\def\%(cp)sZdl{\char`\$}
\def\%(cp)sZhy{\char`\-}
\def\%(cp)sZsq{\char`\'}
\def\%(cp)sZdq{\char`\"}
\def\%(cp)sZti{\char`\~}
%% for compatibility with earlier versions
\def\%(cp)sZat{@}
\def\%(cp)sZlb{[}
\def\%(cp)sZrb{]}
\makeatother
'''
def _get_ttype_name(ttype):
fname = STANDARD_TYPES.get(ttype)
if fname:
return fname
aname = ''
while fname is None:
aname = ttype[-1] + aname
ttype = ttype.parent
fname = STANDARD_TYPES.get(ttype)
return fname + aname
class LatexFormatter(Formatter):
r"""
Format tokens as LaTeX code. This needs the `fancyvrb` and `color`
standard packages.
Without the `full` option, code is formatted as one ``Verbatim``
environment, like this:
.. sourcecode:: latex
\begin{Verbatim}[commandchars=\\{\}]
\PY{k}{def }\PY{n+nf}{foo}(\PY{n}{bar}):
\PY{k}{pass}
\end{Verbatim}
The special command used here (``\PY``) and all the other macros it needs
are output by the `get_style_defs` method.
With the `full` option, a complete LaTeX document is output, including
the command definitions in the preamble.
The `get_style_defs()` method of a `LatexFormatter` returns a string
containing ``\def`` commands defining the macros needed inside the
``Verbatim`` environments.
Additional options accepted:
`style`
The style to use, can be a string or a Style subclass (default:
``'default'``).
`full`
Tells the formatter to output a "full" document, i.e. a complete
self-contained document (default: ``False``).
`title`
If `full` is true, the title that should be used to caption the
document (default: ``''``).
`docclass`
If the `full` option is enabled, this is the document class to use
(default: ``'article'``).
`preamble`
If the `full` option is enabled, this can be further preamble commands,
e.g. ``\usepackage`` (default: ``''``).
`linenos`
If set to ``True``, output line numbers (default: ``False``).
`linenostart`
The line number for the first line (default: ``1``).
`linenostep`
If set to a number n > 1, only every nth line number is printed.
`verboptions`
Additional options given to the Verbatim environment (see the *fancyvrb*
docs for possible values) (default: ``''``).
`commandprefix`
The LaTeX commands used to produce colored output are constructed
using this prefix and some letters (default: ``'PY'``).
*New in Pygments 0.7.*
*New in Pygments 0.10:* the default is now ``'PY'`` instead of ``'C'``.
`texcomments`
        If set to ``True``, enables LaTeX comment lines. That is, LaTeX markup
in comment tokens is not escaped so that LaTeX can render it (default:
``False``). *New in Pygments 1.2.*
`mathescape`
If set to ``True``, enables LaTeX math mode escape in comments. That
is, ``'$...$'`` inside a comment will trigger math mode (default:
``False``). *New in Pygments 1.2.*
"""
name = 'LaTeX'
aliases = ['latex', 'tex']
filenames = ['*.tex']
def __init__(self, **options):
Formatter.__init__(self, **options)
self.docclass = options.get('docclass', 'article')
self.preamble = options.get('preamble', '')
self.linenos = get_bool_opt(options, 'linenos', False)
self.linenostart = abs(get_int_opt(options, 'linenostart', 1))
self.linenostep = abs(get_int_opt(options, 'linenostep', 1))
self.verboptions = options.get('verboptions', '')
self.nobackground = get_bool_opt(options, 'nobackground', False)
self.commandprefix = options.get('commandprefix', 'PY')
self.texcomments = get_bool_opt(options, 'texcomments', False)
self.mathescape = get_bool_opt(options, 'mathescape', False)
self._create_stylesheet()
def _create_stylesheet(self):
t2n = self.ttype2name = {Token: ''}
c2d = self.cmd2def = {}
cp = self.commandprefix
def rgbcolor(col):
if col:
                return ','.join(['%.2f' % (int(col[i] + col[i + 1], 16) / 255.0)
                                 for i in (0, 2, 4)])
else:
return '1,1,1'
for ttype, ndef in self.style:
name = _get_ttype_name(ttype)
cmndef = ''
if ndef['bold']:
cmndef += r'\let\$$@bf=\textbf'
if ndef['italic']:
cmndef += r'\let\$$@it=\textit'
if ndef['underline']:
cmndef += r'\let\$$@ul=\underline'
if ndef['roman']:
cmndef += r'\let\$$@ff=\textrm'
if ndef['sans']:
cmndef += r'\let\$$@ff=\textsf'
            if ndef['mono']:
                cmndef += r'\let\$$@ff=\texttt'
if ndef['color']:
cmndef += (r'\def\$$@tc##1{\textcolor[rgb]{%s}{##1}}' %
rgbcolor(ndef['color']))
if ndef['border']:
cmndef += (r'\def\$$@bc##1{\setlength{\fboxsep}{0pt}'
r'\fcolorbox[rgb]{%s}{%s}{\strut ##1}}' %
(rgbcolor(ndef['border']),
rgbcolor(ndef['bgcolor'])))
elif ndef['bgcolor']:
cmndef += (r'\def\$$@bc##1{\setlength{\fboxsep}{0pt}'
r'\colorbox[rgb]{%s}{\strut ##1}}' %
rgbcolor(ndef['bgcolor']))
if cmndef == '':
continue
cmndef = cmndef.replace('$$', cp)
t2n[ttype] = name
c2d[name] = cmndef
def get_style_defs(self, arg=''):
"""
Return the command sequences needed to define the commands
used to format text in the verbatim environment. ``arg`` is ignored.
"""
cp = self.commandprefix
styles = []
for name, definition in self.cmd2def.iteritems():
styles.append(r'\expandafter\def\csname %s@tok@%s\endcsname{%s}' %
(cp, name, definition))
return STYLE_TEMPLATE % {'cp': self.commandprefix,
'styles': '\n'.join(styles)}
def format_unencoded(self, tokensource, outfile):
# TODO: add support for background colors
t2n = self.ttype2name
cp = self.commandprefix
if self.full:
realoutfile = outfile
outfile = StringIO()
outfile.write(ur'\begin{Verbatim}[commandchars=\\\{\}')
if self.linenos:
start, step = self.linenostart, self.linenostep
outfile.write(u',numbers=left' +
(start and u',firstnumber=%d' % start or u'') +
(step and u',stepnumber=%d' % step or u''))
if self.mathescape or self.texcomments:
outfile.write(ur',codes={\catcode`\$=3\catcode`\^=7\catcode`\_=8}')
if self.verboptions:
outfile.write(u',' + self.verboptions)
outfile.write(u']\n')
for ttype, value in tokensource:
if ttype in Token.Comment:
if self.texcomments:
# Try to guess comment starting lexeme and escape it ...
start = value[0:1]
for i in xrange(1, len(value)):
if start[0] != value[i]:
break
start += value[i]
value = value[len(start):]
start = escape_tex(start, self.commandprefix)
# ... but do not escape inside comment.
value = start + value
elif self.mathescape:
# Only escape parts not inside a math environment.
parts = value.split('$')
in_math = False
for i, part in enumerate(parts):
if not in_math:
parts[i] = escape_tex(part, self.commandprefix)
in_math = not in_math
value = '$'.join(parts)
else:
value = escape_tex(value, self.commandprefix)
else:
value = escape_tex(value, self.commandprefix)
styles = []
while ttype is not Token:
try:
styles.append(t2n[ttype])
except KeyError:
# not in current style
styles.append(_get_ttype_name(ttype))
ttype = ttype.parent
styleval = '+'.join(reversed(styles))
if styleval:
spl = value.split('\n')
for line in spl[:-1]:
if line:
outfile.write("\\%s{%s}{%s}" % (cp, styleval, line))
outfile.write('\n')
if spl[-1]:
outfile.write("\\%s{%s}{%s}" % (cp, styleval, spl[-1]))
else:
outfile.write(value)
outfile.write(u'\\end{Verbatim}\n')
if self.full:
realoutfile.write(DOC_TEMPLATE %
dict(docclass = self.docclass,
preamble = self.preamble,
title = self.title,
encoding = self.encoding or 'latin1',
styledefs = self.get_style_defs(),
code = outfile.getvalue()))
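# Usage sketch (illustrative, not part of this module): rendering a complete
# standalone document through pygments' top-level API would look roughly like
#
#     from pygments import highlight
#     from pygments.lexers import PythonLexer
#
#     tex = highlight('print 42', PythonLexer(),
#                     LatexFormatter(full=True, linenos=True, title='Example'))
#     open('example.tex', 'w').write(tex)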
|
sysbot/pastedown
|
vendor/pygments/pygments/formatters/latex.py
|
Python
|
mit
| 13,946
|
from __future__ import absolute_import
from .browser import *
from .store import *
__all__ = []
__all__ += browser.__all__
__all__ += store.__all__
|
zejn/cubes
|
cubes/sql/__init__.py
|
Python
|
mit
| 152
|
#
# Copyright (c) 2008--2016 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
import sys
import ModeController
import Modes
class ModeControllerCreator:
    # If mode_list isn't set in the constructor, populate_list must be called before create_controller.
def __init__(self, mode_list=None):
self.mode_list = mode_list or []
        # A reference to a class object: the type of controller that create_controller will return.
self.controller_class = ModeController.ModeController
    # Set the class that the controller will be instantiated as. The class's
    # constructor must take no parameters.
def set_controller_class(self, controller_class):
self.controller_class = controller_class
    # Populate self.mode_list with concrete Mode implementations.
def populate_list(self, mode_list):
self.mode_list = mode_list
    # Using the Modes in self.mode_list, create, populate, and return a ModeController.
def create_controller(self):
controller = self.controller_class()
for m in self.mode_list:
controller.add_mode(m)
return controller
def get_controller_creator():
if sys.platform.find('sunos') > -1:
mode_list = [Modes.SolarisDeployMode(), Modes.SolarisDiffMode(), Modes.SolarisUploadMode(), Modes.SolarisMTimeUploadMode(), Modes.SolarisAllMode()]
else:
mode_list = [Modes.DeployMode(), Modes.DiffMode(), Modes.UploadMode(), Modes.MTimeUploadMode(), Modes.AllMode()]
controller = ModeControllerCreator(mode_list=mode_list)
controller.set_controller_class(ModeController.ConfigFilesModeController)
return controller
def get_run_controller_creator():
if sys.platform.find('sunos') > -1:
mode_list = [Modes.SolarisRunMode(), Modes.SolarisRunAllMode()]
else:
mode_list = [Modes.RunMode(), Modes.RunAllMode()]
controller = ModeControllerCreator(mode_list=mode_list)
return controller
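# Usage sketch (illustrative, not part of this module): callers obtain a
# platform-appropriate controller through the factory functions above, e.g.
#
#     creator = get_controller_creator()
#     controller = creator.create_controller()  # a ConfigFilesModeController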
|
shastah/spacewalk
|
client/tools/rhncfg/actions/ModeControllerCreator.py
|
Python
|
gpl-2.0
| 2,535
|
"""
Test Help links in LMS
"""
from common.test.acceptance.fixtures.course import CourseFixture
from common.test.acceptance.pages.lms.instructor_dashboard import InstructorDashboardPage
from common.test.acceptance.tests.discussion.helpers import CohortTestMixin
from common.test.acceptance.tests.lms.test_lms_instructor_dashboard import BaseInstructorDashboardTest
from common.test.acceptance.tests.studio.base_studio_test import ContainerBase
from common.test.acceptance.tests.helpers import (
assert_opened_help_link_is_correct,
url_for_help,
click_and_wait_for_window
)
class TestCohortHelp(ContainerBase, CohortTestMixin):
"""
Tests help links in Cohort page
"""
def setUp(self, is_staff=True):
super(TestCohortHelp, self).setUp(is_staff=is_staff)
self.enable_cohorting(self.course_fixture)
self.instructor_dashboard_page = InstructorDashboardPage(self.browser, self.course_id)
self.instructor_dashboard_page.visit()
self.cohort_management = self.instructor_dashboard_page.select_cohort_management()
def verify_help_link(self, href):
"""
Verifies that help link is correct
Arguments:
href (str): Help url
"""
help_element = self.cohort_management.get_cohort_help_element()
self.assertEqual(help_element.text, "What does this mean?")
click_and_wait_for_window(self, help_element)
assert_opened_help_link_is_correct(self, href)
def test_manual_cohort_help(self):
"""
        Scenario: The 'What does this mean?' help link is correct when a cohort is created manually.
        Given that I am at the 'Cohort' tab of the LMS instructor dashboard
        And I check 'Enable Cohorts'
        And I add a cohort, name it, choose Manual for Cohort Assignment Method and
        No content group for Associated Content Group, and save the cohort
        Then I see the UI text "Learners are added to this cohort only when..."
        followed by the "What does this mean?" link.
        And when I click the "What does this mean?" link, the help URL should end with
course_features/cohorts/cohort_config.html#assign-learners-to-cohorts-manually
"""
self.cohort_management.add_cohort('cohort_name')
href = url_for_help(
'course_author',
'/course_features/cohorts/cohort_config.html#assign-learners-to-cohorts-manually',
)
self.verify_help_link(href)
def test_automatic_cohort_help(self):
"""
        Scenario: The 'What does this mean?' help link is correct when a cohort is created automatically.
        Given that I am at the 'Cohort' tab of the LMS instructor dashboard
        And I check 'Enable Cohorts'
        And I add a cohort, name it, choose Automatic for Cohort Assignment Method and
        No content group for Associated Content Group, and save the cohort
        Then I see the UI text "Learners are added to this cohort automatically"
        followed by the "What does this mean?" link.
        And when I click the "What does this mean?" link, the help URL should end with
course_features/cohorts/cohorts_overview.html#all-automated-assignment
"""
self.cohort_management.add_cohort('cohort_name', assignment_type='random')
href = url_for_help(
'course_author',
'/course_features/cohorts/cohorts_overview.html#all-automated-assignment',
)
self.verify_help_link(href)
class InstructorDashboardHelp(BaseInstructorDashboardTest):
"""
Tests opening help from the general Help button in the instructor dashboard.
"""
def setUp(self):
super(InstructorDashboardHelp, self).setUp()
self.course_fixture = CourseFixture(**self.course_info).install()
self.log_in_as_instructor()
self.instructor_dashboard_page = self.visit_instructor_dashboard()
def test_instructor_dashboard_help(self):
"""
Scenario: Help button opens staff help
Given that I am viewing the Instructor Dashboard
When I click "Help"
Then I see help about the instructor dashboard in a new tab
"""
href = url_for_help('course_author', '/CA_instructor_dash_help.html')
help_element = self.instructor_dashboard_page.get_help_element()
click_and_wait_for_window(self, help_element)
assert_opened_help_link_is_correct(self, href)
|
BehavioralInsightsTeam/edx-platform
|
common/test/acceptance/tests/lms/test_lms_help.py
|
Python
|
agpl-3.0
| 4,387
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystone.common.sql.core import *
|
tylertian/Openstack
|
openstack F/keystone/keystone/common/sql/__init__.py
|
Python
|
apache-2.0
| 663
|
# we purposefully raise a NameError at the top level here
undefined_variable
|
DESHRAJ/fjord
|
vendor/packages/nose/functional_tests/test_multiprocessing/support/nameerror.py
|
Python
|
bsd-3-clause
| 79
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
v_net_iso.py
---------------------
Date : December 2015
Copyright : (C) 2015 by Médéric Ribreux
Email : medspx at medspx dot fr
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Médéric Ribreux'
__date__ = 'December 2015'
__copyright__ = '(C) 2015, Médéric Ribreux'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from .v_net import incorporatePoints, variableOutput
def processCommand(alg, parameters, context, feedback):
incorporatePoints(alg, parameters, context, feedback)
def processOutputs(alg, parameters, context, feedback):
outputParameter = {'output': ['output', 'line', 1, True]}
variableOutput(alg, outputParameter, parameters, context)
|
geopython/QGIS
|
python/plugins/processing/algs/grass7/ext/v_net_iso.py
|
Python
|
gpl-2.0
| 1,466
|
#!/usr/bin/python
#
# (c) 2015 Peter Sprygada, <psprygada@ansible.com>
#
# Copyright (c) 2016 Dell Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = """
---
module: dellos6_command
version_added: "2.2"
short_description: Run commands on remote devices running Dell OS6
description:
- Sends arbitrary commands to a Dell OS6 node and returns the results
read from the device. The M(dellos6_command) module includes an
argument that will cause the module to wait for a specific condition
before returning or timing out if the condition is not met.
- This module does not support running commands in configuration mode.
Please use M(dellos6_config) to configure Dell OS6 devices.
extends_documentation_fragment: dellos6
options:
commands:
description:
- List of commands to send to the remote dellos6 device over the
configured provider. The resulting output from the command
        is returned. If the I(wait_for) argument is provided, the
        module does not return until the condition is satisfied or
        the number of I(retries) has expired.
required: true
wait_for:
description:
- List of conditions to evaluate against the output of the
command. The task will wait for each condition to be true
before moving forward. If the conditional is not true
within the configured number of I(retries), the task fails.
See examples.
required: false
default: null
retries:
description:
- Specifies the number of retries a command should be tried
before it is considered failed. The command is run on the
target device every retry and evaluated against the
        I(wait_for) conditions.
required: false
default: 10
interval:
description:
- Configures the interval in seconds to wait between retries
of the command. If the command does not pass the specified
conditions, the interval indicates how long to wait before
trying the command again.
required: false
default: 1
"""
EXAMPLES = """
# Note: examples below use the following provider dict to handle
# transport and authentication to the node.
vars:
cli:
host: "{{ inventory_hostname }}"
username: admin
password: admin
transport: cli
tasks:
  - name: run show version on remote devices
dellos6_command:
commands: show version
provider "{{ cli }}"
- name: run show version and check to see if output contains Dell
dellos6_command:
commands: show version
wait_for: result[0] contains Dell
provider "{{ cli }}"
- name: run multiple commands on remote nodes
dellos6_command:
commands:
- show version
- show interfaces
provider "{{ cli }}"
- name: run multiple commands and evaluate the output
dellos6_command:
commands:
- show version
- show interfaces
wait_for:
- result[0] contains Dell
- result[1] contains Access
provider "{{ cli }}"
"""
RETURN = """
stdout:
description: The set of responses from the commands
returned: always
type: list
sample: ['...', '...']
stdout_lines:
description: The value of stdout split into a list
returned: always
type: list
sample: [['...', '...'], ['...'], ['...']]
failed_conditions:
description: The list of conditionals that have failed
returned: failed
type: list
sample: ['...', '...']
warnings:
description: The list of warnings (if any) generated by module based on arguments
returned: always
type: list
sample: ['...', '...']
"""
from ansible.module_utils.basic import get_exception
from ansible.module_utils.netcli import CommandRunner, FailedConditionsError
from ansible.module_utils.network import NetworkModule, NetworkError
import ansible.module_utils.dellos6
def to_lines(stdout):
for item in stdout:
if isinstance(item, basestring):
item = str(item).split('\n')
yield item
def main():
spec = dict(
commands=dict(type='list', required=True),
wait_for=dict(type='list'),
retries=dict(default=10, type='int'),
interval=dict(default=1, type='int')
)
module = NetworkModule(argument_spec=spec,
connect_on_load=False,
supports_check_mode=True)
commands = module.params['commands']
conditionals = module.params['wait_for'] or list()
warnings = list()
runner = CommandRunner(module)
for cmd in commands:
if module.check_mode and not cmd.startswith('show'):
warnings.append('only show commands are supported when using '
'check mode, not executing `%s`' % cmd)
else:
if cmd.startswith('conf'):
module.fail_json(msg='dellos6_command does not support running '
'config mode commands. Please use '
'dellos6_config instead')
runner.add_command(cmd)
for item in conditionals:
runner.add_conditional(item)
runner.retries = module.params['retries']
runner.interval = module.params['interval']
try:
runner.run()
except FailedConditionsError:
exc = get_exception()
module.fail_json(msg=str(exc), failed_conditions=exc.failed_conditions)
except NetworkError:
exc = get_exception()
module.fail_json(msg=str(exc))
result = dict(changed=False)
result['stdout'] = list()
for cmd in commands:
try:
output = runner.get_command(cmd)
except ValueError:
output = 'command not executed due to check_mode, see warnings'
result['stdout'].append(output)
result['warnings'] = warnings
result['stdout_lines'] = list(to_lines(result['stdout']))
module.exit_json(**result)
if __name__ == '__main__':
main()
|
romain-dartigues/ansible-modules-core
|
network/dellos6/dellos6_command.py
|
Python
|
gpl-3.0
| 6,526
|