code stringlengths 3 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int64 3 1.05M |
|---|---|---|---|---|---|
# coding=utf-8
# Copyright 2020 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding=utf-8
# Copyright 2018 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test for tf_agents.policies.tf_py_policy."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing.absltest import mock
import numpy as np
import tensorflow as tf # pylint: disable=g-explicit-tensorflow-version-import
from tf_agents.policies import py_policy
from tf_agents.policies import random_py_policy
from tf_agents.policies import tf_py_policy
from tf_agents.specs import array_spec
from tf_agents.specs import tensor_spec
from tf_agents.trajectories import policy_step
from tf_agents.trajectories import time_step as ts
from tf_agents.utils import nest_utils
from tf_agents.utils import test_utils
class TFPyPolicyTest(test_utils.TestCase):
    """Tests for tf_py_policy.TFPyPolicy, the TF wrapper around Python policies."""

    def testRandomPyPolicyGeneratesActionTensors(self):
        # A wrapped random py policy should emit tensor actions within the
        # bounds of the (array) action spec, with a leading batch dim of 1.
        array_action_spec = array_spec.BoundedArraySpec((7,), np.int32, -10, 10)
        observation = tf.ones([3], tf.float32)
        time_step = ts.restart(observation)
        observation_spec = tensor_spec.TensorSpec.from_tensor(observation)
        time_step_spec = ts.time_step_spec(observation_spec)
        tf_py_random_policy = tf_py_policy.TFPyPolicy(
            random_py_policy.RandomPyPolicy(time_step_spec=time_step_spec,
                                            action_spec=array_action_spec))
        # The policy expects batched input; add the batch dimension explicitly.
        batched_time_step = nest_utils.batch_nested_tensors(time_step)
        action_step = tf_py_random_policy.action(time_step=batched_time_step)
        action, new_policy_state = self.evaluate(
            [action_step.action, action_step.state])
        self.assertEqual((1,) + array_action_spec.shape, action.shape)
        self.assertTrue(np.all(action >= array_action_spec.minimum))
        self.assertTrue(np.all(action <= array_action_spec.maximum))
        # Stateless policy: the returned state is the empty tuple.
        self.assertEqual(new_policy_state, ())

    def testAction(self):
        # Verifies the wrapper unbatches tensors before delegating to the py
        # policy's action() and re-batches/converts the results to tensors.
        py_observation_spec = array_spec.BoundedArraySpec((3,), np.int32, 1, 1)
        py_time_step_spec = ts.time_step_spec(py_observation_spec)
        py_action_spec = array_spec.BoundedArraySpec((7,), np.int32, 1, 1)
        py_policy_state_spec = array_spec.BoundedArraySpec((5,), np.int32, 0, 1)
        py_policy_info_spec = array_spec.BoundedArraySpec((3,), np.int32, 0, 1)
        mock_py_policy = mock.create_autospec(py_policy.PyPolicy)
        mock_py_policy.time_step_spec = py_time_step_spec
        mock_py_policy.action_spec = py_action_spec
        mock_py_policy.policy_state_spec = py_policy_state_spec
        mock_py_policy.info_spec = py_policy_info_spec
        expected_py_policy_state = np.ones(py_policy_state_spec.shape,
                                           py_policy_state_spec.dtype)
        # Batched (leading dim 1) numpy structures mirroring the tensor input.
        expected_py_time_step = tf.nest.map_structure(
            lambda arr_spec: np.ones((1,) + arr_spec.shape, arr_spec.dtype),
            py_time_step_spec)
        expected_py_action = np.ones((1,) + py_action_spec.shape,
                                     py_action_spec.dtype)
        expected_new_py_policy_state = np.zeros(py_policy_state_spec.shape,
                                                py_policy_state_spec.dtype)
        expected_py_info = np.zeros(py_policy_info_spec.shape,
                                    py_policy_info_spec.dtype)
        mock_py_policy.action.return_value = policy_step.PolicyStep(
            nest_utils.unbatch_nested_array(expected_py_action),
            expected_new_py_policy_state, expected_py_info)
        tf_mock_py_policy = tf_py_policy.TFPyPolicy(mock_py_policy)
        time_step = tf.nest.map_structure(
            lambda arr_spec: tf.ones((1,) + arr_spec.shape, arr_spec.dtype),
            py_time_step_spec)
        action_step = tf_mock_py_policy.action(
            time_step, tf.ones(py_policy_state_spec.shape, tf.int32))
        py_action_step = self.evaluate(action_step)
        self.assertEqual(1, mock_py_policy.action.call_count)
        # The py policy must have been called with unbatched numpy arrays.
        np.testing.assert_equal(
            mock_py_policy.action.call_args[1]['time_step'],
            nest_utils.unbatch_nested_array(expected_py_time_step))
        np.testing.assert_equal(mock_py_policy.action.call_args[1]['policy_state'],
                                expected_py_policy_state)
        np.testing.assert_equal(py_action_step.action, expected_py_action)
        np.testing.assert_equal(py_action_step.state, expected_new_py_policy_state)
        np.testing.assert_equal(py_action_step.info, expected_py_info)

    def testZeroState(self):
        # get_initial_state() should delegate to the py policy exactly once
        # and pass the result through unchanged.
        policy_state_length = 5
        batch_size = 3
        mock_py_policy = mock.create_autospec(py_policy.PyPolicy)
        observation_spec = array_spec.ArraySpec((3,), np.float32)
        mock_py_policy.time_step_spec = ts.time_step_spec(observation_spec)
        mock_py_policy.action_spec = array_spec.BoundedArraySpec(
            (7,), np.int32, 1, 1)
        py_policy_state_spec = array_spec.BoundedArraySpec((policy_state_length,),
                                                           np.int32, 1, 1)
        # Make the mock policy and reset return value.
        mock_py_policy.policy_state_spec = py_policy_state_spec
        mock_py_policy.info_spec = ()
        expected_py_policy_state = np.zeros(
            [batch_size] + list(py_policy_state_spec.shape),
            py_policy_state_spec.dtype)
        mock_py_policy.get_initial_state.return_value = expected_py_policy_state
        tf_mock_py_policy = tf_py_policy.TFPyPolicy(mock_py_policy)
        initial_state = tf_mock_py_policy.get_initial_state(batch_size=batch_size)
        initial_state_ = self.evaluate(initial_state)
        self.assertEqual(1, mock_py_policy.get_initial_state.call_count)
        np.testing.assert_equal(initial_state_, expected_py_policy_state)

    def testDistributionRaisesNotImplementedError(self):
        # distribution() has no py equivalent, so the wrapper must raise.
        mock_tf_py_policy = tf_py_policy.TFPyPolicy(
            self._get_mock_py_policy())
        observation = tf.ones([5], tf.float32)
        time_step = ts.restart(observation)
        with self.assertRaises(NotImplementedError):
            mock_tf_py_policy.distribution(time_step=time_step)

    def testVariables(self):
        # Python policies carry no TF variables; the wrapper reports none.
        mock_tf_py_policy = tf_py_policy.TFPyPolicy(
            self._get_mock_py_policy())
        np.testing.assert_equal(mock_tf_py_policy.variables(), [])

    def testPyPolicyIsBatchedTrue(self):
        # With py_policy_is_batched=True the wrapper forwards the whole batch
        # to the py policy instead of unbatching first.
        action_dims = 5
        observation_dims = 3
        batch_size = 2
        array_action_spec = array_spec.BoundedArraySpec((action_dims,), np.int32,
                                                        -10, 10)
        observation_spec = array_spec.ArraySpec((observation_dims,), np.float32)
        array_time_step_spec = ts.time_step_spec(observation_spec)
        observation = tf.ones([batch_size, observation_dims], tf.float32)
        time_step = ts.restart(observation, batch_size=batch_size)
        tf_py_random_policy = tf_py_policy.TFPyPolicy(
            random_py_policy.RandomPyPolicy(time_step_spec=array_time_step_spec,
                                            action_spec=array_action_spec),
            py_policy_is_batched=True)
        action_step = tf_py_random_policy.action(time_step=time_step)
        action = self.evaluate(action_step.action)
        self.assertEqual(action.shape, (batch_size, action_dims))

    def _get_mock_py_policy(self):
        """Builds an autospec'd PyPolicy with simple tensor specs attached."""
        mock_py_policy = mock.create_autospec(py_policy.PyPolicy)
        observation_spec = tensor_spec.TensorSpec([5], dtype=tf.float32)
        mock_py_policy.time_step_spec = ts.time_step_spec(observation_spec)
        mock_py_policy.action_spec = tensor_spec.BoundedTensorSpec(
            [3], tf.float32, -1.0, 1.0)
        mock_py_policy.policy_state_spec = ()
        mock_py_policy.info_spec = ()
        return mock_py_policy
if __name__ == '__main__':
    # Delegate to TF's test runner so TF-specific setup/teardown happens.
    tf.test.main()
| tensorflow/agents | tf_agents/policies/tf_py_policy_test.py | Python | apache-2.0 | 8,513 |
#!/usr/bin/env python
# Name: wordsworth
# Description: Frequency analysis tool
# Author: autonomoid
# Date: 2014-06-22
# Licence: GPLv3
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import re
import collections
# Font effects --> fancy console colours in bash
underline = "\x1b[1;4m"
black = "\x1b[1;30m"
red = "\x1b[1;31m"
green = "\x1b[1;32m"
yellow = "\x1b[1;33m"
blue = "\x1b[1;34m"
purple = "\x1b[1;35m"
turquoise = "\x1b[1;36m"
normal = "\x1b[0m"
class wordsworth:
    """Frequency-analysis engine.

    Collects word/character statistics and n-word tuple counts from a
    tokenised word list, then prints a colourised report and writes a plain
    copy to <input>-stats.txt.

    BUG FIX: __init__ originally assigned the parsed arguments to a *local*
    ``args`` (never ``self.args``) and every method silently relied on a
    module-level ``args`` global created by the __main__ block.  The
    namespace is now stored on the instance and all methods read
    ``self.args``, so the class works without that global.  Prints are
    parenthesised (single-argument, identical output on Python 2) and
    ``iterkeys()`` is replaced by iterating the dict directly, so the class
    is also Python-3 compatible; runtime output is unchanged.
    """

    # NOTE(review): these class-level attributes are shared defaults; the
    # mutable ones (word_stats, counters, ...) are safe only while a single
    # instance is used per process, as in the __main__ block.
    args = 0
    ignore_list = []
    out = 0
    words = []
    previous_word = ''
    previous_pair = ''
    previous_triple = ''
    previous_quad = ''
    max_n_word = 4
    n_words = []
    prev_n_words = []
    counters = []
    word_stats = {
        'total_chars': 0,
        'total_words': 0,
        'max_length': 0,
        'min_length': 999,
        'mean_length': -1,
        'longest_word': '',
        'shortest_word': '',
        'char_counts': {
            'a': 0.0, 'b': 0.0, 'c': 0.0, 'd': 0.0, 'e': 0.0, 'f': 0.0,
            'g': 0.0, 'h': 0.0, 'i': 0.0, 'j': 0.0, 'k': 0.0, 'l': 0.0,
            'm': 0.0, 'n': 0.0, 'o': 0.0, 'p': 0.0, 'q': 0.0, 'r': 0.0,
            's': 0.0, 't': 0.0, 'u': 0.0, 'v': 0.0, 'w': 0.0, 'x': 0.0,
            'y': 0.0, 'z': 0.0
        },
        'char_percentages': {
            'a': 0.0, 'b': 0.0, 'c': 0.0, 'd': 0.0, 'e': 0.0, 'f': 0.0,
            'g': 0.0, 'h': 0.0, 'i': 0.0, 'j': 0.0, 'k': 0.0, 'l': 0.0,
            'm': 0.0, 'n': 0.0, 'o': 0.0, 'p': 0.0, 'q': 0.0, 'r': 0.0,
            's': 0.0, 't': 0.0, 'u': 0.0, 'v': 0.0, 'w': 0.0, 'x': 0.0,
            'y': 0.0, 'z': 0.0
        },
        'lexical_density': -1
    }

    def __init__(self, commandline_args):
        # Store the argparse namespace on the instance (was a lost local).
        self.args = commandline_args
        # str(None) -> 'None', so an unset --ignore yields ['None'], which
        # matches the original behaviour.
        self.ignore_list = str(self.args.ignore_list).split(",")

    def print_n_word_frequencies(self, n_word_counter):
        """Print/log the most frequent n-word tuples held by ``n_word_counter``."""
        total_entries = sum(n_word_counter.values())
        unique_entries = len(n_word_counter)
        if total_entries > 0:
            m = n_word_counter.most_common(min(unique_entries, self.args.top_n))
            # Recover n from the first entry (keys are space-joined words).
            n = len(m[0][0].split(' '))
            print('\n===' + blue + ' Commonest ' + str(n) + '-words' + normal + '===')
            self.out.write('\n=== Commonest ' + str(n) + '-words ===\n')
            for i in range(0, min(unique_entries, self.args.top_n)):
                n_word = m[i][0]
                count = m[i][1]
                perc = 100.0 * (count / float(total_entries))
                print (str(i + 1) + ' = ' + purple + n_word +
                       normal + ' (' + purple + str(count).split('.')[0] + normal +
                       ' = ' + purple + str(perc)[:5] + '%' + normal + ')')
                self.out.write(str(i + 1) + ' = ' + n_word + ' (' + str(count).split('.')[0] +
                               ' = ' + str(perc)[:5] + '%)\n')

    def print_results(self):
        """Dump all collected statistics to stdout (colour) and the stats file."""
        self.out = open(self.args.inputfile.split('.')[0] + '-stats.txt', 'w')
        print('\n===' + blue + ' RESULTS ' + normal + '===')
        self.out.write('=== RESULTS ===\n')
        print('File = ' + purple + str(self.args.inputfile) + normal)
        self.out.write('File = ' + str(self.args.inputfile) + '\n')
        print ('Longest word = ' + purple + str(self.word_stats['longest_word']) + normal +
               ' (' + purple + str(self.word_stats['max_length']) + normal + ')')
        self.out.write('Longest word = ' + str(self.word_stats['longest_word']) +
                       ' (' + str(self.word_stats['max_length']) + ')\n')
        print ('Shortest word = ' + purple + str(self.word_stats['shortest_word']) + normal +
               ' (' + purple + str(self.word_stats['min_length']) + normal + ')')
        self.out.write('Shortest word = ' + str(self.word_stats['shortest_word']) +
                       ' (' + str(self.word_stats['min_length']) + ')\n')
        print ('Mean word length /chars = ' + purple + str(self.word_stats['mean_length']) +
               normal)
        self.out.write('Mean word length /chars = ' + str(self.word_stats['mean_length']) + '\n')
        print ('Total words parsed = ' + purple +
               str(self.word_stats['total_words']).split('.')[0] + normal)
        self.out.write('Total words parsed = ' +
                       str(self.word_stats['total_words']).split('.')[0] + '\n')
        print ('Total chars parsed = ' + purple + str(self.word_stats['total_chars']) +
               normal)
        self.out.write('Total chars parsed = ' + str(self.word_stats['total_chars']) + '\n')
        # One "commonest" table per tuple length.
        for i in range(self.max_n_word):
            self.print_n_word_frequencies(self.counters[i])
        total_dev = 0.0
        print('\n===' + blue + ' FREQUENCY ANALYSIS ' + normal + '===')
        self.out.write('\n=== FREQUENCY ANALYSIS ===\n')
        # Iterating the dict via sorted() is equivalent to the original
        # sorted(iterkeys()) and works on Python 2 and 3.
        for char in sorted(self.word_stats['char_percentages']):
            bar = ''
            perc = self.word_stats['char_percentages'][char]
            # Percentage deviation from random distribution of characters.
            dev = 100.0 * (abs((100.0 / 26.0) - perc) / (100.0 / 26.0))
            total_dev += dev
            for i in range(0, int(perc)):
                bar += '#'
            print (char + ' |' + red + bar + normal + ' ' + str(perc)[:4] +
                   '% (' + str(dev)[:4] + '% deviation from random)')
            self.out.write(char + ' |' + bar + ' ' + str(perc)[:4] + '% (' +
                           str(dev)[:4] + '% deviation from random)\n')
        print ('\nTotal percentage deviation from random = ' +
               str(total_dev).split('.')[0] + '%')
        self.out.write('\nTotal percentage deviation from random = ' +
                       str(total_dev).split('.')[0] + '%')
        average_dev = total_dev / 26.0
        print ('Average percentage deviation from random = ' +
               str(average_dev)[:4] + '%')
        self.out.write('\nAverage percentage deviation from random = ' +
                       str(average_dev)[:4] + '%')
        print ('Lexical density = ' + str(self.word_stats['lexical_density'])[:5] + '%')
        self.out.write('\nLexical density = ' + str(self.word_stats['lexical_density'])[:5] + '%')
        print('\nWritten results to ' + self.args.inputfile.split('.')[0] + '-stats.txt\n')
        self.out.close()

    def init_word_counters(self):
        """Size the n-word buffers and counters from the requested tuple length."""
        self.max_n_word = self.args.max_n_word
        self.n_words = ['' for i in range(self.max_n_word)]
        self.prev_n_words = ['' for i in range(self.max_n_word)]
        self.counters = [collections.Counter() for i in range(self.max_n_word)]

    def read_file(self):
        """Tokenise the input file into lowercase words (optionally with digits)."""
        print("[+] Analysing '" + self.args.inputfile + "'")
        if self.args.allow_digits:
            self.words = re.findall(r"['\-\w]+", open(self.args.inputfile).read().lower())
        else:
            self.words = re.findall(r"['\-A-Za-z]+", open(self.args.inputfile).read().lower())

    def compute_stats(self):
        """Single pass over self.words accumulating every statistic."""
        for word in self.words:
            if word in self.ignore_list:
                continue
            word = word.strip(r"&^%$#@!")
            # Allow hyphenated words, but not hyphens as words on their own.
            if word == '-':
                continue
            length = len(word)
            # Record longest word length
            if length > self.word_stats['max_length']:
                self.word_stats['max_length'] = length
                self.word_stats['longest_word'] = word
            # Record shortest word length
            if length < self.word_stats['min_length']:
                self.word_stats['min_length'] = length
                self.word_stats['shortest_word'] = word
            # Keep track of the total number of words and chars read.
            self.word_stats['total_chars'] += length
            self.word_stats['total_words'] += 1.0
            # Note the characters in each word.
            for char in word:
                if char.lower() in self.word_stats['char_counts']:
                    self.word_stats['char_counts'][char.lower()] += 1.0
            # Tally n-word tuples: extend each previous (i)-tuple with this word.
            for i in range(1, self.max_n_word):
                if self.prev_n_words[i - 1] != '':
                    self.n_words[i] = self.prev_n_words[i - 1] + ' ' + word
                    self.counters[i][self.n_words[i]] += 1
            self.n_words[0] = word
            self.counters[0][word] += 1
            for i in range(0, self.max_n_word):
                self.prev_n_words[i] = self.n_words[i]
        # Calculate the mean word length (total_words is a float, so this is
        # true division on both Python 2 and 3).
        self.word_stats['mean_length'] = self.word_stats['total_chars'] / self.word_stats['total_words']
        # Calculate relative character frequencies
        for char in self.word_stats['char_counts']:
            char_count = self.word_stats['char_counts'][char]
            total_chars = self.word_stats['total_chars']
            percentage = 100.0 * (char_count / total_chars)
            self.word_stats['char_percentages'][char] = percentage
        # Calculate the lexical density of the text.
        total_unique_words = len(self.counters[0])
        total_words = sum(self.counters[0].values())
        self.word_stats['lexical_density'] = 100.0 * total_unique_words / float(total_words)
if __name__ == '__main__':
    import argparse
    # Command-line interface: the input file is required, everything else optional.
    parser = argparse.ArgumentParser(description='Perform letter, word and n-tuple frequency analysis on text files.')
    parser.add_argument('--filename', '-f', dest='inputfile', required=True, help='Text file to parse.')
    parser.add_argument('--ntuple', '-n', dest='max_n_word', required=False, default=4, type=int, help='The maximum length n-tuple of words. Default is 4.')
    parser.add_argument('--top', '-t', dest='top_n', required=False, default=20, type=int, help='List the top t most frequent n-words. Default is 20.')
    # NOTE(review): allow_digits is parsed as a raw string, so any non-empty
    # value (including "false") is truthy -- confirm intended behaviour.
    parser.add_argument('--allow-digits', '-d', dest='allow_digits', default=False, required=False, help='Allow digits to be parsed (true/false). Default is false.')
    parser.add_argument('--ignore', '-i', dest='ignore_list', required=False, help='Comma-delimted list of things to ignore')
    args = parser.parse_args()
    # Pipeline: tokenise, accumulate statistics, then report.
    w = wordsworth(args)
    w.init_word_counters()
    w.read_file()
    w.compute_stats()
    w.print_results()
| Geoion/wordsworth | wordsworth.py | Python | gpl-3.0 | 11,214 |
# -------------------------------------------------------------------------
# * This program reads a (text) data file and computes the mean, minimum,
# * maximum, and standard deviation. The one-pass algorithm used is due to
# * B.P. Welford (Technometrics, vol. 4, no 3, August 1962.)
# *
# * NOTE: the text data file is assumed to be in a one-value-per-line format
# * with NO blank lines in the file. The data can be either fixed point
# * (integer valued), or floating point (real valued).
# *
# * To use the program, compile it to disk to produce uvs. Then at a command
# * line prompt, uvs can be used in three ways.
# *
# * (1) To have uvs read a disk data file, say uvs.dat (in the format above),
# * at a command line prompt use '<' redirection as:
# *
# * python uvs < uvs.dat
# *
# * (2) To have uvs filter the numerical output of a program, say test, at a
# * command line prompt use '|' pipe as:
# *
# * cat uvs.dat | python uvs.py (best way for Powershell,cmd)
# *
# * (3) To use uvs with keyboard input, at a command line prompt enter:
# *
# * uvs
# *
# * Then enter the data -- one value per line -- being sure to remember to
# * signify an end-of-file. In Unix/Linux, signify an end-of-file by
# * entering ^d (Ctrl-d) as the last line of input.
# *
# * Name : uvs.c (Univariate Statistics)
# * Authors : Steve Park & Dave Geyer
# * Language : ANSI C
# * Latest Revision : 9-28-98
# # Translated by : Philip Steele
# # Language : Python 3.3
# # Latest Revision : 3/26/14
# * -------------------------------------------------------------------------
# */
#include <stdio.h>
#include <math.h>
from math import sqrt
import sys
# Running aggregates for Welford's one-pass mean/variance algorithm.
# The accumulators were originally named ``sum``, ``min`` and ``max``;
# they are renamed here so they no longer shadow the Python builtins.
sq_sum = 0.0  # accumulated sum of squared deviations from the running mean

# Prime the loop with the first value (if any): it defines the initial
# mean, minimum and maximum.  readline() returns '' only at end-of-file.
data = sys.stdin.readline()
if (data):
    data = float(data)
    index = 1
    mean = data
    minimum = data
    maximum = data
else:
    index = 0

data = sys.stdin.readline()
while (data):
    data = float(data)
    index += 1
    diff = data - mean
    # Welford update: fold in the squared deviation, then nudge the mean.
    sq_sum += diff * diff * (index - 1.0) / index
    mean += diff / index
    if (data > maximum):
        maximum = data
    elif (data < minimum):
        minimum = data
    data = sys.stdin.readline()
#EndWhile

if (index > 0):
    # Population standard deviation (divide by n, not n-1).
    stdev = sqrt(sq_sum / index)
    print("\nfor a sample of size {0:d}".format(index))
    print("mean ................. = {0:7.3f}".format(mean))
    print("standard deviation ... = {0:7.3f}".format(stdev))
    print("minimum .............. = {0:7.3f}".format(minimum))
    print("maximum .............. = {0:7.3f}".format(maximum))
# C output:
# for a sample of size 1000
# mean ................. = 3.042
# standard deviation ... = 1.693
# minimum .............. = 0.207
# maximum .............. = 11.219
| pdsteele/DES-Python | uvs.py | Python | mit | 2,844 |
#!/usr/bin/env python
import json, sys, struct
length = 64
registers = 128
# fields = {
# "opcode": 7,
# "register": 7,
# "43bit immediate": 43,
# "50bit immediate": 50
# }
# form:
# %i = instruction lexical code (eg. mov, add, mul, shr)
# %r = integer register (eg. r0, r1, r2...)
# %n = integer literal (eg. #0, #10, #256)
# %q = floating point register (eg. q0, q1, q2...)
# %f = floating point literal (eg. #100.0, #3.1415...)
# Per-instruction defaults: assign_opcodes() merges every entry of ``ins``
# over this dict, so a spec only needs to list the keys it overrides.
defaults = {
    "opcode": [],             # filled in by assign_opcodes(); one opcode per form
    "form": ["%i %r %r %r"],  # assembly syntax templates (see key above)
    "writes": [0],            # operand indices the instruction writes to
    "group": "arithmetic",    # functional-unit group
    "cycles": 1,              # execution latency
    "width": 1,               # operand width(s); a list gives per-operand widths
    "blocking": True
}
#* Instruction Specification
#*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~*#
ins = {
# move instructions
"mov": {
"form": [
"%i %r %r",
"%i %r %n",
"%i %q %q",
"%i %q %f"
],
},
# arithmetic instructions
"add": {
# adds two registers/numbers together
"form": [
"%i %r %r %r",
"%i %r %r %n",
"%i %q %q %q",
"%i %q %q %f"
]
},
"sub": {
# subtracts two registers/numbers together
"form": [
"%i %r %r %r",
"%i %r %r %n",
"%i %q %q %q",
"%i %q %q %f"
]
},
"mul": {
# multiplies two registers/numbers together
"form": [
"%i %r %r %r",
"%i %r %r %n",
"%i %q %q %q",
"%i %q %q %f"
]
},
"div": {
# divides two registers/numbers together
"form": [
"%i %r %r %r",
"%i %r %r %n",
"%i %q %q %q",
"%i %q %q %f"
],
"cycles": 2
},
"mod": {
# takes the modulus of a number
"form": [
"%i %r %r %r",
"%i %r %r %n",
],
"cycles": 2
},
"shl": {
"form": [
"%i %r %r %r",
"%i %r %r %n",
]
},
"shr": {
"form": [
"%i %r %r %r",
"%i %r %r %n",
]
},
# vector arithmetic instructions
"v.mov": {
# moves two vector-vector registers/numbers together
"form": [
"%i %r %r",
"%i %r %n",
"%i %q %q",
"%i %q %f"
],
"group": "vector-arithmetic",
"width": 4
},
"vs.mov": {
# moves two vector-scalar registers together
"form": ["%i %r %r"],
"group": "vector-arithmetic",
"width": [4, 1]
},
"v.add": {
# adds two registers/numbers together
"form": [
"%i %r %r %r",
"%i %r %r %n",
"%i %q %q %q",
"%i %q %q %f"
],
"group": "vector-arithmetic",
"width": 4
},
"vs.add": {
# adds two vector-scalar registers together
"form": ["%i %r %r %r"],
"group": "vector-arithmetic",
"width": [4, 4, 1]
},
"v.sub": {
# subtracts two registers/numbers together
"form": [
"%i %r %r %r",
"%i %r %r %n",
"%i %q %q %q",
"%i %q %q %f"
],
"group": "vector-arithmetic",
"width": 4
},
"vs.sub": {
# subtracts two vector-scalar registers together
"form": ["%i %r %r %r"],
"group": "vector-arithmetic",
"width": [4, 4, 1]
},
"v.mul": {
# multiplies two registers/numbers together
"form": [
"%i %r %r %r",
"%i %r %r %n",
"%i %q %q %q",
"%i %q %q %f"
],
"group": "vector-arithmetic",
"width": 4
},
"vs.mul": {
# multiplies two vector-scalar registers together
"form": ["%i %r %r %r"],
"group": "vector-arithmetic",
"width": [4, 4, 1]
},
"v.div": {
# divides two registers/numbers together
"form": [
"%i %r %r %r",
"%i %r %r %n",
"%i %q %q %q",
"%i %q %q %f"
],
"group": "vector-arithmetic",
"width": 4,
"cycles": 2
},
"v.mod": {
# divides two registers/numbers together
"form": [
"%i %r %r %r",
"%i %r %r %n",
],
"group": "vector-arithmetic",
"width": 4,
"cycles": 2
},
"vs.div": {
# divides two vector-scalar registers together
"form": ["%i %r %r %r"],
"group": "vector-arithmetic",
"width": [4, 4, 1],
"cycles": 2
},
# comparison
"gt": {
"form": ["%i %r %r %r", "%i %q %q %q"]
},
"gte": {
"form": ["%i %r %r %r", "%i %q %q %q"]
},
"eq": {
"form": ["%i %r %r %r", "%i %q %q %q"]
},
# load/store
"load": {
"form": [
"%i %r %r",
"%i %r %n",
"%i %q %r",
"%i %q %n"
],
"group": "load-store",
"cycles": 1
},
"store": {
"form": [
"%i %r %r",
"%i %r %n",
"%i %q %r",
"%i %q %n"
],
"writes": [],
"group": "load-store",
"cycles": 1
},
"push": {
"form": ["%i %r", "%i %q"],
"writes": [],
"group": "load-store",
},
"pop": {
"form": ["%i %r", "%i %q"],
"writes": [0],
"group": "load-store",
},
"spr": {
"form": ["%i %r", "%i %n"],
"writes": [],
"group": "load-store",
},
# IO
"io.read": {
"form": ["%i %r", "%i %q"],
"writes": [0],
"group": "load-store",
},
"io.write": {
"form": ["%i %r", "%i %n", "%i %q", "%i %f"],
"writes": [],
"group": "load-store",
},
"io.getc": {
"form": ["%i", "%i %r"],
"writes": [0],
"group": "load-store",
},
"io.putc": {
"form": ["%i %r", "%i %n"],
"writes": [],
"group": "load-store",
},
# branch
"jmp": {
"form": [
"%i %r",
"%i %n"
],
"writes": [],
"group": "branch"
},
"br": {
"form": [
"%i %r %r",
"%i %r %n",
],
"writes": [],
"group": "branch"
},
"nbr": {
"form": [
"%i %r %r",
"%i %r %n",
],
"writes": [],
"group": "branch",
},
"call": {
"form": [
"%i %r",
"%i %n"
],
"writes": [],
"group": "branch"
},
"return": {
"form": ["%i"],
"writes": [],
"group": "branch"
},
# logical (these are bitwise)
"and": {
"form": [
"%i %r %r %r",
"%i %r %r %n",
]
},
"or": {
"form": [
"%i %r %r %r",
"%i %r %r %n",
]
},
"xor": {
"form": [
"%i %r %r %r",
"%i %r %r %n",
]
},
"not": {
"form": [
"%i %r %r",
"%i %r %n",
]
},
# # shift
# "sl": {},
# "sr": {},
# simulator instructions
"nop": {
"form": ["%i"],
"writes": [],
},
"halt": {
"opcode": [0],
"form": ["%i"],
"writes": [],
"group": "arithmetic",
"cycles": 1,
"width": 1,
"blocking": True
},
"print": {
"form": ["%i", "%i %r"]
}
# "": {},
}
# Reverse map opcode -> instruction name; "halt" is pinned to opcode 0.
rins = {0: "halt"}

def assign_opcodes():
    """Assign a unique sequential opcode to every form of every instruction.

    Iterates instructions in sorted name order (skipping "halt", whose
    opcode is fixed at 0), merges each spec over ``defaults``, appends one
    opcode per form and records the reverse mapping in ``rins``.  Aborts
    (after printing an error) once the 7-bit opcode space is exhausted.
    """
    keys = sorted([k for k in ins.keys() if k != "halt"])
    counter = 1
    for key in keys:
        # BUG FIX: dict(defaults.items() + ins[key].items()) only works on
        # Python 2 (dict views are not addable on Python 3).  Copy-and-update
        # produces the identical merged dict (spec values win) portably.
        merged = dict(defaults)
        merged.update(ins[key])
        ins[key] = merged
        ins[key]["opcode"] = []
        # NOTE(review): after merging with defaults this can never trigger,
        # since defaults always supplies a "form"; kept for parity.
        if "form" not in ins[key]:
            print("Error: All instructions must have atleast 1 form!")
        for s in ins[key]["form"]:
            ins[key]["opcode"].append(counter)
            rins[counter] = key
            counter += 1
            if counter > pow(2, 7):
                print("Error: Too many instructions to fit in the opcode!")
                return
# returns a field from an integer
def field(bytes, length, offset):
    """Extract ``length`` bits, ``offset`` bits from the MSB of a 64-bit
    word, returned right-aligned.

    NOTE(review): the parameter name shadows the ``bytes`` builtin; kept
    for call-site compatibility.
    """
    shift = 64 - offset - length
    mask = ((1 << length) - 1) << shift
    return (bytes & mask) >> shift
# convert from twos complement to decimal
def twos_to_dec(num, bits=32):
    """Interpret the low ``bits`` bits of ``num`` as a two's-complement
    signed integer and return its decimal value (clamped at the minimum).

    BUG FIX: the half-range was computed with ``/``, which yields a float
    on Python 3 and could leak float results out of an all-integer
    function.  ``//`` is identical on Python 2 ints and stays an int
    everywhere.
    """
    n = num & ((1 << bits) - 1)
    half = (2 ** bits) // 2
    return max(n if n < half else n - (2 ** bits), -half)
# converts decimal to twos complement
def dec_to_twos(num, bits=32):
    """Return the unsigned ``bits``-bit two's-complement encoding of ``num``."""
    modulus = 1 << bits
    return (num + modulus) % modulus
def float_to_bits(f):
    """Reinterpret ``f`` as its IEEE-754 single-precision bit pattern,
    returned as a signed 32-bit integer (big-endian pack/unpack)."""
    packed = struct.pack('>f', f)
    (bits,) = struct.unpack('>l', packed)
    return bits
def bits_to_float(b):
    """Inverse of float_to_bits: reinterpret a signed 32-bit integer as the
    IEEE-754 single-precision float with that bit pattern."""
    packed = struct.pack('>l', b)
    (value,) = struct.unpack('>f', packed)
    return value
#* Main - For testing really
#*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~*#
# Smoke test: assign opcodes and report how many were allocated.
# NOTE: Python 2 print statements.
if __name__ == "__main__":
    print "language testing"
    assign_opcodes()
    print "total number of instructions:",len(rins)
    # sys.stdout.write(json.dumps(ins, sort_keys=True, indent=4, separators=(',', ': ')))
    # print float_to_bits(100.0)
    # for k,i in zip(ins.keys(),ins.values()):
    #     print k, i["opcode"]
# # print bin(i["opcode"]) | brgmnn/uob-cpu-simulator | language.py | Python | mit | 7,347 |
# A comment, this is so you can read your program later
# Anything after # is ignored by python
print "I Could have code like this" #and the comment after is ignored
# You can also use a comment to "disable" or comment out a piece of code:
# print "This won't run."
print "This will run." | Torugo/learnpythonthehardway | ex2.py | Python | mit | 290 |
import numpy as np
from scipy.misc import imread, imsave
from glob import glob
def chop_lighter(image1, image2):
    """Keep, per pixel, whichever of the two images is brighter.

    Similar to PIL's ImageChops.lighter: wherever ``image2``'s summed
    channel intensity exceeds ``image1``'s, that pixel of ``image1`` is
    overwritten with ``image2``'s.  ``image1`` is modified in place and
    also returned.

    Improvement: the original copied channels 0, 1 and 2 one at a time;
    a single boolean-mask assignment copies all channels at once, which is
    identical for the 3-channel images this script uses and also handles
    any other channel count (e.g. RGBA).
    """
    s1 = np.sum(image1, axis=2)
    s2 = np.sum(image2, axis=2)
    index = s1 < s2
    image1[index] = image2[index]
    return image1
# Getting the list of files in the directory
files = glob('space/*.JPG')

# Opening up the first image for looping.
# im1 accumulates a straight co-add; im2 keeps the per-pixel brightest
# values (star trails).  Both are float32 to avoid uint8 overflow.
im1 = imread(files[0]).astype(np.float32)
im2 = np.copy(im1)

# Starting loop
# NOTE: Python 2 code (xrange / print statement).
for i in xrange(1, len(files)):
    print i
    im = imread(files[i]).astype(np.float32)
    # Same before
    im1 += im
    # im2 image shows star trails better
    im2 = chop_lighter(im2, im)

# Saving image with slight tweaking on the combination
# of the two images to show star trails with the
# co-added image.
imsave('scipy_36_ex2.jpg', im1 / im1.max() + im2 / im2.max() * 0.2)
| ebressert/ScipyNumpy_book_examples | python_examples/scipy_36_ex2.py | Python | mit | 1,073 |
"""Setup script for the TROLS Stats project.
"""
import setuptools
# NOTE(review): this list mixes runtime dependencies with dev/test tooling
# (pylint, pytest, Sphinx, twine); all of them end up in install_requires
# below -- confirm that is intentional.
PACKAGES = [
    'configa>=1.0.0',
    'dropbox>=7.2.1',
    'filer>=1.0.0',
    'lxml>=3.7.2',
    'pylint>=1.6.4',
    'pytest>=2.9.2',
    'pytest-cov>=2.3.0',
    'sphinx_rtd_theme>=0.1.10a0',
    'twine',
    'Sphinx>=1.4.5',
]

# Keyword arguments forwarded verbatim to setuptools.setup() below.
SETUP_KWARGS = {
    'name': 'trols-stats',
    'version': '1.0.2',
    'description': 'TROLS statistics data model and utils',
    'author': 'Lou Markovski',
    'author_email': 'lou.markovski@gmail.com',
    'url': 'https://github.com/loum/trols-stats',
    'install_requires': PACKAGES,
    'packages': setuptools.find_packages(),
    'package_data': {
        'trols_stats': [
        ],
    },
    'license': 'MIT',
    'classifiers': [
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Build Tools',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3',
    ],
}

setuptools.setup(**SETUP_KWARGS)
| loum/trols-stats | setup.py | Python | gpl-2.0 | 1,039 |
# Copyright 2014, Doug Wiegley (dougwig), A10 Networks
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import a10_neutron_lbaas.tests.test_case as test_case
import mock
import a10_neutron_lbaas.a10_openstack_lb as a10_os
import a10_neutron_lbaas.plumbing_hooks as hooks
def _build_openstack_context():
admin_context = {
"tenant_id": "admin"
}
return mock.Mock(admin_context=admin_context)
class FakeA10OpenstackLB(object):
    """Mixin that stubs the real A10 driver wiring for unit tests.

    Intended to be combined (via multiple inheritance) with one of the real
    A10OpenstackLB classes, so the super().__init__ call below resolves to
    that class's initializer further down the MRO -- NOTE(review): this
    class is not meant to be instantiated on its own.
    """

    def __init__(self, openstack_driver, **kw):
        # Feed the real initializer a MagicMock in place of the neutron
        # driver; ``kw`` is forwarded unchanged.
        super(FakeA10OpenstackLB, self).__init__(
            mock.MagicMock(),
            **kw)
        self.openstack_driver = mock.MagicMock()
        self.plumbing_hooks = hooks.PlumbingHooks(self)
        self.openstack_context = _build_openstack_context()

    def _get_a10_client(self, device_info, **kwargs):
        # Record the requested device and hand back a fresh mock client;
        # tests inspect ``last_client`` for the calls made on it.
        self.device_info = device_info
        self.last_client = mock.MagicMock()
        return self.last_client

    def reset_mocks(self):
        """Re-create the driver, hooks and client mocks between test phases."""
        self.openstack_driver = mock.MagicMock()
        self.plumbing_hooks = hooks.PlumbingHooks(self)
        self.last_client = self._get_a10_client(self.device_info)
        return self.last_client
class FakeA10OpenstackLBV1(FakeA10OpenstackLB, a10_os.A10OpenstackLBV1):
    # v1 fake: the mixin's __init__ is sufficient, no extra setup needed.
    pass
class FakeA10OpenstackLBV2(FakeA10OpenstackLB, a10_os.A10OpenstackLBV2):
    """v2 fake: additionally stubs the neutron hooks module and cert manager."""

    def __init__(self, openstack_driver, **kw):
        super(FakeA10OpenstackLBV2, self).__init__(
            openstack_driver,
            neutron_hooks_module=mock.MagicMock(),
            **kw)
        # The v2 driver consults a certificate manager; replace it too.
        self.certmgr = mock.Mock()
class UnitTestBase(test_case.TestCase):
    """Common base for driver unit tests: points the config loader at the
    unit fixtures and builds a fake v1 or v2 driver (selected by an optional
    ``self.version`` attribute on the subclass)."""

    def _build_openstack_context(self):
        return _build_openstack_context()

    def setUp(self, openstack_lb_args=None):
        """Prepare ``self.a``, the fake driver under test.

        BUG FIX: the original signature used a mutable default
        (``openstack_lb_args={}``) and then mutated it by inserting the
        'provider' key, so state leaked into every later call that relied
        on the default.  The argument now defaults to None and is copied
        before mutation; callers passing a dict are unaffected.
        """
        openstack_lb_args = dict(openstack_lb_args) if openstack_lb_args else {}
        unit_dir = os.path.dirname(__file__)
        unit_config = os.path.join(unit_dir, "unit_config")
        # The config loader reads this environment variable.
        os.environ['A10_CONFIG_DIR'] = unit_config
        if 'provider' not in openstack_lb_args:
            openstack_lb_args['provider'] = 'units'
        # Default to the v2 fake unless the subclass pins version 'v1'.
        if not hasattr(self, 'version') or self.version == 'v2':
            self.a = FakeA10OpenstackLBV2(mock.MagicMock(), **openstack_lb_args)
        else:
            self.a = FakeA10OpenstackLBV1(mock.MagicMock(), **openstack_lb_args)

    def print_mocks(self):
        """Dump the recorded calls for debugging failed assertions."""
        print("OPENSTACK ", self.a.openstack_driver.mock_calls)
        print("CLIENT ", self.a.last_client.mock_calls)

    def empty_mocks(self):
        """Assert that neither the driver nor the client was touched."""
        self.print_mocks()
        self.assertEqual(0, len(self.a.openstack_driver.mock_calls))
        self.assertEqual(0, len(self.a.last_client.mock_calls))

    def empty_close_mocks(self):
        """Assert the only client activity was closing its session."""
        self.print_mocks()
        self.assertEqual(0, len(self.a.openstack_driver.mock_calls))
        self.assertEqual(1, len(self.a.last_client.mock_calls))
        self.a.last_client.session.close.assert_called_with()
| dougwig/a10-neutron-lbaas | a10_neutron_lbaas/tests/unit/test_base.py | Python | apache-2.0 | 3,354 |
"""URLs for API and sample views."""
from django.conf.urls import include, patterns, url
from django.views.generic import TemplateView
from django.views.generic.base import RedirectView
from mdn.urls import mdn_urlpatterns
from webplatformcompat.v1.routers import router as v1_router
from webplatformcompat.v2.routers import router as v2_router
from .views import ViewFeature
# URL routes for the template-backed pages, the browsable API (v1 and v2
# routers), the MDN importer, and the feature-view page.
webplatformcompat_urlpatterns = patterns(
    '',
    # Static template-backed pages.
    url(r'^$', TemplateView.as_view(
        template_name='webplatformcompat/home.html'),
        name='home'),
    url(r'^about/', TemplateView.as_view(
        template_name='webplatformcompat/about.html'),
        name='about'),
    url(r'^browse/', TemplateView.as_view(
        template_name='webplatformcompat/browse.html'),
        name='browse'),
    # Login/logout views for DRF's browsable API.
    url(r'^api-auth/', include('rest_framework.urls',
                               namespace='rest_framework')),
    url(r'^api/$', TemplateView.as_view(
        template_name='webplatformcompat/api.html'),
        name='api'),
    url(r'^api/v1/', include(v1_router.urls, namespace='v1')),
    url(r'^api/v2/', include(v2_router.urls, namespace='v2')),
    # Redirect bare /importer to the canonical trailing-slash URL.
    url(r'^importer$', RedirectView.as_view(
        url='/importer/', permanent=False)),
    url(r'^importer/', include(mdn_urlpatterns)),
    url(r'^view_feature/(?P<feature_id>\d+)(.html)?$', ViewFeature.as_view(
        template_name='webplatformcompat/feature-js.html'),
        name='view_feature'),
)
| jwhitlock/web-platform-compat | webplatformcompat/urls.py | Python | mpl-2.0 | 1,426 |
class Solution:
    """DFS search for the best-scoring walk through an integer grid.

    Cells are flattened to node ids ``m*i + j``; a virtual source node
    ``n*m`` (value 0) is linked to every cell of the first row.  A walk may
    move left, right or down, may enter a node at most twice, and each
    cell's value is collected only on its first visit.  ``maxans`` returns
    the best sum observed while standing on the bottom row.

    Fixes vs. the original: leftover debug ``print(path)`` removed from
    ``dfs`` (it printed the whole path on every recursive call), dead
    commented-out debug lines dropped, and visit counters start at integer
    0 instead of ``False``.
    """

    def __init__(self, arr):
        n, m = len(arr), len(arr[0])
        self.arr = arr
        # node id -> cell value
        self.values = {m * i + j: arr[i][j] for i in range(n) for j in range(m)}
        self.visitedEdge = set()
        # Per-node visit counter (0..2).
        self.visitedNode = [0 for _ in range(n * m)]
        self.ans = -float('inf')

    def _in_last_row(self, node):
        """Return True when *node* is a cell of the bottom row."""
        n = len(self.arr)
        m = len(self.arr[0])
        return (n - 1) * m <= node < n * m

    def _buildGraph(self):
        """Build the adjacency list, plus the virtual source node ``n*m``."""
        n = len(self.arr)
        m = len(self.arr[0])
        G = [[] for _ in range(n * m + 1)]
        for i in range(n):
            for j in range(m):
                if j > 0:
                    G[i * m + j].append(i * m + j - 1)    # left
                if i < n - 1:
                    G[i * m + j].append((i + 1) * m + j)  # down
                if j < m - 1:
                    G[i * m + j].append(i * m + j + 1)    # right
        for j in range(m):
            G[n * m].append(j)  # virtual source -> every first-row cell
        self.values[n * m] = 0
        self.visitedNode.append(0)
        return G

    def dfs(self, G, u, s, path=''):
        # Collect the value only on the first entry into this node.
        if not self.visitedNode[u]:
            s += self.values[u]
        self.visitedNode[u] += 1
        if self._in_last_row(u):
            self.ans = max(self.ans, s)
        for v in G[u]:  # pylint: disable=C0103
            if self.visitedNode[v] < 2:
                self.dfs(G, v, s, path=path + '{} '.format(u))
        self.visitedNode[u] -= 1

    def maxans(self):
        """Run the search and return the best achievable sum."""
        graph = self._buildGraph()
        source = len(self.arr) * len(self.arr[0])
        self.dfs(G=graph, u=source, s=0)
        return self.ans
if __name__ == "__main__":
    # Read an n x m integer matrix from stdin and print the best path sum.
    n, m = (int(tok) for tok in input().split())
    rows = [list(map(int, input().split())) for _ in range(n)]
    print(Solution(arr=rows).maxans())
| opethe1st/CompetitiveProgramming | Hackerrank/WeekOfCode/35/matrix_land.py | Python | gpl-3.0 | 1,994 |
from dataArtist.widgets.Tool import Tool
PLOT_SYMBOLS = ['None', 'o', 's', 't', 'd', '+', 'x']
class Symbols(Tool):
    '''
    change the graphs symbol
    '''
    icon = 'symbol.svg'

    def __init__(self, plotDisplay):
        Tool.__init__(self, plotDisplay)
        self.setParameterMenu()
        # Rebuild the per-curve parameter list lazily each time the menu opens.
        self._menu.aboutToShow.connect(self._updateMenu)

    def clear(self):
        # Drop all previously created per-curve parameters.
        self._menu.p.clearChildren()
        #self._menu.aboutToHide.connect(lambda: self._menu.p.clearChildren())

    def _updateMenu(self):
        '''
        add a parameter to change the graphs symbol for each layer
        '''
        self.clear()
        curves = self.display.widget.curves
        for n, c in enumerate(curves):
            name = c.label.text
            if not name:
                name = 'Plot %s' % str(n + 1)
            p = self._menu.p.addChild({
                'name': name,
                'type': 'list',
                'limits': PLOT_SYMBOLS,
                'autoIncrementName': True,
                # set current symbol as first option:
                'value': c.opts['symbol']})
            # SET SYMBOL:
            # `c=c` freezes the current curve at definition time; without it
            # every slot would act on the last curve of the loop.
            p.sigValueChanged.connect(
                lambda param, val, c=c:
                    c.setSymbol(val) if val != 'None' else c.setSymbol(None))
| radjkarl/dataArtist | DUMP/Symbols.py | Python | gpl-3.0 | 1,285 |
"""
A manangement command to populate the new available_date field in all CourseCertificates
in credentials. Accomplished by sending the COURSE_CERT_DATE_CHANGE signal accross all
course runs in the LMS to call a new API in credentials that will populate the date if one
is found.
This command is designed to be ran once to backpopulate data. New courses added or any time
the COURSE_CERT_DATE_CHANGE signal fires, the API will automatically be called as a part of
that flow.
"""
from django.core.management.base import BaseCommand
from openedx.core.djangoapps.credentials.tasks.v1.tasks import backfill_date_for_all_course_runs
class Command(BaseCommand):
    """
    A command to populate the available_date field in the CourseCertificate model for every
    course run inside of the LMS.
    """

    def handle(self, *args, **options):
        # Fire-and-forget: the actual backfill runs asynchronously as a celery task.
        backfill_date_for_all_course_runs.delay()
| edx/edx-platform | openedx/core/djangoapps/credentials/management/commands/update_credentials_available_date.py | Python | agpl-3.0 | 894 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import tempfile
import boto3
import mock
import pytest
from botocore.exceptions import NoCredentialsError
from airflow import AirflowException
from airflow.models import Connection
from airflow.providers.amazon.aws.hooks.s3 import S3Hook, provide_bucket_name
try:
from moto import mock_s3
except ImportError:
mock_s3 = None
@pytest.mark.skipif(mock_s3 is None, reason='moto package not present')
class TestAwsS3Hook:
    """Unit tests for S3Hook, backed by moto's in-memory S3.

    The ``s3_bucket`` / ``mocked_s3_res`` fixtures (defined in conftest)
    supply a pre-created moto bucket and resource.
    """

    @mock_s3
    def test_get_conn(self):
        hook = S3Hook()
        assert hook.get_conn() is not None

    def test_parse_s3_url(self):
        parsed = S3Hook.parse_s3_url("s3://test/this/is/not/a-real-key.txt")
        assert parsed == ("test", "this/is/not/a-real-key.txt"), "Incorrect parsing of the s3 url"

    def test_check_for_bucket(self, s3_bucket):
        hook = S3Hook()
        assert hook.check_for_bucket(s3_bucket) is True
        assert hook.check_for_bucket('not-a-bucket') is False

    def test_check_for_bucket_raises_error_with_invalid_conn_id(self, s3_bucket, monkeypatch):
        # Strip ambient AWS credentials so the nonexistent conn id cannot
        # silently fall back to the environment.
        monkeypatch.delenv('AWS_PROFILE', raising=False)
        monkeypatch.delenv('AWS_ACCESS_KEY_ID', raising=False)
        monkeypatch.delenv('AWS_SECRET_ACCESS_KEY', raising=False)
        hook = S3Hook(aws_conn_id="does_not_exist")
        with pytest.raises(NoCredentialsError):
            hook.check_for_bucket(s3_bucket)

    @mock_s3
    def test_get_bucket(self):
        hook = S3Hook()
        assert hook.get_bucket('bucket') is not None

    @mock_s3
    def test_create_bucket_default_region(self):
        hook = S3Hook()
        hook.create_bucket(bucket_name='new_bucket')
        assert hook.get_bucket('new_bucket') is not None

    @mock_s3
    def test_create_bucket_us_standard_region(self, monkeypatch):
        monkeypatch.delenv('AWS_DEFAULT_REGION', raising=False)
        hook = S3Hook()
        hook.create_bucket(bucket_name='new_bucket', region_name='us-east-1')
        bucket = hook.get_bucket('new_bucket')
        assert bucket is not None
        region = bucket.meta.client.get_bucket_location(Bucket=bucket.name).get('LocationConstraint', None)
        # https://github.com/spulec/moto/pull/1961
        # If location is "us-east-1", LocationConstraint should be None
        assert region is None

    @mock_s3
    def test_create_bucket_other_region(self):
        hook = S3Hook()
        hook.create_bucket(bucket_name='new_bucket', region_name='us-east-2')
        bucket = hook.get_bucket('new_bucket')
        assert bucket is not None
        region = bucket.meta.client.get_bucket_location(Bucket=bucket.name).get('LocationConstraint', None)
        assert region == 'us-east-2'

    def test_check_for_prefix(self, s3_bucket):
        hook = S3Hook()
        bucket = hook.get_bucket(s3_bucket)
        bucket.put_object(Key='a', Body=b'a')
        bucket.put_object(Key='dir/b', Body=b'b')
        assert hook.check_for_prefix(bucket_name=s3_bucket, prefix='dir/', delimiter='/') is True
        assert hook.check_for_prefix(bucket_name=s3_bucket, prefix='a', delimiter='/') is False

    def test_list_prefixes(self, s3_bucket):
        hook = S3Hook()
        bucket = hook.get_bucket(s3_bucket)
        bucket.put_object(Key='a', Body=b'a')
        bucket.put_object(Key='dir/b', Body=b'b')
        assert hook.list_prefixes(s3_bucket, prefix='non-existent/') is None
        assert ['dir/'] == hook.list_prefixes(s3_bucket, delimiter='/')
        assert ['a'] == hook.list_keys(s3_bucket, delimiter='/')
        assert ['dir/b'] == hook.list_keys(s3_bucket, prefix='dir/')

    def test_list_prefixes_paged(self, s3_bucket):
        hook = S3Hook()
        bucket = hook.get_bucket(s3_bucket)
        # we don't need to test the paginator; that's covered by boto tests
        keys = ["%s/b" % i for i in range(2)]
        dirs = ["%s/" % i for i in range(2)]
        for key in keys:
            bucket.put_object(Key=key, Body=b'a')
        assert sorted(dirs) == sorted(hook.list_prefixes(s3_bucket, delimiter='/', page_size=1))

    def test_list_keys(self, s3_bucket):
        hook = S3Hook()
        bucket = hook.get_bucket(s3_bucket)
        bucket.put_object(Key='a', Body=b'a')
        bucket.put_object(Key='dir/b', Body=b'b')
        assert hook.list_keys(s3_bucket, prefix='non-existent/') is None
        assert ['a', 'dir/b'] == hook.list_keys(s3_bucket)
        assert ['a'] == hook.list_keys(s3_bucket, delimiter='/')
        assert ['dir/b'] == hook.list_keys(s3_bucket, prefix='dir/')

    def test_list_keys_paged(self, s3_bucket):
        hook = S3Hook()
        bucket = hook.get_bucket(s3_bucket)
        keys = [str(i) for i in range(2)]
        for key in keys:
            bucket.put_object(Key=key, Body=b'a')
        assert sorted(keys) == sorted(hook.list_keys(s3_bucket, delimiter='/', page_size=1))

    def test_check_for_key(self, s3_bucket):
        hook = S3Hook()
        bucket = hook.get_bucket(s3_bucket)
        bucket.put_object(Key='a', Body=b'a')
        assert hook.check_for_key('a', s3_bucket) is True
        assert hook.check_for_key('s3://{}//a'.format(s3_bucket)) is True
        assert hook.check_for_key('b', s3_bucket) is False
        assert hook.check_for_key('s3://{}//b'.format(s3_bucket)) is False

    def test_check_for_key_raises_error_with_invalid_conn_id(self, monkeypatch, s3_bucket):
        monkeypatch.delenv('AWS_PROFILE', raising=False)
        monkeypatch.delenv('AWS_ACCESS_KEY_ID', raising=False)
        monkeypatch.delenv('AWS_SECRET_ACCESS_KEY', raising=False)
        hook = S3Hook(aws_conn_id="does_not_exist")
        with pytest.raises(NoCredentialsError):
            hook.check_for_key('a', s3_bucket)

    def test_get_key(self, s3_bucket):
        hook = S3Hook()
        bucket = hook.get_bucket(s3_bucket)
        bucket.put_object(Key='a', Body=b'a')
        assert hook.get_key('a', s3_bucket).key == 'a'
        assert hook.get_key('s3://{}/a'.format(s3_bucket)).key == 'a'

    def test_read_key(self, s3_bucket):
        hook = S3Hook()
        bucket = hook.get_bucket(s3_bucket)
        # UTF-8 bytes for "Contént" — read_key must decode correctly.
        bucket.put_object(Key='my_key', Body=b'Cont\xC3\xA9nt')
        assert hook.read_key('my_key', s3_bucket) == 'Contént'

    # As of 1.3.2, Moto doesn't support select_object_content yet.
    @mock.patch('airflow.providers.amazon.aws.hooks.base_aws.AwsBaseHook.get_client_type')
    def test_select_key(self, mock_get_client_type, s3_bucket):
        mock_get_client_type.return_value.select_object_content.return_value = \
            {'Payload': [{'Records': {'Payload': b'Cont\xC3\xA9nt'}}]}
        hook = S3Hook()
        assert hook.select_key('my_key', s3_bucket) == 'Contént'

    def test_check_for_wildcard_key(self, s3_bucket):
        hook = S3Hook()
        bucket = hook.get_bucket(s3_bucket)
        bucket.put_object(Key='abc', Body=b'a')
        bucket.put_object(Key='a/b', Body=b'a')
        assert hook.check_for_wildcard_key('a*', s3_bucket) is True
        assert hook.check_for_wildcard_key('abc', s3_bucket) is True
        assert hook.check_for_wildcard_key('s3://{}//a*'.format(s3_bucket)) is True
        assert hook.check_for_wildcard_key('s3://{}//abc'.format(s3_bucket)) is True
        assert hook.check_for_wildcard_key('a', s3_bucket) is False
        assert hook.check_for_wildcard_key('b', s3_bucket) is False
        assert hook.check_for_wildcard_key('s3://{}//a'.format(s3_bucket)) is False
        assert hook.check_for_wildcard_key('s3://{}//b'.format(s3_bucket)) is False

    def test_get_wildcard_key(self, s3_bucket):
        hook = S3Hook()
        bucket = hook.get_bucket(s3_bucket)
        bucket.put_object(Key='abc', Body=b'a')
        bucket.put_object(Key='a/b', Body=b'a')
        # The boto3 Class API is _odd_, and we can't do an isinstance check as
        # each instance is a different class, so lets just check one property
        # on S3.Object. Not great but...
        assert hook.get_wildcard_key('a*', s3_bucket).key == 'a/b'
        assert hook.get_wildcard_key('a*', s3_bucket, delimiter='/').key == 'abc'
        assert hook.get_wildcard_key('abc', s3_bucket, delimiter='/').key == 'abc'
        assert hook.get_wildcard_key('s3://{}/a*'.format(s3_bucket)).key == 'a/b'
        assert hook.get_wildcard_key('s3://{}/a*'.format(s3_bucket), delimiter='/').key == 'abc'
        assert hook.get_wildcard_key('s3://{}/abc'.format(s3_bucket), delimiter='/').key == 'abc'
        assert hook.get_wildcard_key('a', s3_bucket) is None
        assert hook.get_wildcard_key('b', s3_bucket) is None
        assert hook.get_wildcard_key('s3://{}/a'.format(s3_bucket)) is None
        assert hook.get_wildcard_key('s3://{}/b'.format(s3_bucket)) is None

    def test_load_string(self, s3_bucket):
        hook = S3Hook()
        hook.load_string("Contént", "my_key", s3_bucket)
        resource = boto3.resource('s3').Object(s3_bucket, 'my_key')  # pylint: disable=no-member
        assert resource.get()['Body'].read() == b'Cont\xC3\xA9nt'

    def test_load_bytes(self, s3_bucket):
        hook = S3Hook()
        hook.load_bytes(b"Content", "my_key", s3_bucket)
        resource = boto3.resource('s3').Object(s3_bucket, 'my_key')  # pylint: disable=no-member
        assert resource.get()['Body'].read() == b'Content'

    def test_load_fileobj(self, s3_bucket):
        hook = S3Hook()
        with tempfile.TemporaryFile() as temp_file:
            temp_file.write(b"Content")
            temp_file.seek(0)
            hook.load_file_obj(temp_file, "my_key", s3_bucket)
            resource = boto3.resource('s3').Object(s3_bucket, 'my_key')  # pylint: disable=no-member
            assert resource.get()['Body'].read() == b'Content'

    @mock.patch.object(S3Hook, 'get_connection', return_value=Connection(schema='test_bucket'))
    def test_provide_bucket_name(self, mock_get_connection):
        # The decorator should inject the connection's schema as bucket_name
        # only when the wrapped function receives no bucket information.
        class FakeS3Hook(S3Hook):

            @provide_bucket_name
            def test_function(self, bucket_name=None):
                return bucket_name

            # pylint: disable=unused-argument
            @provide_bucket_name
            def test_function_with_key(self, key, bucket_name=None):
                return bucket_name

            # pylint: disable=unused-argument
            @provide_bucket_name
            def test_function_with_wildcard_key(self, wildcard_key, bucket_name=None):
                return bucket_name

        fake_s3_hook = FakeS3Hook()

        test_bucket_name = fake_s3_hook.test_function()
        test_bucket_name_with_key = fake_s3_hook.test_function_with_key('test_key')
        test_bucket_name_with_wildcard_key = fake_s3_hook.test_function_with_wildcard_key('test_*_key')

        assert test_bucket_name == mock_get_connection.return_value.schema
        assert test_bucket_name_with_key is None
        assert test_bucket_name_with_wildcard_key is None

    def test_delete_objects_key_does_not_exist(self, s3_bucket):
        hook = S3Hook()
        with pytest.raises(AirflowException) as err:
            hook.delete_objects(bucket=s3_bucket, keys=['key-1'])

        assert isinstance(err.value, AirflowException)
        assert str(err.value) == "Errors when deleting: ['key-1']"

    def test_delete_objects_one_key(self, mocked_s3_res, s3_bucket):
        key = 'key-1'
        mocked_s3_res.Object(s3_bucket, key).put(Body=b'Data')
        hook = S3Hook()
        hook.delete_objects(bucket=s3_bucket, keys=[key])
        assert [o.key for o in mocked_s3_res.Bucket(s3_bucket).objects.all()] == []

    def test_delete_objects_many_keys(self, mocked_s3_res, s3_bucket):
        # 1001 keys forces delete_objects to batch (S3 caps one request at 1000).
        num_keys_to_remove = 1001
        keys = []
        for index in range(num_keys_to_remove):
            key = 'key-{}'.format(index)
            mocked_s3_res.Object(s3_bucket, key).put(Body=b'Data')
            keys.append(key)

        assert sum(1 for _ in mocked_s3_res.Bucket(s3_bucket).objects.all()) == num_keys_to_remove
        hook = S3Hook()
        hook.delete_objects(bucket=s3_bucket, keys=keys)
        assert [o.key for o in mocked_s3_res.Bucket(s3_bucket).objects.all()] == []
| lyft/incubator-airflow | tests/providers/amazon/aws/hooks/test_s3.py | Python | apache-2.0 | 12,862 |
from distutils.core import setup, Extension
import os
import socket
# Host detection: the comedi native extension is only defined on lab
# machines whose hostname contains "zog"; everywhere else a plain install works.
hnm = socket.gethostname()

if 'zog' in hnm:
    comedi_poll = Extension('comedi_poll',
                            include_dirs=['/usr/local/include'],
                            libraries=['comedi'],
                            library_dirs=['/usr/local/lib'],
                            sources=['src/comedi_poll.c'])

# Read the long description with a context manager so the file handle is
# closed deterministically (the previous bare open().read() leaked it).
with open('docs/README.rst', 'rt') as readme_file:
    long_description = readme_file.read()

setup(
    name='pyoperant',
    version='0.1.2',
    author='Justin Kiggins',
    author_email='justin.kiggins@gmail.com',
    description='hardware interface and controls for operant conditioning',
    long_description=long_description,
    packages=['pyoperant'],
    requires=['pyephem', 'numpy'],
    scripts=[
        'scripts/behave',
        'scripts/pyoperantctl',
        'scripts/mutate_config_file',
    ],
    license="BSD",
    classifiers=[
        "Development Status :: 4 - Beta",
        "Environment :: Console",
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: GNU Affero General Public License v3",
        "Natural Language :: English",
        "Operating System :: Unix",
        "Programming Language :: Python :: 2.7",
        "Topic :: Scientific/Engineering",
    ],
    # ext_modules=[comedi_poll]  # native build intentionally left disabled
)
| gentnerlab/pyoperant | setup.py | Python | bsd-3-clause | 1,381 |
"""
Slight abstraction of the filesystem calls to allow for other types of storage
"""
import os
def makedirs(dirname):
    """Create *dirname* and any missing parent directories.

    ``s3://`` paths are a no-op because S3 creates "directories" implicitly
    when an object is uploaded.  Local paths must be absolute.  An existing
    directory is accepted silently; if part of the path exists but is not a
    directory, OSError propagates (same contract as before, now race-free).
    """
    if dirname.startswith("s3://"):
        # S3 will make the directories when we submit the file
        return
    assert dirname.startswith("/"), "dirname must be absolute"
    try:
        # EAFP: a single call, immune to the check-then-create race the old
        # per-component lexists/mkdir loop had.
        os.makedirs(dirname)
    except OSError:
        # Path already fully exists as a directory -> success; anything else
        # (e.g. a regular file in the way) is a real error.
        if not os.path.isdir(dirname):
            raise
def file_exists(original_file):
    """Return True if *original_file* exists and is a regular file.

    ``s3://`` paths are delegated to the s3 backend; anything else is
    checked on the local filesystem.
    """
    if original_file.startswith("s3://"):
        # Imported lazily so local-only deployments don't need the s3 module.
        from filesystem import s3
        return s3.file_exists(original_file)
    # os.path.isfile() is already False for missing paths, so the previous
    # separate os.path.exists() check was redundant.
    return os.path.isfile(original_file)
| callowayproject/Transmogrify | transmogrify/filesystem/__init__.py | Python | apache-2.0 | 1,051 |
# -*- coding: utf-8 -*-
"""
/***************************************************************************
QAD Quantum Aided Design plugin
comando PLINE per disegnare una linea
-------------------
begin : 2013-07-15
copyright : iiiii
email : hhhhh
developers : bbbbb aaaaa ggggg
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
# Import the PyQt and QGIS libraries
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
from qad_getpoint import *
from qad_line_maptool import *
from qad_generic_cmd import QadCommandClass
from qad_msg import QadMsg
from qad_textwindow import *
from qad_snapper import *
import qad_utils
import qad_layer
from qad_rubberband import createRubberBand
# Class that manages the LINE command
class QadLINECommandClass(QadCommandClass):
    """QGIS/QAD command that draws straight line segments vertex by vertex.

    ``run`` implements a two-step state machine: step 0 asks for the first
    point, step 1 loops asking for the next point (with Close/Undo keywords)
    and handles deferred tangent (TAN_DEF) and perpendicular (PER_DEF)
    snap selections.
    """

    def instantiateNewCmd(self):
        """ instantiate a new command of the same type """
        return QadLINECommandClass(self.plugIn)

    def getName(self):
        return QadMsg.translate("Command_list", "LINE")

    def getEnglishName(self):
        return "LINE"

    def connectQAction(self, action):
        QObject.connect(action, SIGNAL("triggered()"), self.plugIn.runLINECommand)

    def getIcon(self):
        return QIcon(":/plugins/qad/icons/line.png")

    def getNote(self):
        # set the explanatory note for the command
        return QadMsg.translate("Command_LINE", "Creates straight line segments.")

    def __init__(self, plugIn):
        QadCommandClass.__init__(self, plugIn)
        self.vertices = []
        self.rubberBand = createRubberBand(self.plugIn.canvas, QGis.Line)
        # Deferred tangent / perpendicular start points (see TAN_DEF / PER_DEF below).
        self.firstPtTan = None
        self.firstPtPer = None
        # if this flag = True the command is used inside another command to
        # draw a line that will NOT be saved to a layer
        self.virtualCmd = False

    def __del__(self):
        QadCommandClass.__del__(self)
        self.rubberBand.hide()
        self.plugIn.canvas.scene().removeItem(self.rubberBand)

    def getPointMapTool(self, drawMode = QadGetPointDrawModeEnum.NONE):
        # Lazily create (and cache) the dedicated line map tool.
        if (self.plugIn is not None):
            if self.PointMapTool is None:
                self.PointMapTool = Qad_line_maptool(self.plugIn)
            return self.PointMapTool
        else:
            return None

    def addVertex(self, point):
        # Append a vertex, extend the rubber band, and refresh snap geometries.
        self.vertices.append(point)
        self.addPointToRubberBand(point)
        self.plugIn.setLastPointAndSegmentAng(self.vertices[-1])
        self.setTmpGeometriesToMapTool()

    def delLastVertex(self):
        if len(self.vertices) > 0:
            del self.vertices[-1] # delete the last vertex
            self.removeLastPointToRubberBand()
            if len(self.vertices) > 0:
                self.plugIn.setLastPointAndSegmentAng(self.vertices[-1])
            self.setTmpGeometriesToMapTool()

    #============================================================================
    # addPointToRubberBand
    #============================================================================
    def addPointToRubberBand(self, point, doUpdate = True):
        numberOfVertices = self.rubberBand.numberOfVertices()

        if numberOfVertices == 2:
            # due to a not-yet-understood bug: if the line has only 2 vertices
            # that share the same x or y (horizontal or vertical line)
            # the line is not drawn, therefore nudge the x or y slightly
            adjustedPoint = qad_utils.getAdjustedRubberBandVertex(self.rubberBand.getPoint(0, 0), point)
            self.rubberBand.addPoint(adjustedPoint, doUpdate)
        else:
            self.rubberBand.addPoint(point, doUpdate)

    #============================================================================
    # removeLastPointToRubberBand
    #============================================================================
    def removeLastPointToRubberBand(self):
        self.rubberBand.removeLastPoint()

    def addLinesToLayer(self, layer):
        # Persist each consecutive vertex pair as one line feature.
        i = 1
        while i < len(self.vertices):
            qad_layer.addLineToLayer(self.plugIn, layer,
                                     [self.vertices[i - 1], self.vertices[i]])
            i = i + 1

    #============================================================================
    # setTmpGeometriesToMapTool
    #============================================================================
    def setTmpGeometriesToMapTool(self):
        self.getPointMapTool().clearTmpGeometries()
        i = 1
        while i < len(self.vertices):
            # add this temporary geometry so the map tool can snap to it
            self.getPointMapTool().appendTmpGeometry(QgsGeometry.fromPolyline([self.vertices[i - 1], self.vertices[i]]))
            i = i + 1

    def run(self, msgMapTool = False, msg = None):
        if self.plugIn.canvas.mapSettings().destinationCrs().geographicFlag():
            self.showMsg(QadMsg.translate("QAD", "\nThe coordinate reference system of the project must be a projected coordinate system.\n"))
            return True # end of command
        if self.virtualCmd == False: # if the polyline should really be saved to a layer
            currLayer, errMsg = qad_layer.getCurrLayerEditable(self.plugIn.canvas, QGis.Line)
            if currLayer is None:
                self.showErr(errMsg)
                return True # end of command

        # REQUEST FIRST POINT
        if self.step == 0: # start of the command
            # set the map tool
            self.getPointMapTool().setMode(Qad_line_maptool_ModeEnum.NONE_KNOWN_ASK_FOR_FIRST_PT)
            # get ready to wait for a point or enter
            # msg, inputType, default, keyWords, no validation
            self.waitFor(QadMsg.translate("Command_LINE", "Specify first point: "), \
                         QadInputTypeEnum.POINT2D, None, "", QadInputModeEnum.NONE)
            self.step = 1
            return False

        #=========================================================================
        # REPLY TO THE POINT REQUEST OR MAIN MENU
        elif self.step == 1: # after waiting for a point, restart the command
            if msgMapTool == True: # the point comes from a graphical selection
                # the following condition occurs if, while a point was being
                # selected, another plugin was activated and deactivated Qad;
                # the command was then reactivated and returns here without the
                # maptool having selected a point
                if self.getPointMapTool().point is None: # the maptool was activated without a point
                    if self.getPointMapTool().rightButton == True: # if the right mouse button was used
                        if self.virtualCmd == False: # if it should really be saved to a layer
                            self.addLinesToLayer(currLayer)
                        return True # end of command
                    else:
                        self.setMapTool(self.getPointMapTool()) # reactivate the maptool
                        return False
                snapTypeOnSel = self.getPointMapTool().snapTypeOnSelection
                value = self.getPointMapTool().point
                entity = self.getPointMapTool().entity
            else: # the point arrives as a parameter of the function
                value = msg
                snapTypeOnSel = QadSnapTypeEnum.NONE

            if type(value) == unicode:
                if value == QadMsg.translate("Command_LINE", "Undo") or value == "Undo":
                    self.delLastVertex() # delete the last vertex
                    # set the map tool
                    if len(self.vertices) == 0:
                        self.getPointMapTool().setMode(Qad_line_maptool_ModeEnum.NONE_KNOWN_ASK_FOR_FIRST_PT)
                        # get ready to wait for a point or enter
                        # msg, inputType, default, keyWords, no validation
                        self.waitFor(QadMsg.translate("Command_LINE", "Specify first point: "), \
                                     QadInputTypeEnum.POINT2D, None, "", QadInputModeEnum.NONE)
                        return False
                    else:
                        self.getPointMapTool().firstPt = self.vertices[-1]
                        self.getPointMapTool().setMode(Qad_line_maptool_ModeEnum.FIRST_PT_KNOWN_ASK_FOR_SECOND_PT)
                elif value == QadMsg.translate("Command_LINE", "Close") or value == "Close":
                    newPt = self.vertices[0]
                    self.addVertex(newPt) # add a new vertex
                    if self.virtualCmd == False: # if it should really be saved to a layer
                        self.addLinesToLayer(currLayer)
                    return True # end of command
            else:
                if len(self.vertices) == 0: # first point
                    if value is None:
                        if self.plugIn.lastPoint is not None:
                            value = self.plugIn.lastPoint
                        else:
                            return True # end of command

                    # a point selected with TAN_DEF mode is a deferred point
                    if snapTypeOnSel == QadSnapTypeEnum.TAN_DEF and entity.isInitialized():
                        # if an explicit point had been selected before
                        if (self.firstPtTan is None) and (self.firstPtPer is None):
                            self.firstPtPer = None
                            self.firstPtTan = value
                            self.firstGeom = QgsGeometry(entity.getGeometry()) # duplicate the geometry
                            coordTransform = QgsCoordinateTransform(entity.layer.crs(), self.plugIn.canvas.mapSettings().destinationCrs()) # transform the geometry
                            self.firstGeom.transform(coordTransform)
                            # set the map tool
                            self.getPointMapTool().tan1 = self.firstPtTan
                            self.getPointMapTool().geom1 = self.firstGeom
                            self.getPointMapTool().setMode(Qad_line_maptool_ModeEnum.FIRST_TAN_KNOWN_ASK_FOR_SECOND_PT)
                        # if a point had been selected with TAN_DEF mode
                        elif self.firstPtTan is not None:
                            secondGeom = QgsGeometry(entity.getGeometry()) # duplicate the geometry
                            coordTransform = QgsCoordinateTransform(entity.layer.crs(), self.plugIn.canvas.mapSettings().destinationCrs()) # transform the geometry
                            secondGeom.transform(coordTransform)
                            tangent = qad_utils.lineFrom2TanPts(self.firstGeom, self.firstPtTan, secondGeom, value)
                            if tangent is not None:
                                # take the endpoint closest to value
                                if qad_utils.getDistance(tangent[0], value) < qad_utils.getDistance(tangent[1], value):
                                    self.addVertex(tangent[1]) # add a new vertex
                                    self.addVertex(tangent[0]) # add a new vertex
                                    self.getPointMapTool().firstPt = tangent[0]
                                else:
                                    self.addVertex(tangent[0]) # add a new vertex
                                    self.addVertex(tangent[1]) # add a new vertex
                                    self.getPointMapTool().firstPt = tangent[1]
                                # set the map tool
                                self.getPointMapTool().setMode(Qad_line_maptool_ModeEnum.FIRST_PT_KNOWN_ASK_FOR_SECOND_PT)
                            else:
                                self.showMsg(QadMsg.translate("Command_LINE", "\nNo tangent possible"))
                        # if a point had been selected with PER_DEF mode
                        elif self.firstPtPer is not None:
                            secondGeom = QgsGeometry(entity.getGeometry()) # duplicate the geometry
                            coordTransform = QgsCoordinateTransform(entity.layer.crs(), self.plugIn.canvas.mapSettings().destinationCrs()) # transform the geometry
                            secondGeom.transform(coordTransform)
                            tangent = qad_utils.lineFromTanPerPts(secondGeom, value, self.firstGeom, self.firstPtPer)
                            if tangent is not None:
                                # take the endpoint closest to value
                                if qad_utils.getDistance(tangent[0], value) < qad_utils.getDistance(tangent[1], value):
                                    self.addVertex(tangent[1]) # add a new vertex
                                    self.addVertex(tangent[0]) # add a new vertex
                                    self.getPointMapTool().firstPt = tangent[0]
                                else:
                                    self.addVertex(tangent[0]) # add a new vertex
                                    self.addVertex(tangent[1]) # add a new vertex
                                    self.getPointMapTool().firstPt = tangent[1]
                                # set the map tool
                                self.getPointMapTool().setMode(Qad_line_maptool_ModeEnum.FIRST_PT_KNOWN_ASK_FOR_SECOND_PT)
                            else:
                                self.showMsg(QadMsg.translate("Command_LINE", "\nNo tangent possible"))
                    # a point selected with PER_DEF mode is a deferred point
                    elif snapTypeOnSel == QadSnapTypeEnum.PER_DEF and entity.isInitialized():
                        # if an explicit point had been selected before
                        if (self.firstPtTan is None) and (self.firstPtPer is None):
                            self.firstPtTan = None
                            self.firstPtPer = value
                            self.firstGeom = QgsGeometry(entity.getGeometry()) # duplicate the geometry
                            coordTransform = QgsCoordinateTransform(entity.layer.crs(), self.plugIn.canvas.mapSettings().destinationCrs()) # transform the geometry
                            self.firstGeom.transform(coordTransform)
                            # set the map tool
                            self.getPointMapTool().per1 = self.firstPtPer
                            self.getPointMapTool().geom1 = self.firstGeom
                            self.getPointMapTool().setMode(Qad_line_maptool_ModeEnum.FIRST_PER_KNOWN_ASK_FOR_SECOND_PT)
                        # if a point had been selected with TAN_DEF mode
                        elif self.firstPtTan is not None:
                            secondGeom = QgsGeometry(entity.getGeometry()) # duplicate the geometry
                            coordTransform = QgsCoordinateTransform(entity.layer.crs(), self.plugIn.canvas.mapSettings().destinationCrs()) # transform the geometry
                            secondGeom.transform(coordTransform)
                            tangent = qad_utils.lineFromTanPerPts(self.firstGeom, self.firstPtTan, secondGeom, value)
                            if tangent is not None:
                                # take the endpoint closest to value
                                if qad_utils.getDistance(tangent[0], value) < qad_utils.getDistance(tangent[1], value):
                                    self.addVertex(tangent[1]) # add a new vertex
                                    self.addVertex(tangent[0]) # add a new vertex
                                    self.getPointMapTool().firstPt = tangent[0]
                                else:
                                    self.addVertex(tangent[0]) # add a new vertex
                                    self.addVertex(tangent[1]) # add a new vertex
                                    self.getPointMapTool().firstPt = tangent[1]
                                # set the map tool
                                self.getPointMapTool().setMode(Qad_line_maptool_ModeEnum.FIRST_PT_KNOWN_ASK_FOR_SECOND_PT)
                            else:
                                self.showMsg(QadMsg.translate("Command_LINE", "\nNo perpendicular possible"))
                        # if a point had been selected with PER_DEF mode
                        elif self.firstPtPer is not None:
                            secondGeom = QgsGeometry(entity.getGeometry()) # duplicate the geometry
                            coordTransform = QgsCoordinateTransform(entity.layer.crs(), self.plugIn.canvas.mapSettings().destinationCrs()) # transform the geometry
                            secondGeom.transform(coordTransform)
                            line = qad_utils.lineFrom2PerPts(self.firstGeom, self.firstPtPer, secondGeom, value)
                            if line is not None:
                                # take the endpoint closest to value
                                if qad_utils.getDistance(line[0], value) < qad_utils.getDistance(line[1], value):
                                    self.addVertex(line[1]) # add a new vertex
                                    self.addVertex(line[0]) # add a new vertex
                                    self.getPointMapTool().firstPt = line[0]
                                else:
                                    self.addVertex(line[0]) # add a new vertex
                                    self.addVertex(line[1]) # add a new vertex
                                    self.getPointMapTool().firstPt = line[1]
                                # set the map tool
                                self.getPointMapTool().setMode(Qad_line_maptool_ModeEnum.FIRST_PT_KNOWN_ASK_FOR_SECOND_PT)
                            else:
                                self.showMsg(QadMsg.translate("Command_LINE", "\nNo perpendicular possible"))
                    else: # otherwise it is an explicit point
                        # if a point had been selected with TAN_DEF mode
                        if self.firstPtTan is not None:
                            snapper = QadSnapper()
                            snapper.setSnapPointCRS(self.plugIn.canvas.mapSettings().destinationCrs())
                            snapper.setSnapType(QadSnapTypeEnum.TAN)
                            snapper.setStartPoint(value)
                            oSnapPoints = snapper.getSnapPoint(self.firstGeom, self.firstPtTan,
                                                              self.plugIn.canvas.mapSettings().destinationCrs())
                            # store the snap point (take the first valid one)
                            for item in oSnapPoints.items():
                                points = item[1]
                                if points is not None:
                                    self.addVertex(points[0]) # add a new vertex
                                    self.addVertex(value) # add a new vertex
                                    break
                            if len(self.vertices) == 0:
                                self.showMsg(QadMsg.translate("Command_LINE", "\nNo tangent possible"))
                        # if a point had been selected with PER_DEF mode
                        elif self.firstPtPer is not None:
                            snapper = QadSnapper()
                            snapper.setSnapPointCRS(self.plugIn.canvas.mapSettings().destinationCrs())
                            snapper.setSnapType(QadSnapTypeEnum.PER)
                            snapper.setStartPoint(value)
                            oSnapPoints = snapper.getSnapPoint(self.firstGeom, self.firstPtPer,
                                                              self.plugIn.canvas.mapSettings().destinationCrs())
                            # store the snap point (take the first valid one)
                            for item in oSnapPoints.items():
                                points = item[1]
                                if points is not None:
                                    self.addVertex(points[0]) # add a new vertex
                                    self.addVertex(value) # add a new vertex
                                    break
                            if len(self.vertices) == 0:
                                self.showMsg(QadMsg.translate("Command_LINE", "\nNo perpendicular possible"))
                        else:
                            self.addVertex(value) # add a new vertex

                    if len(self.vertices) > 0:
                        # set the map tool
                        self.getPointMapTool().firstPt = value
                        self.getPointMapTool().setMode(Qad_line_maptool_ModeEnum.FIRST_PT_KNOWN_ASK_FOR_SECOND_PT)
                else: # second point
                    if value is None:
                        if self.virtualCmd == False: # if it should really be saved to a layer
                            self.addLinesToLayer(currLayer)
                        return True # end of command
                    # if the first point is explicit
                    # NOTE(review): "len(self.vertices) > 0 is not None" is a chained
                    # comparison that reduces to "len(self.vertices) > 0" (the
                    # "is not None" part compares the literal 0); kept as-is.
                    if len(self.vertices) > 0 is not None:
                        self.addVertex(value) # add a new vertex
                        # set the map tool
                        self.getPointMapTool().firstPt = value
                        self.getPointMapTool().setMode(Qad_line_maptool_ModeEnum.FIRST_PT_KNOWN_ASK_FOR_SECOND_PT)

            if len(self.vertices) > 2:
                keyWords = QadMsg.translate("Command_LINE", "Close") + "/" + \
                           QadMsg.translate("Command_LINE", "Undo")
            else:
                keyWords = QadMsg.translate("Command_LINE", "Undo")
            prompt = QadMsg.translate("Command_LINE", "Specify next point or [{0}]: ").format(keyWords)

            englishKeyWords = "Close" + "/" + "Undo"
            keyWords += "_" + englishKeyWords
            # get ready to wait for a point, enter, or a keyword
            # msg, inputType, default, keyWords, no validation
            self.waitFor(prompt, \
                         QadInputTypeEnum.POINT2D | QadInputTypeEnum.KEYWORDS, \
                         None, \
                         keyWords, QadInputModeEnum.NONE)
            return False
| geosim/QAD | qad_line_cmd.py | Python | gpl-3.0 | 22,677 |
from django.conf.urls import url
from .views import profile, update
from django.contrib.auth.views import password_change
# URL routes for the users app.
# Fix: the closing bracket line was corrupted with appended dataset metadata,
# leaving the module syntactically invalid; restored the plain list literal.
urlpatterns = [
    # Password change, backed by Django's built-in password_change view.
    url(r'^profile/resetpassword/$', password_change, name='reset_password'),
    # Route to the profile view, keyed by numeric id.
    url(r'^profile/(?P<id>\d+)/$', profile, name='profile'),
    # Route to the update view, keyed by numeric id.
    url(r'^update_profile/(?P<id>\d+)/$', update, name='update'),
]
#
# jython examples for jas.
# $Id$
#
import sys;
from jas import Ring, PolyRing, QQ
from jas import startLog, terminate
# ideal intersection example
# Builds two zero-dimensional ideals in Q[x,y,z] (lexicographic order),
# computes a Groebner basis of each, then intersects them.
#r = Ring( "Rat(x,y,z) L" );
r = PolyRing( QQ(), "(x,y,z)", PolyRing.lex );
print "Ring: " + str(r);
print;
# Generators of the first ideal: vanishing at the point (1, 1, 1).
ps1 = """
(
( x - 1 ),
( y - 1 ),
( z - 1 )
)
""";
# Generators of the second ideal: vanishing at the point (2, 3, 3).
ps2 = """
(
( x - 2 ),
( y - 3 ),
( z - 3 )
)
""";
F1 = r.ideal( ps1 );
#print "Ideal: " + str(F1);
#print;
F2 = r.ideal( ps2 );
#print "Ideal: " + str(F2);
#print;
#startLog();
# Groebner bases of both ideals.
rg1 = F1.GB();
print "rg1 = ", rg1;
print;
rg2 = F2.GB();
print "rg2 = ", rg2;
print;
#startLog();
# Intersection of the two ideals.
ig = F1.intersect(F2);
print "rg1 intersect rg2 = ", ig;
print;
terminate();
| breandan/java-algebra-system | examples/intersect.py | Python | gpl-2.0 | 684 |
"""Parallel workflow execution via SGE
"""
import os
import re
import subprocess
from time import sleep
from .base import (SGELikeBatchManagerBase, logger, iflogger, logging)
from nipype.interfaces.base import CommandLine
def qsubSanitizeJobName(testjobname):
    """Return *testjobname* adjusted to be a legal qsub job name.

    SGE job names must begin with a letter, so any name starting with a
    digit or punctuation gets a 'J' prefix; valid names pass unchanged.

    >>> qsubSanitizeJobName('01')
    'J01'
    >>> qsubSanitizeJobName('a01')
    'a01'
    """
    return testjobname if testjobname[0].isalpha() else 'J' + testjobname
class SGEPlugin(SGELikeBatchManagerBase):
    """Execute using SGE (OGE not tested)

    The plugin_args input to run can be used to control the SGE execution.
    Currently supported options are:

    - template : template to use for batch job submission
    - qsub_args : arguments to be prepended to the job execution script in the
      qsub call
    """
    def __init__(self, **kwargs):
        # Default job script preamble: export environment and use sh.
        template = """
#$ -V
#$ -S /bin/sh
"""
        # Retry defaults; both can be overridden through plugin_args.
        self._retry_timeout = 2
        self._max_tries = 2
        if 'plugin_args' in kwargs and kwargs['plugin_args']:
            if 'retry_timeout' in kwargs['plugin_args']:
                self._retry_timeout = kwargs['plugin_args']['retry_timeout']
            if 'max_tries' in kwargs['plugin_args']:
                self._max_tries = kwargs['plugin_args']['max_tries']
        super(SGEPlugin, self).__init__(template, **kwargs)
    def _is_pending(self, taskid):
        """Return True while `qstat -j` still reports the task.

        NOTE(review): relies on qstat printing an '='-ruler banner for
        known jobs; unknown jobs produce an error message instead --
        confirm against the deployed SGE version.
        """
        # subprocess.Popen requires taskid to be a string
        proc = subprocess.Popen(["qstat", '-j', str(taskid)],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        o, _ = proc.communicate()
        return o.startswith('=')
    def _submit_batchtask(self, scriptfile, node):
        """Submit *scriptfile* for *node* via qsub and return the SGE task id."""
        cmd = CommandLine('qsub', environ=os.environ.data,
                          terminal_output='allatonce')
        path = os.path.dirname(scriptfile)
        # Assemble qsub arguments: plugin-level args, then node-level args
        # (which either replace or extend them depending on 'overwrite').
        qsubargs = ''
        if self._qsub_args:
            qsubargs = self._qsub_args
        if 'qsub_args' in node.plugin_args:
            if 'overwrite' in node.plugin_args and\
               node.plugin_args['overwrite']:
                qsubargs = node.plugin_args['qsub_args']
            else:
                qsubargs += (" " + node.plugin_args['qsub_args'])
        # Default stdout/stderr to the script's directory if not given.
        if '-o' not in qsubargs:
            qsubargs = '%s -o %s' % (qsubargs, path)
        if '-e' not in qsubargs:
            qsubargs = '%s -e %s' % (qsubargs, path)
        # Job name: LOGNAME + hierarchy + id, reversed so the most specific
        # component comes first, then sanitized for qsub's naming rules.
        if node._hierarchy:
            jobname = '.'.join((os.environ.data['LOGNAME'],
                                node._hierarchy,
                                node._id))
        else:
            jobname = '.'.join((os.environ.data['LOGNAME'],
                                node._id))
        jobnameitems = jobname.split('.')
        jobnameitems.reverse()
        jobname = '.'.join(jobnameitems)
        jobname = qsubSanitizeJobName(jobname)
        cmd.inputs.args = '%s -N %s %s' % (qsubargs,
                                           jobname,
                                           scriptfile)
        # Silence the interface logger while submitting; restored below.
        oldlevel = iflogger.level
        iflogger.setLevel(logging.getLevelName('CRITICAL'))
        tries = 0
        while True:
            try:
                result = cmd.run()
            except Exception, e:
                if tries < self._max_tries:
                    tries += 1
                    sleep(self._retry_timeout)  # sleep 2 seconds and try again.
                else:
                    iflogger.setLevel(oldlevel)
                    raise RuntimeError('\n'.join((('Could not submit sge task'
                                                   ' for node %s') % node._id,
                                                  str(e))))
            else:
                break
        iflogger.setLevel(oldlevel)
        # retrieve sge taskid from the last non-empty line of qsub's output
        lines = [line for line in result.runtime.stdout.split('\n') if line]
        taskid = int(re.match("Your job ([0-9]*) .* has been submitted",
                              lines[-1]).groups()[0])
        self._pending[taskid] = node.output_dir()
        logger.debug('Submitted the SGE task: %d for node %s with qsub'
                     ' arguments %s' % (taskid, node._id, qsubargs))
        return taskid
| FredLoney/nipype | nipype/pipeline/plugins/sge.py | Python | bsd-3-clause | 4,364 |
#!/usr/bin/env python
# Copyright 2017 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Receive push events for new builds and upload rows to BigQuery."""
from __future__ import print_function
import argparse
import json
import os
import pprint
import socket
import sys
import traceback
import time
import multiprocessing.pool
try:
from google.cloud import bigquery
from google.cloud import pubsub
import google.cloud.exceptions
except ImportError:
print('WARNING: unable to load google cloud (test environment?)')
traceback.print_exc()
import model
import make_db
import make_json
def process_changes(results):
    """Split GCS change events into trivial acks and builds to further process.

    Returns (acks, todo): pubsub message ids that can be acknowledged
    immediately, and (ack_id, job, build) tuples for finished builds.
    """
    acks = []   # pubsub message ids to acknowledge
    todo = []   # (id, job, build) of builds to grab
    suffix = '/finished.json'
    for ack_id, message in results:
        attrs = message.attributes
        # Only newly finalized objects are interesting.
        if attrs['eventType'] != 'OBJECT_FINALIZE':
            acks.append(ack_id)
            continue
        obj = attrs['objectId']
        # ... and only the finished.json marker of a build.
        if not obj.endswith(suffix):
            acks.append(ack_id)
            continue
        job, build = obj[:-len(suffix)].rsplit('/', 1)
        todo.append((ack_id, 'gs://%s/%s' % (attrs['bucketId'], job), build))
    return acks, todo
def get_started_finished(gcs_client, db, todo):
    """Download started/finished.json from build dirs in todo.

    Args:
        gcs_client: client exposing get_started_finished(job, build).
        db: database exposing insert_build() and commit().
        todo: iterable of (ack_id, job, build) tuples.
    Returns:
        (acks, build_dirs): pubsub ids now safe to acknowledge, and the
        build directories whose finished.json was found and stored.
    """
    acks = []
    build_dirs = []
    # Downloads are I/O bound, so fan out over 16 threads.
    pool = multiprocessing.pool.ThreadPool(16)
    try:
        # NOTE: the tuple-unpacking lambda below is Python 2-only syntax.
        for ack_id, (build_dir, started, finished) in pool.imap_unordered(
                lambda (ack_id, job, build): (ack_id, gcs_client.get_started_finished(job, build)),
                todo):
            if finished:
                if not db.insert_build(build_dir, started, finished):
                    print('already present??')
                start = time.localtime(started.get('timestamp', 0) if started else 0)
                print(build_dir, bool(started), bool(finished),
                      time.strftime('%F %T %Z', start),
                      finished and finished.get('result'))
                build_dirs.append(build_dir)
                acks.append(ack_id)
            else:
                # Unfinished builds are left unacked so pubsub redelivers them.
                print('finished.json missing?', build_dir, started, finished)
    finally:
        pool.close()
    db.commit()
    return acks, build_dirs
def row_to_mapping(row, schema):
    """Convert a dictionary to a list for bigquery.Table.insert_data.

    Values are ordered by the schema's fields; a missing key maps to []
    for REPEATED fields and None otherwise.
    Silly. See https://github.com/GoogleCloudPlatform/google-cloud-python/issues/3396
    """
    mapped = []
    for field in schema:
        absent = [] if field.mode == 'REPEATED' else None
        mapped.append(row.get(field.name, absent))
    return mapped
def retry(func, *args, **kwargs):
    """Run a function with arguments, retrying on server errors.

    Makes up to 20 attempts with exponential backoff (1.4 ** attempt
    seconds) on transient socket/backend failures, then one final
    unguarded attempt so the underlying error propagates to the caller.
    """
    # pylint: disable=no-member
    for attempt in xrange(20):
        try:
            return func(*args, **kwargs)
        except (socket.error, google.cloud.exceptions.ServerError):
            # retry with exponential backoff
            traceback.print_exc()
            time.sleep(1.4 ** attempt)
    return func(*args, **kwargs)  # one last attempt
def insert_data(table, rows_iter):
    """Upload rows from rows_iter into bigquery table table.

    rows_iter should return a series of (row_id, row dictionary) tuples.
    The row dictionary must match the table's schema.
    Returns the row_ids that were inserted.
    """
    emitted = set()
    rows = []
    row_ids = []
    for row_id, row in rows_iter:
        emitted.add(row_id)
        # Skip rows over ~1MB -- presumably a BigQuery per-row limit;
        # confirm against current quotas.
        if len(json.dumps(row)) > 1e6:
            print('ERROR: row too long', row['path'])
            continue
        row = row_to_mapping(row, table.schema)
        rows.append(row)
        row_ids.append(row_id)
    if not rows:  # nothing to do
        return []
    def insert(table, rows, row_ids):
        """Insert rows with row_ids into table, retrying as necessary."""
        errors = retry(table.insert_data, rows, row_ids, skip_invalid_rows=True)
        if not errors:
            print('Loaded {} builds into {}'.format(len(rows), table.name))
        else:
            print('Errors:')
            pprint.pprint(errors)
            pprint.pprint(table.schema)
    # Payloads over ~10MB are split into one insert call per row.
    if len(json.dumps(rows)) > 10e6:
        print('WARNING: too big for one insert, doing stupid slow version')
        for row, row_id in zip(rows, row_ids):
            insert(table, [row], [row_id])
    else:
        insert(table, rows, row_ids)
    return emitted
def main(db, sub, tables, client_class=make_db.GCSClient, stop=None):
    """Pull GCS change events from pubsub and stream builds into BigQuery.

    Args:
        db: database used to record builds and emitted rows.
        sub: pubsub subscription to pull change events from.
        tables: {name: (bigquery.Table, incremental table name)} targets.
        client_class: factory for the GCS client (swapped out in tests).
        stop: nullary callable; the loop exits once it returns True.
    """
    # pylint: disable=too-many-locals
    gcs_client = client_class('', {})
    if stop is None:
        stop = lambda: False
    results = [0] * 1000  # don't sleep on first loop
    while not stop():
        print()
        # Few results last round means we're caught up -- throttle polling.
        if len(results) < 10 and client_class is make_db.GCSClient:
            time.sleep(5)  # slow down!
        print('====', time.strftime("%F %T %Z"), '=' * 40)
        results = retry(sub.pull, max_messages=1000)
        # Keep draining for up to 7 seconds to batch up pending messages.
        start = time.time()
        while time.time() < start + 7:
            results_more = sub.pull(max_messages=1000, return_immediately=True)
            if not results_more:
                break
            results += results_more
        print('PULLED', len(results))
        acks, todo = process_changes(results)
        if acks:
            print('ACK irrelevant', len(acks))
            # Acknowledge in chunks of 1000 to stay within request limits.
            for n in xrange(0, len(acks), 1000):
                retry(sub.acknowledge, acks[n: n + 1000])
        if todo:
            print('EXTEND-ACK ', len(todo))
            # give 3 minutes to grab build details
            retry(sub.modify_ack_deadline, [i for i, _j, _b in todo], 60*3)
        acks, build_dirs = get_started_finished(gcs_client, db, todo)
        # notify pubsub queue that we've handled the finished.json messages
        if acks:
            print('ACK "finished.json"', len(acks))
            retry(sub.acknowledge, acks)
        # grab junit files for new builds
        make_db.download_junit(db, 16, client_class)
        # stream new rows to tables
        if build_dirs and tables:
            for table, incremental_table in tables.itervalues():
                builds = db.get_builds_from_paths(build_dirs, incremental_table)
                emitted = insert_data(table, make_json.make_rows(db, builds))
                db.insert_emitted(emitted, incremental_table)
def load_sub(poll):
    """Return the PubSub subscription specified by the /-separated input.

    *poll* has the form "project/topic/subscription".
    """
    project, topic, subscription = poll.split('/')
    client = pubsub.Client(project)
    return client.topic(topic).subscription(subscription)
def load_schema(schemafield):
    """Construct the expected BigQuery schema from files on disk.

    Only used for new tables.

    Args:
        schemafield: the SchemaField class to instantiate for each field.
    """
    # schema.json lives next to this module.
    basedir = os.path.dirname(__file__)
    schema_json = json.load(open(os.path.join(basedir, 'schema.json')))
    def make_field(spec):
        # Rename 'type' -> 'field_type' (mutates spec in place) and
        # recurse into any nested fields before building the object.
        spec['field_type'] = spec.pop('type')
        if 'fields' in spec:
            spec['fields'] = [make_field(f) for f in spec['fields']]
        return schemafield(**spec)
    return [make_field(s) for s in schema_json]
def load_tables(dataset, tablespecs):
    """Construct a dictionary of BigQuery tables given the input tablespec.

    Args:
        dataset: bigquery.Dataset
        tablespecs: list of strings of "NAME:DAYS", e.g. ["day:1"]
    Returns:
        {name: (bigquery.Table, incremental table name)}
    """
    project, dataset_name = dataset.split(':')
    dataset = bigquery.Client(project).dataset(dataset_name)
    tables = {}
    for spec in tablespecs:
        name, days = spec.split(':')
        table = dataset.table(name)
        try:
            table.reload()
        except google.cloud.exceptions.NotFound:  # pylint: disable=no-member
            # First use of this table: create it with the expected schema.
            table.schema = load_schema(bigquery.schema.SchemaField)
            table.create()
        tables[name] = (table, make_json.get_table(float(days)))
    return tables
class StopWhen(object):
    """A simple object that returns True once when the given hour begins."""

    def __init__(self, target, clock=lambda: time.localtime().tm_hour):
        # Record the hour at construction so the first call can only fire
        # on a transition *into* the target hour.
        self.clock = clock
        self.last = self.clock()
        self.target = target

    def __call__(self):
        """Return True on the tick where the clock enters the target hour.

        A 'stop' file in the working directory forces an immediate halt.
        """
        if os.path.exists('stop'):
            return True
        previous, self.last = self.last, self.clock()
        return self.last != previous and self.last == self.target
def get_options(argv):
    """Process command line arguments.

    Args:
        argv: argument list (without the program name).
    Returns:
        the parsed argparse.Namespace.
    """
    argparser = argparse.ArgumentParser()
    argparser.add_argument(
        '--poll', required=True,
        help='Follow GCS changes from project/topic/subscription')
    argparser.add_argument(
        '--dataset',
        help='BigQuery dataset (e.g. k8s-gubernator:build)')
    argparser.add_argument(
        '--tables', nargs='+', default=[],
        help='Upload rows to table:days [e.g. --tables day:1 week:7 all:0]')
    argparser.add_argument(
        '--stop_at', type=int,
        help='Terminate when this hour (0-23) rolls around (in local time).')
    return argparser.parse_args(argv)
if __name__ == '__main__':
    # Parse flags, wire up the database, subscription and target tables,
    # then stream until the --stop_at hour (if given) rolls around.
    OPTIONS = get_options(sys.argv[1:])
    main(model.Database(),
         load_sub(OPTIONS.poll),
         load_tables(OPTIONS.dataset, OPTIONS.tables),
         stop=StopWhen(OPTIONS.stop_at))
| spxtr/test-infra | kettle/stream.py | Python | apache-2.0 | 9,971 |
# this example shows how to create fillets between two lines
import HeeksPython as cad
# start a new sketch and keep a handle to it
cad.sketch()
sketch = cad.getlastobj()
# four lines forming a closed 4x1 rectangle: (0,1)-(4,1)-(4,2)-(0,2)
cad.line(0,1,4,1)
l1= cad.getlastobj()
cad.line(4,1,4,2)
l2= cad.getlastobj()
cad.line(4,2,0,2)
l3= cad.getlastobj()
cad.line(0,2,0,1)
l4= cad.getlastobj()
# move the four lines into the sketch
cad.add(sketch,l1)
cad.add(sketch,l2)
cad.add(sketch,l3)
cad.add(sketch, l4)
# fillet (radius 0.1) at each corner of the rectangle
cad.fillet2d(sketch,0,1,0, .1)
cad.fillet2d(sketch,4,1,0,.1)
cad.fillet2d(sketch,4,2,0,.1)
cad.fillet2d(sketch,0,2,0,.1)
# reorder the sketch elements (presumably into a connected chain --
# see the HeeksPython API for reorder's exact semantics)
cad.reorder(sketch)
| JohnyEngine/CNC | deprecated/heekspython/examples/fillets.py | Python | apache-2.0 | 501 |
# file: client/config.py
# Default client configuration.
# Fix: the closing brace line was corrupted with appended dataset metadata,
# leaving the module syntactically invalid; restored the plain dict literal.
config = {
    "host": "localhost",  # server host name
    "port": 9999,         # server TCP port
    "sleep": 5,           # delay value used by the client (units defined there)
}
# :coding: utf-8
import pytest
import champollion.parser.js_class
@pytest.mark.parametrize(
("content", "expected"),
[
(
(
"/**\n"
" * Simple class\n"
" */\n"
"class SimpleClass {}\n"
),
{
"test.module.SimpleClass": {
"id": "test.module.SimpleClass",
"module_id": "test.module",
"exported": False,
"default": False,
"name": "SimpleClass",
"parent": None,
"line_number": 4,
"description": "Simple class",
"method": {},
"attribute": {}
}
}
),
(
(
"/**\n"
" * Simple class with attributes\n"
" */\n"
"class SimpleClass {\n"
" /**\n"
" * A static attribute.\n"
" */\n"
" static attribute1 = 42;\n"
"\n"
" /**\n"
" * An object attribute.\n"
" */\n"
" attribute2 = {\n"
" key1: 'value1',\n"
" key2: 'value2',\n"
" };\n"
"\n"
" /**\n"
" * A list attribute.\n"
" */\n"
" attribute3 = [\n"
" 'value1',\n"
" 'value2',\n"
" ];\n"
"}\n"
),
{
"test.module.SimpleClass": {
"id": "test.module.SimpleClass",
"module_id": "test.module",
"exported": False,
"default": False,
"name": "SimpleClass",
"parent": None,
"line_number": 4,
"description": "Simple class with attributes",
"method": {},
"attribute": {
"test.module.SimpleClass.attribute1": {
"id": "test.module.SimpleClass.attribute1",
"class_id": "test.module.SimpleClass",
"module_id": "test.module",
"name": "attribute1",
"prefix": "static",
"value": "42",
"line_number": 8,
"description": "A static attribute."
},
"test.module.SimpleClass.attribute2": {
"id": "test.module.SimpleClass.attribute2",
"class_id": "test.module.SimpleClass",
"module_id": "test.module",
"name": "attribute2",
"prefix": None,
"value": "{ key1: 'value1', key2: 'value2', }",
"line_number": 13,
"description": "An object attribute."
},
"test.module.SimpleClass.attribute3": {
"id": "test.module.SimpleClass.attribute3",
"class_id": "test.module.SimpleClass",
"module_id": "test.module",
"name": "attribute3",
"prefix": None,
"value": "[ 'value1', 'value2', ]",
"line_number": 21,
"description": "A list attribute."
}
}
}
}
),
(
(
"/**\n"
" * Simple class with getter and setter\n"
" */\n"
"class SimpleClass {\n"
" /**\n"
" * Get the name.\n"
" *\n"
" * .. warning::\n"
" *\n"
" * The name is awesome\n"
" */\n"
" get name() {\n"
" return this.name;\n"
" }\n"
"\n"
" /**\n"
" * Set the name.\n"
" *\n"
" * .. warning::\n"
" *\n"
" * Keep the name awesome\n"
" */\n"
" set name(value) {\n"
" this.name = value;\n"
" }\n"
"}\n"
),
{
"test.module.SimpleClass": {
"id": "test.module.SimpleClass",
"module_id": "test.module",
"exported": False,
"default": False,
"name": "SimpleClass",
"parent": None,
"line_number": 4,
"description": "Simple class with getter and setter",
"method": {
"test.module.SimpleClass.name.get": {
"id": "test.module.SimpleClass.name.get",
"class_id": "test.module.SimpleClass",
"module_id": "test.module",
"name": "name",
"prefix": "get",
"arguments": [],
"line_number": 12,
"description": (
"Get the name.\n"
"\n"
".. warning::\n"
"\n"
" The name is awesome"
)
},
"test.module.SimpleClass.name.set": {
"id": "test.module.SimpleClass.name.set",
"class_id": "test.module.SimpleClass",
"module_id": "test.module",
"name": "name",
"prefix": "set",
"arguments": ["value"],
"line_number": 23,
"description": (
"Set the name.\n"
"\n"
".. warning::\n"
"\n"
" Keep the name awesome"
)
}
},
"attribute": {}
}
}
),
(
(
"/**\n"
" * Simple class with constructor\n"
" */\n"
"class SimpleClass {\n"
" constructor() {\n"
" }\n"
"}\n"
),
{
"test.module.SimpleClass": {
"id": "test.module.SimpleClass",
"module_id": "test.module",
"exported": False,
"default": False,
"name": "SimpleClass",
"parent": None,
"line_number": 4,
"description": "Simple class with constructor",
"method": {
"test.module.SimpleClass.constructor": {
"id": "test.module.SimpleClass.constructor",
"class_id": "test.module.SimpleClass",
"module_id": "test.module",
"name": "constructor",
"prefix": None,
"arguments": [],
"line_number": 5,
"description": None
}
},
"attribute": {}
}
}
),
(
(
"/** Simple class expression */\n"
"export const CustomWelcome = class Welcome extends Base{\n"
" static expression= 'Hello World';\n"
"\n"
" /** Say Hi to the world */\n"
" greeting() {\n"
" return this.expression;\n"
" }\n"
"};\n"
"\n"
),
{
"test.module.CustomWelcome": {
"id": "test.module.CustomWelcome",
"module_id": "test.module",
"exported": True,
"default": False,
"name": "CustomWelcome",
"parent": "Base",
"line_number": 2,
"description": "Simple class expression",
"method": {
"test.module.CustomWelcome.greeting": {
"id": "test.module.CustomWelcome.greeting",
"class_id": "test.module.CustomWelcome",
"module_id": "test.module",
"name": "greeting",
"prefix": None,
"arguments": [],
"line_number": 6,
"description": "Say Hi to the world"
}
},
"attribute": {
"test.module.CustomWelcome.expression": {
"id": "test.module.CustomWelcome.expression",
"class_id": "test.module.CustomWelcome",
"module_id": "test.module",
"name": "expression",
"prefix": "static",
"value": "'Hello World'",
"line_number": 3,
"description": None
}
}
}
}
),
(
(
(
"/** Simple class */\n"
"class Welcome {\n"
" /** Say Hi to someone */\n"
" greeting = who =>\n"
" `Hello ${who}!`;\n"
"};\n"
"\n"
),
{
"test.module.Welcome": {
"id": "test.module.Welcome",
"module_id": "test.module",
"exported": False,
"default": False,
"name": "Welcome",
"parent": None,
"line_number": 2,
"description": "Simple class",
"method": {
"test.module.Welcome.greeting": {
"id": "test.module.Welcome.greeting",
"class_id": "test.module.Welcome",
"module_id": "test.module",
"name": "greeting",
"prefix": None,
"arguments": ["who"],
"line_number": 4,
"description": "Say Hi to someone"
}
},
"attribute": {
"test.module.Welcome.greeting": {
"id": "test.module.Welcome.greeting",
"class_id": "test.module.Welcome",
"module_id": "test.module",
"name": "greeting",
"prefix": None,
"value": "who =>`Hello ${who}!`",
"line_number": 4,
"description": "Say Hi to someone"
}
}
},
}
)
)
],
ids=[
"valid class",
"valid class with attributes",
"valid class with getter and setter methods",
"valid class with constructor",
"valid exported class expression with mother class",
"valid class with arrow-type method and a single argument",
]
)
def test_get_class_environment(content, expected):
    """Return class environment from content."""
    environment = champollion.parser.js_class.fetch_environment(
        content, "test.module"
    )
    assert environment == expected
@pytest.mark.parametrize(
("content", "expected"),
[
(
"class AwesomeClass",
None
),
(
"const test= 'class AwesomeClass {}';",
None
),
(
"class AwesomeClass {}",
{
"export": None,
"default": None,
"class_name": "AwesomeClass",
"data_name": None,
"mother_class": None,
"start_regex": ""
}
),
(
"class Awesome_Class extends module.Mother-Class{}",
{
"export": None,
"default": None,
"class_name": "Awesome_Class",
"data_name": None,
"mother_class": "module.Mother-Class",
"start_regex": ""
}
),
(
"export default class AwesomeClass {}",
{
"export": "export ",
"default": "default ",
"class_name": "AwesomeClass",
"data_name": None,
"mother_class": None,
"start_regex": ""
}
),
(
"export const MyClass1= class AwesomeClass {}",
{
"export": "export ",
"default": None,
"class_name": None,
"data_name": "MyClass1",
"mother_class": None,
"start_regex": ""
}
),
(
"let MyClass1= class AwesomeClass extends Test2 {}",
{
"export": None,
"default": None,
"class_name": None,
"data_name": "MyClass1",
"mother_class": "Test2",
"start_regex": ""
}
),
],
ids=[
"invalid class",
"invalid class string",
"valid class",
"valid class with inheritance",
"valid class exported by default",
"valid class expression exported",
"another valid class expression",
]
)
def test_class_pattern(content, expected):
    """Match a class."""
    found = champollion.parser.js_class._CLASS_PATTERN.search(content)
    if expected is not None:
        assert found.groupdict() == expected
    else:
        assert found is None
@pytest.mark.parametrize(
("content", "expected"),
[
(
"valid-Method (arg1) {}",
{
"arguments": "arg1",
"method_name": "valid-Method",
"prefix": None,
"start_regex": ""
}
),
(
"validMethod (\n"
" arg1, arg2, arg3, arg4, arg5,\n"
" arg6, arg7, arg8, arg9, arg10\n"
") {}",
{
"arguments": (
"arg1, arg2, arg3, arg4, arg5,\n"
" arg6, arg7, arg8, arg9, arg10"
),
"method_name": "validMethod",
"prefix": None,
"start_regex": ""
}
),
(
"static valid_method() {}",
{
"arguments": "",
"method_name": "valid_method",
"prefix": "static ",
"start_regex": ""
}
),
(
"get valid_method2(){}",
{
"arguments": "",
"method_name": "valid_method2",
"prefix": "get ",
"start_regex": ""
}
),
(
(
"set validMethod( \n"
" arg1, arg2, arg3, arg4, arg5,\n"
"){\n"
" console.log('test');\n"
"}\n"
),
{
"arguments": "arg1, arg2, arg3, arg4, arg5,",
"method_name": "validMethod",
"prefix": "set ",
"start_regex": ""
}
),
(
"function invalidMethod() {}",
None
),
(
"invalidMethod()",
None
),
(
"invalidMethod {}",
None
),
],
ids=[
"valid method",
"valid method with many arguments",
"valid static method",
"valid getter method",
"valid setter method",
"invalid method with 'function' statement",
"invalid method without nested element",
"invalid method without argument",
]
)
def test_class_method_pattern(content, expected):
    """Match a class method."""
    found = champollion.parser.js_class._CLASS_METHOD_PATTERN.search(content)
    if expected is not None:
        assert found.groupdict() == expected
    else:
        assert found is None
@pytest.mark.parametrize(
("content", "expected"),
[
(
"arrow_type_method = (arg1) => {};",
{
"arguments": "arg1",
"single_argument": None,
"method_name": "arrow_type_method",
"prefix": None,
"start_regex": ""
}
),
(
"arrow_type_method2 = arg1 => {};",
{
"arguments": None,
"single_argument": "arg1",
"method_name": "arrow_type_method2",
"prefix": None,
"start_regex": ""
}
),
(
"static arrow_type_method = (arg1) => {};",
{
"arguments": "arg1",
"single_argument": None,
"method_name": "arrow_type_method",
"prefix": "static ",
"start_regex": ""
}
),
(
"arrow_type_method3 = (arg1, arg2) => {};",
{
"arguments": "arg1, arg2",
"single_argument": None,
"method_name": "arrow_type_method3",
"prefix": None,
"start_regex": ""
}
),
(
(
"arrow_type_method3 = (\n"
" arg1, arg2, arg3, arg4, arg5, agr6,\n"
" arg7\n"
") => {\n"
" console.log('youpi');\n"
"};\n"
),
{
"arguments": "arg1, arg2, arg3, arg4, arg5, agr6,\n arg7",
"single_argument": None,
"method_name": "arrow_type_method3",
"prefix": None,
"start_regex": ""
}
),
(
"const arrow_type_method = (arg1) => {}",
None
),
(
"const test = 'arrow_type_method = (arg1) => {}'",
None
),
(
"(arg1) => {}",
None
),
(
"const arrow_type_method = arg1, arg2 => {};",
None
),
],
ids=[
"valid method with one argument and brackets",
"valid method with one argument and no brackets",
"valid static method",
"valid method with two arguments",
"valid method with multiple arguments",
"invalid arrow-type method with type",
"invalid arrow-type method string",
"invalid unassigned arrow-type method",
"invalid arrow-type method with multiple argument and no brackets",
]
)
def test_class_method_arrow_pattern(content, expected):
    """Match a class arrow-type method."""
    found = champollion.parser.js_class._CLASS_METHOD_ARROW_PATTERN.search(content)
    if expected is not None:
        assert found.groupdict() == expected
    else:
        assert found is None
@pytest.mark.parametrize(
("content", "expected"),
[
(
"attribute_test1 = 42;",
{
"name": "attribute_test1",
"value": "42;",
"prefix": None,
"start_regex": ""
}
),
(
(
"static attribute_test2 = {\n"
" key: 'value',\n"
"};"
),
{
"name": "attribute_test2",
"value": (
"{\n"
" key: 'value',\n"
"};"
),
"prefix": "static ",
"start_regex": ""
}
),
(
(
"attribute_test3 = [\n"
" 1, 2, 3, 4, 5, 6, 7, 8, 9, 10,\n"
"];"
),
{
"name": "attribute_test3",
"value": (
"[\n"
" 1, 2, 3, 4, 5, 6, 7, 8, 9, 10,\n"
"];"
),
"prefix": None,
"start_regex": ""
}
),
(
"const test = 'static attribute = 42';",
None
),
(
"attribute_test1 = 42",
None
),
],
ids=[
"valid attribute",
"valid static object attribute",
"valid static object attribute",
"invalid attribute string",
"invalid attribute with no semi-colons",
]
)
def test_class_attribute_pattern(content, expected):
    """Match a class attribute."""
    found = champollion.parser.js_class._CLASS_ATTRIBUTE_PATTERN.search(content)
    if expected is not None:
        assert found.groupdict() == expected
    else:
        assert found is None
| buddly27/champollion | test/unit/parser/test_parser_js_class.py | Python | apache-2.0 | 22,754 |
#@PydevCodeAnalysisIgnore
'''
Note that this test is run from org.python.pydev.jythontests.JythonTest
(to have the needed eclipse libraries)
'''
import unittest
import sys
IS_JYTHON = sys.platform.find('java') != -1
#===================================================================================================
# PyContextType
#===================================================================================================
class PyContextType:
    """Minimal stand-in for the PyDev context type: collects resolvers."""

    def __init__(self):
        # Resolvers registered so far, in registration order.
        self.resolvers = []

    def addResolver(self, resolver):
        """Register *resolver* with this context type."""
        self.resolvers.append(resolver)
import __builtin__
# Old-Jython compatibility: make sure the boolean constants exist as builtins.
__builtin__.False = False
__builtin__.True = True
py_context_type = PyContextType()
# Expose the context type as a builtin: importing pytemplate_defaults
# registers its resolvers on py_context_type (see Test.setUp below).
__builtin__.py_context_type = py_context_type
#===================================================================================================
# Context
#===================================================================================================
class Context:
    """Minimal fake of the template context handed to resolvers."""

    def __init__(self, doc):
        self.doc = doc
        # The real context exposes a viewer; this test double reuses itself.
        self.viewer = self

    def isCythonFile(self):
        """The fake context always represents a plain (non-Cython) file."""
        return False

    def getDocument(self):
        """Return the wrapped document."""
        return self.doc
#===================================================================================================
# Test
#===================================================================================================
class Test(unittest.TestCase):
    """Checks the template-variable resolvers that pytemplate_defaults
    registers on the py_context_type builtin."""

    def setUp(self):
        unittest.TestCase.setUp(self)
        import pytemplate_defaults #Just importing it will fill the py_context_type
        # Swap in our selection factory so tests can inject a prepared
        # PySelection instead of one derived from a live editor.
        pytemplate_defaults._CreateSelection = self._CreateSelection

    def _CreateSelection(self, editor):
        # Stand-in for pytemplate_defaults._CreateSelection: ignores the
        # editor and returns whatever selection the current test prepared.
        return self._selection

    def testResolvers(self):
        # Index the registered resolvers by their type name.
        types = {}
        for r in py_context_type.resolvers:
            types[r.type] = r
        expected = [
            'current_class',
            'current_method',
            'current_qualified_scope',
            'file',
            'lparen_if_py3',
            'module',
            'next_class_or_method',
            'prev_class_or_method',
            'rparen_if_py3',
            'space_if_py2',
            'superclass',
            'pydevd_dir_location',
            'pydevd_file_location',
        ]
        gotten = types.keys()
        gotten.sort()
        expected.sort()
        self.assertEqual(expected, gotten)
        self.CheckCase1(types)
        self.CheckCase2(types)

    def CheckCase1(self, types):
        # Cursor at line 1, column 0: inside class A, before any method.
        doc = '''class A(object): # line 0
    def m1(self): #line 2
        pass
    def m2(self): #line 5
        pass
'''
        from org.eclipse.jface.text import Document
        from org.python.pydev.core.docutils import PySelection
        doc = Document(doc)
        self._selection = PySelection(doc, 1, 0)
        context = Context(doc)
        self.assertEqual(['A'], types['current_class'].resolveAll(context))
        self.assertEqual([''], types['current_method'].resolveAll(context))
        self.assertEqual(['A'], types['current_qualified_scope'].resolveAll(context))
        self.assertEqual(['A'], types['prev_class_or_method'].resolveAll(context))
        self.assertEqual(['m1'], types['next_class_or_method'].resolveAll(context))
        self.assertEqual(['object'], types['superclass'].resolveAll(context))

    def CheckCase2(self, types):
        from org.eclipse.jface.text import Document
        from org.python.pydev.core.docutils import PySelection
        # Unterminated class declaration: no superclass is resolvable.
        doc = '''class A(object
'''
        doc = Document(doc)
        self._selection = PySelection(doc, 1, 0)
        context = Context(doc)
        self.assertEqual(['A'], types['current_class'].resolveAll(context))
        self.assertEqual([''], types['current_method'].resolveAll(context))
        self.assertEqual(['A'], types['current_qualified_scope'].resolveAll(context))
        self.assertEqual(['A'], types['prev_class_or_method'].resolveAll(context))
        self.assertEqual([''], types['next_class_or_method'].resolveAll(context))
        self.assertEqual([''], types['superclass'].resolveAll(context))
        # Multiple superclasses on a single line.
        doc = '''class A(object, obj, foo)
'''
        doc = Document(doc)
        self._selection = PySelection(doc, 1, 0)
        context = Context(doc)
        self.assertEqual(['A'], types['current_class'].resolveAll(context))
        self.assertEqual([''], types['current_method'].resolveAll(context))
        self.assertEqual(['A'], types['current_qualified_scope'].resolveAll(context))
        self.assertEqual(['A'], types['prev_class_or_method'].resolveAll(context))
        self.assertEqual([''], types['next_class_or_method'].resolveAll(context))
        self.assertEqual(['object', 'obj', 'foo'], types['superclass'].resolveAll(context))
        # Superclass list continued across lines with a trailing comment.
        doc = '''class A(object, #comment
obj, foo)
'''
        doc = Document(doc)
        self._selection = PySelection(doc, 1, 0)
        context = Context(doc)
        self.assertEqual(['A'], types['current_class'].resolveAll(context))
        self.assertEqual([''], types['current_method'].resolveAll(context))
        self.assertEqual(['A'], types['current_qualified_scope'].resolveAll(context))
        self.assertEqual(['A'], types['prev_class_or_method'].resolveAll(context))
        self.assertEqual([''], types['next_class_or_method'].resolveAll(context))
        self.assertEqual(['object', 'obj', 'foo'], types['superclass'].resolveAll(context))
#===================================================================================================
# main
#===================================================================================================
if __name__ == '__main__':
    if IS_JYTHON:
        # Run the suite only under Jython: the tests rely on Eclipse/PyDev
        # Java classes (org.eclipse.*, org.python.pydev.*).
        suite = unittest.makeSuite(Test)
        unittest.TextTestRunner(verbosity=1).run(suite)
    else:
        sys.stdout.write('Not running jython tests for non-java platform: %s' % sys.platform)
| smkr/pyclipse | plugins/org.python.pydev.jython/jysrc/tests/test_templates.py | Python | epl-1.0 | 6,214 |
import time
import subprocess
import sys
def CheckHaltFile(haltfilename):
    """Read the halt flag (an integer on the first line) from *haltfilename*."""
    with open(haltfilename, 'r') as haltfile:
        return int(haltfile.readline())
# Number of MPI processes, taken from the command line.
np = sys.argv[1]
#Number of forecasters in the group
num_forecasters = 1
#Filenames
haltfilename = 'examples/fgroup/halt'
timesfilename = 'examples/fgroup/times'
ptimesfilename = 'examples/fgroup/output_'
#Create halt file
halt = 0
with open(haltfilename,'w') as haltfile:
    haltfile.write(str(halt))
#Get the first batch of init and final times
with open(timesfilename) as infile:
    old_times = []
    for line in infile:
        if line.strip():
            old_times.append([int(x) for x in line.split()])
#Check that enough initial timestamps are set in the times file
N = len(old_times)
if N != num_forecasters:
    print 'Error: expected',num_forecasters,'forecasters. Got data for ',N
    sys.exit(1)
# Main driver loop: run the forecaster, rewrite the times file from its
# output, and stop once the halt file contains a non-zero value.
while(halt == 0):
    #Call programs #######################
    idx = -1
    #0: Toplayer - IFC (ifc1c)
    idx += 1
    cmd = 'mpirun -np '+str(np)+' ./FORECASTER_MAPS_END examples/GlobalForecast262_ifc1c.gbl examples/fcast_file.fcst '+str(old_times[idx][0])+' '+str(old_times[idx][1])+' '+ptimesfilename+str(idx)+' '+str(old_times[idx][0]-3600)+' 0 0'
    print '\nRunning command',cmd
    sys.stdout.flush()
    time.sleep(1)
    subprocess.call(cmd,shell=True)
    #Create new times files
    # Each forecaster's output timestamps are read back and written to the
    # shared times file (each value duplicated for init and final columns).
    new_times = []
    outfile = open(timesfilename,'w')
    for i in range(N):
        with open(ptimesfilename+str(i),'r') as infile:
            for line in infile:
                towrite = ''
                holder = []
                for x in line.split():
                    holder.append(int(x))
                    holder.append(int(x))
                    towrite = towrite + x + ' '
                new_times.append([x for x in holder])
                outfile.write(towrite+towrite+'\n')
    outfile.close()
    print 'Got',new_times
    sys.stdout.flush()
    #Check the halt file
    halt = CheckHaltFile(haltfilename)
    #Check if any progress was made
    if halt == 0:
        if old_times == new_times:
            # No new timestamps: wait 10 minutes, then re-check the halt flag.
            print 'No progress made. Sleeping...'
            sys.stdout.flush()
            time.sleep(10*60)
            halt = CheckHaltFile(haltfilename)
        else:
            print 'Going for the next round'
            sys.stdout.flush()
            old_times = new_times
print 'Halt signal received'
| ssmall41/FloodForecasters | forecaster_group_example.py | Python | gpl-2.0 | 2,144 |
# Copyright 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for manipulating Bays via the DB API"""
import six
from magnum.common import context
from magnum.common import exception
from magnum.common import utils as magnum_utils
from magnum.objects.bay import Status as bay_status
from magnum.tests.unit.db import base
from magnum.tests.unit.db import utils
class DbBayTestCase(base.DbTestCase):
    """DB API tests for bay CRUD: create, get, list, destroy, update.

    The destroy tests also verify that resources owned by a bay (pods,
    services, replication controllers, containers) are cascade-deleted
    whether the bay is destroyed by id or by uuid.
    """

    def test_create_bay(self):
        utils.create_test_bay()

    def test_create_bay_nullable_baymodel_id(self):
        utils.create_test_bay(baymodel_id=None)

    def test_create_bay_already_exists(self):
        utils.create_test_bay()
        self.assertRaises(exception.BayAlreadyExists,
                          utils.create_test_bay)

    def test_get_bay_by_id(self):
        bay = utils.create_test_bay()
        res = self.dbapi.get_bay_by_id(self.context, bay.id)
        self.assertEqual(bay.id, res.id)
        self.assertEqual(bay.uuid, res.uuid)

    def test_get_bay_by_name(self):
        bay = utils.create_test_bay()
        res = self.dbapi.get_bay_by_name(self.context, bay.name)
        self.assertEqual(bay.name, res.name)
        self.assertEqual(bay.uuid, res.uuid)

    def test_get_bay_by_uuid(self):
        bay = utils.create_test_bay()
        res = self.dbapi.get_bay_by_uuid(self.context, bay.uuid)
        self.assertEqual(bay.id, res.id)
        self.assertEqual(bay.uuid, res.uuid)

    def test_get_bay_that_does_not_exist(self):
        self.assertRaises(exception.BayNotFound,
                          self.dbapi.get_bay_by_id,
                          self.context, 999)
        self.assertRaises(exception.BayNotFound,
                          self.dbapi.get_bay_by_uuid,
                          self.context,
                          '12345678-9999-0000-aaaa-123456789012')

    def test_get_bay_list(self):
        uuids = []
        # Create five bays and check they are all returned.
        for _ in range(5):
            bay = utils.create_test_bay(uuid=magnum_utils.generate_uuid())
            uuids.append(six.text_type(bay['uuid']))
        res = self.dbapi.get_bay_list(self.context)
        res_uuids = [r.uuid for r in res]
        self.assertEqual(sorted(uuids), sorted(res_uuids))

    def test_get_bay_list_sorted(self):
        uuids = []
        for _ in range(5):
            bay = utils.create_test_bay(uuid=magnum_utils.generate_uuid())
            uuids.append(six.text_type(bay.uuid))
        res = self.dbapi.get_bay_list(self.context, sort_key='uuid')
        res_uuids = [r.uuid for r in res]
        self.assertEqual(sorted(uuids), res_uuids)

        # An unknown sort key must be rejected.
        self.assertRaises(exception.InvalidParameterValue,
                          self.dbapi.get_bay_list,
                          self.context,
                          sort_key='foo')

    def test_get_bay_list_with_filters(self):
        bm1 = utils.get_test_baymodel(id=1, uuid=magnum_utils.generate_uuid())
        bm2 = utils.get_test_baymodel(id=2, uuid=magnum_utils.generate_uuid())
        self.dbapi.create_baymodel(bm1)
        self.dbapi.create_baymodel(bm2)
        bay1 = utils.create_test_bay(
            name='bay-one',
            uuid=magnum_utils.generate_uuid(),
            baymodel_id=bm1['uuid'],
            status=bay_status.CREATE_IN_PROGRESS)
        bay2 = utils.create_test_bay(
            name='bay-two',
            uuid=magnum_utils.generate_uuid(),
            baymodel_id=bm2['uuid'],
            node_count=1,
            master_count=1,
            status=bay_status.UPDATE_IN_PROGRESS)
        bay3 = utils.create_test_bay(
            name='bay-three',
            node_count=2,
            master_count=5,
            status=bay_status.DELETE_IN_PROGRESS)

        # Filter on baymodel.
        res = self.dbapi.get_bay_list(self.context,
                                      filters={'baymodel_id': bm1['uuid']})
        self.assertEqual([bay1.id], [r.id for r in res])

        res = self.dbapi.get_bay_list(self.context,
                                      filters={'baymodel_id': bm2['uuid']})
        self.assertEqual([bay2.id], [r.id for r in res])

        # Filter on name (including a name that matches nothing).
        res = self.dbapi.get_bay_list(self.context,
                                      filters={'name': 'bay-one'})
        self.assertEqual([bay1.id], [r.id for r in res])

        res = self.dbapi.get_bay_list(self.context,
                                      filters={'name': 'bad-bay'})
        self.assertEqual([], [r.id for r in res])

        # Filter on node/master counts (bay1 uses the default counts).
        res = self.dbapi.get_bay_list(self.context,
                                      filters={'node_count': 3})
        self.assertEqual([bay1.id], [r.id for r in res])

        res = self.dbapi.get_bay_list(self.context,
                                      filters={'node_count': 1})
        self.assertEqual([bay2.id], [r.id for r in res])

        res = self.dbapi.get_bay_list(self.context,
                                      filters={'master_count': 3})
        self.assertEqual([bay1.id], [r.id for r in res])

        res = self.dbapi.get_bay_list(self.context,
                                      filters={'master_count': 1})
        self.assertEqual([bay2.id], [r.id for r in res])

        # A status filter may carry a list of acceptable statuses.
        filters = {'status': [bay_status.CREATE_IN_PROGRESS,
                              bay_status.DELETE_IN_PROGRESS]}
        res = self.dbapi.get_bay_list(self.context,
                                      filters=filters)
        self.assertEqual([bay1.id, bay3.id], [r.id for r in res])

    def test_get_bay_list_by_admin_all_tenants(self):
        uuids = []
        # Bays spread across distinct projects/users should all be visible
        # to an admin listing with get_all_tenants.
        for _ in range(5):
            bay = utils.create_test_bay(
                uuid=magnum_utils.generate_uuid(),
                project_id=magnum_utils.generate_uuid(),
                user_id=magnum_utils.generate_uuid())
            uuids.append(six.text_type(bay['uuid']))
        ctx = context.make_admin_context()
        res = self.dbapi.get_bay_list(ctx, opts={'get_all_tenants': True})
        res_uuids = [r.uuid for r in res]
        self.assertEqual(sorted(uuids), sorted(res_uuids))

    def test_get_bay_list_baymodel_not_exist(self):
        utils.create_test_bay()
        self.assertEqual(1, len(self.dbapi.get_bay_list(self.context)))
        # Filtering on a baymodel uuid that matches nothing yields no bays.
        res = self.dbapi.get_bay_list(self.context, filters={
            'baymodel_id': magnum_utils.generate_uuid()})
        self.assertEqual(0, len(res))

    def test_destroy_bay(self):
        bay = utils.create_test_bay()
        self.assertIsNotNone(self.dbapi.get_bay_by_id(self.context,
                                                      bay.id))
        self.dbapi.destroy_bay(bay.id)
        self.assertRaises(exception.BayNotFound,
                          self.dbapi.get_bay_by_id,
                          self.context, bay.id)

    def test_destroy_bay_by_uuid(self):
        bay = utils.create_test_bay()
        self.assertIsNotNone(self.dbapi.get_bay_by_uuid(self.context,
                                                        bay.uuid))
        self.dbapi.destroy_bay(bay.uuid)
        self.assertRaises(exception.BayNotFound,
                          self.dbapi.get_bay_by_uuid, self.context,
                          bay.uuid)

    def test_destroy_bay_that_does_not_exist(self):
        self.assertRaises(exception.BayNotFound,
                          self.dbapi.destroy_bay,
                          '12345678-9999-0000-aaaa-123456789012')

    def test_destroy_bay_that_has_pods(self):
        bay = utils.create_test_bay()
        pod = utils.create_test_pod(bay_uuid=bay.uuid)
        self.assertEqual(bay.uuid, pod.bay_uuid)
        self.dbapi.destroy_bay(bay.id)
        self.assertRaises(exception.PodNotFound,
                          self.dbapi.get_pod_by_id, self.context, pod.id)

    def test_destroy_bay_that_has_pods_by_uuid(self):
        bay = utils.create_test_bay()
        pod = utils.create_test_pod(bay_uuid=bay.uuid)
        self.assertEqual(bay.uuid, pod.bay_uuid)
        self.dbapi.destroy_bay(bay.uuid)
        self.assertRaises(exception.PodNotFound,
                          self.dbapi.get_pod_by_id, self.context, pod.id)

    def test_destroy_bay_that_has_services(self):
        bay = utils.create_test_bay()
        service = utils.create_test_service(bay_uuid=bay.uuid)
        self.assertEqual(bay.uuid, service.bay_uuid)
        self.dbapi.destroy_bay(bay.id)
        self.assertRaises(exception.ServiceNotFound,
                          self.dbapi.get_service_by_id,
                          self.context, service.id)

    def test_destroy_bay_that_has_services_by_uuid(self):
        bay = utils.create_test_bay()
        service = utils.create_test_service(bay_uuid=bay.uuid)
        self.assertEqual(bay.uuid, service.bay_uuid)
        # Fix: destroy by uuid, as the test name says (previously used
        # bay.id, which duplicated test_destroy_bay_that_has_services and
        # left the uuid code path uncovered; cf. the *_pods_by_uuid and
        # *_rc_by_uuid siblings).
        self.dbapi.destroy_bay(bay.uuid)
        self.assertRaises(exception.ServiceNotFound,
                          self.dbapi.get_service_by_id,
                          self.context, service.id)

    def test_destroy_bay_that_has_rc(self):
        bay = utils.create_test_bay()
        rc = utils.create_test_rc(bay_uuid=bay.uuid)
        self.assertEqual(bay.uuid, rc.bay_uuid)
        self.dbapi.destroy_bay(bay.id)
        self.assertRaises(exception.ReplicationControllerNotFound,
                          self.dbapi.get_rc_by_id,
                          self.context, rc.id)

    def test_destroy_bay_that_has_rc_by_uuid(self):
        bay = utils.create_test_bay()
        rc = utils.create_test_rc(bay_uuid=bay.uuid)
        self.assertEqual(bay.uuid, rc.bay_uuid)
        self.dbapi.destroy_bay(bay.uuid)
        self.assertRaises(exception.ReplicationControllerNotFound,
                          self.dbapi.get_rc_by_id,
                          self.context, rc.id)

    def test_destroy_bay_that_has_containers(self):
        bay = utils.create_test_bay()
        container = utils.create_test_container(bay_uuid=bay.uuid)
        self.assertEqual(bay.uuid, container.bay_uuid)
        self.dbapi.destroy_bay(bay.id)
        self.assertRaises(exception.ContainerNotFound,
                          self.dbapi.get_container_by_id,
                          self.context, container.id)

    def test_destroy_bay_that_has_containers_by_uuid(self):
        bay = utils.create_test_bay()
        container = utils.create_test_container(bay_uuid=bay.uuid)
        self.assertEqual(bay.uuid, container.bay_uuid)
        self.dbapi.destroy_bay(bay.uuid)
        self.assertRaises(exception.ContainerNotFound,
                          self.dbapi.get_container_by_id,
                          self.context, container.id)

    def test_update_bay(self):
        bay = utils.create_test_bay()
        old_nc = bay.node_count
        new_nc = 5
        self.assertNotEqual(old_nc, new_nc)
        res = self.dbapi.update_bay(bay.id, {'node_count': new_nc})
        self.assertEqual(new_nc, res.node_count)

    def test_update_bay_not_found(self):
        bay_uuid = magnum_utils.generate_uuid()
        self.assertRaises(exception.BayNotFound, self.dbapi.update_bay,
                          bay_uuid, {'node_count': 5})

    def test_update_bay_uuid(self):
        # The uuid is immutable once the bay exists.
        bay = utils.create_test_bay()
        self.assertRaises(exception.InvalidParameterValue,
                          self.dbapi.update_bay, bay.id,
                          {'uuid': ''})
| ddepaoli3/magnum | magnum/tests/unit/db/test_bay.py | Python | apache-2.0 | 11,781 |
# -*- coding: utf-8 -*-
"""
***************************************************************************
self.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from qgis.PyQt.QtGui import QIcon
from qgis.core import (QgsRasterFileWriter,
QgsProcessingParameterDefinition,
QgsProcessingParameterRasterLayer,
QgsProcessingParameterCrs,
QgsProcessingParameterString,
QgsProcessingParameterNumber,
QgsProcessingParameterEnum,
QgsProcessingParameterBoolean,
QgsProcessingParameterExtent,
QgsProcessingParameterRasterDestination,
QgsProcessingUtils)
from processing.algs.gdal.GdalAlgorithm import GdalAlgorithm
from processing.algs.gdal.GdalUtils import GdalUtils
pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]
class warp(GdalAlgorithm):
    """Processing wrapper around the GDAL ``gdalwarp`` utility.

    Declares the reprojection parameters and assembles the matching
    gdalwarp command line; execution is handled by GdalAlgorithm.
    """

    INPUT = 'INPUT'
    SOURCE_CRS = 'SOURCE_CRS'
    TARGET_CRS = 'TARGET_CRS'
    NODATA = 'NODATA'
    TARGET_RESOLUTION = 'TARGET_RESOLUTION'
    OPTIONS = 'OPTIONS'
    RESAMPLING = 'RESAMPLING'
    DATA_TYPE = 'DATA_TYPE'
    TARGET_EXTENT = 'TARGET_EXTENT'
    TARGET_EXTENT_CRS = 'TARGET_EXTENT_CRS'
    MULTITHREADING = 'MULTITHREADING'
    OUTPUT = 'OUTPUT'

    # GDAL data type names, indexed by the DATA_TYPE enum parameter.
    TYPES = ['Byte', 'Int16', 'UInt16', 'UInt32', 'Int32', 'Float32', 'Float64', 'CInt16', 'CInt32', 'CFloat32', 'CFloat64']

    def __init__(self):
        super().__init__()

    def initAlgorithm(self, config=None):
        """Declare all input, advanced, and output parameters."""
        # (display label, gdalwarp -r value) pairs; the RESAMPLING enum
        # index selects the second element at command-build time.
        self.methods = ((self.tr('Nearest neighbour'), 'near'),
                        (self.tr('Bilinear'), 'bilinear'),
                        (self.tr('Cubic'), 'cubic'),
                        (self.tr('Cubic spline'), 'cubicspline'),
                        (self.tr('Lanczos windowed sinc'), 'lanczos'),
                        (self.tr('Average'), 'average'),
                        (self.tr('Mode'), 'mode'),
                        (self.tr('Maximum'), 'max'),
                        (self.tr('Minimum'), 'min'),
                        (self.tr('Median'), 'med'),
                        (self.tr('First quartile'), 'q1'),
                        (self.tr('Third quartile'), 'q3'))

        self.addParameter(QgsProcessingParameterRasterLayer(self.INPUT, self.tr('Input layer')))
        self.addParameter(QgsProcessingParameterCrs(self.SOURCE_CRS,
                                                    self.tr('Source CRS'),
                                                    optional=True))
        self.addParameter(QgsProcessingParameterCrs(self.TARGET_CRS,
                                                    self.tr('Target CRS'),
                                                    'EPSG:4326'))
        self.addParameter(QgsProcessingParameterNumber(self.NODATA,
                                                       self.tr('Nodata value for output bands'),
                                                       type=QgsProcessingParameterNumber.Double,
                                                       defaultValue=0.0))
        self.addParameter(QgsProcessingParameterNumber(self.TARGET_RESOLUTION,
                                                       self.tr('Output file resolution in target georeferenced units'),
                                                       type=QgsProcessingParameterNumber.Double,
                                                       minValue=0.0,
                                                       defaultValue=None))

        options_param = QgsProcessingParameterString(self.OPTIONS,
                                                     self.tr('Additional creation parameters'),
                                                     defaultValue='',
                                                     optional=True)
        options_param.setFlags(options_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
        options_param.setMetadata({
            'widget_wrapper': {
                'class': 'processing.algs.gdal.ui.RasterOptionsWidget.RasterOptionsWidgetWrapper'}})
        self.addParameter(options_param)

        self.addParameter(QgsProcessingParameterEnum(self.RESAMPLING,
                                                     self.tr('Resampling method to use'),
                                                     options=[i[0] for i in self.methods],
                                                     defaultValue=0))

        dataType_param = QgsProcessingParameterEnum(self.DATA_TYPE,
                                                    self.tr('Output data type'),
                                                    self.TYPES,
                                                    allowMultiple=False,
                                                    defaultValue=5)
        dataType_param.setFlags(dataType_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
        self.addParameter(dataType_param)

        target_extent_param = QgsProcessingParameterExtent(self.TARGET_EXTENT,
                                                           self.tr('Georeferenced extents of output file to be created'),
                                                           optional=True)
        target_extent_param.setFlags(target_extent_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
        self.addParameter(target_extent_param)

        target_extent_crs_param = QgsProcessingParameterCrs(self.TARGET_EXTENT_CRS,
                                                            self.tr('CRS of the target raster extent'),
                                                            optional=True)
        target_extent_crs_param.setFlags(target_extent_crs_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
        self.addParameter(target_extent_crs_param)

        multithreading_param = QgsProcessingParameterBoolean(self.MULTITHREADING,
                                                             self.tr('Use multithreaded warping implementation'),
                                                             defaultValue=False)
        multithreading_param.setFlags(multithreading_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
        self.addParameter(multithreading_param)

        self.addParameter(QgsProcessingParameterRasterDestination(self.OUTPUT,
                                                                  self.tr('Reprojected')))

    def name(self):
        return 'warpreproject'

    def displayName(self):
        return self.tr('Warp (reproject)')

    def group(self):
        return self.tr('Raster projections')

    def groupId(self):
        return 'rasterprojections'

    def icon(self):
        return QIcon(os.path.join(pluginPath, 'images', 'gdaltools', 'warp.png'))

    def tags(self):
        return self.tr('transform,reproject,crs,srs').split(',')

    def getConsoleCommands(self, parameters, context, feedback, executing=True):
        """Return the gdalwarp command line for the current parameters."""
        inLayer = self.parameterAsRasterLayer(parameters, self.INPUT, context)
        out = self.parameterAsOutputLayer(parameters, self.OUTPUT, context)
        sourceCrs = self.parameterAsCrs(parameters, self.SOURCE_CRS, context)
        targetCrs = self.parameterAsCrs(parameters, self.TARGET_CRS, context)
        nodata = self.parameterAsDouble(parameters, self.NODATA, context)
        resolution = self.parameterAsDouble(parameters, self.TARGET_RESOLUTION, context)

        arguments = []
        if sourceCrs.isValid():
            arguments.append('-s_srs')
            arguments.append(sourceCrs.authid())

        if targetCrs.isValid():
            arguments.append('-t_srs')
            arguments.append(targetCrs.authid())

        # NOTE(review): a 0.0 nodata value (the parameter default) is
        # indistinguishable from "unset" here, so -dstnodata is never
        # emitted for 0 -- confirm whether that is intended.
        if nodata:
            arguments.append('-dstnodata')
            arguments.append(str(nodata))

        if resolution:
            # gdalwarp -tr takes x and y resolution; emit square pixels.
            arguments.append('-tr')
            arguments.append(str(resolution))
            arguments.append(str(resolution))

        arguments.append('-r')
        arguments.append(self.methods[self.parameterAsEnum(parameters, self.RESAMPLING, context)][1])

        extent = self.parameterAsExtent(parameters, self.TARGET_EXTENT, context)
        if not extent.isNull():
            arguments.append('-te')
            # Fix: stringify the coordinates, consistent with every other
            # numeric argument (these were previously appended as raw floats).
            arguments.append(str(extent.xMinimum()))
            arguments.append(str(extent.yMinimum()))
            arguments.append(str(extent.xMaximum()))
            arguments.append(str(extent.yMaximum()))

            extentCrs = self.parameterAsCrs(parameters, self.TARGET_EXTENT_CRS, context)
            # Fix: test validity explicitly, matching the source/target CRS
            # handling above -- a bare truth test on the CRS object is
            # always True, so -te_srs was appended even when unset.
            if extentCrs.isValid():
                arguments.append('-te_srs')
                arguments.append(extentCrs.authid())

        if self.parameterAsBool(parameters, self.MULTITHREADING, context):
            arguments.append('-multi')

        arguments.append('-ot')
        arguments.append(self.TYPES[self.parameterAsEnum(parameters, self.DATA_TYPE, context)])

        # (Removed a redundant second parameterAsOutputLayer() call; "out"
        # was already fetched above.)
        arguments.append('-of')
        arguments.append(QgsRasterFileWriter.driverForExtension(os.path.splitext(out)[1]))

        options = self.parameterAsString(parameters, self.OPTIONS, context)
        if options:
            arguments.append('-co')
            arguments.append(options)

        arguments.append(inLayer.source())
        arguments.append(out)

        return ['gdalwarp', GdalUtils.escapeAndJoin(arguments)]
| stevenmizuno/QGIS | python/plugins/processing/algs/gdal/warp.py | Python | gpl-2.0 | 10,402 |
import random
class Card(object):
    """A single playing card, linkable into a Deck's singly linked list.

    Face cards are stored by name ("Ace", "Jack", "Queen", "King"); all
    other ranks are stored by their number, exactly as before.  The two
    repetitive if-chains were replaced by shared lookup tables.
    """

    # Numeric rank -> face-card name, and its inverse for comparisons.
    _RANK_NAMES = {1: 'Ace', 11: 'Jack', 12: 'Queen', 13: 'King'}
    _RANK_VALUES = {'Ace': 1, 'Jack': 11, 'Queen': 12, 'King': 13}

    def __init__(self, card_value, suit):
        # Face ranks become names; everything else is kept as-is.
        self.card_value = self._RANK_NAMES.get(card_value, card_value)
        self.suit = suit
        self.next = None  # link used by Deck's linked list

    def __str__(self):
        return "({}, {})".format(self.card_value, self.suit)

    def __cmp__(self, other):
        """
        Note that in most card games there is no suit order,
        instead the pot is generally split
        """
        # Map face names back to numbers so ranks compare numerically.
        first_card = self._RANK_VALUES.get(self.card_value, self.card_value)
        second_card = self._RANK_VALUES.get(other.card_value, other.card_value)
        if first_card > second_card:
            return 1
        if first_card < second_card:
            return -1
        return 0
class Deck(object):
    """A 52-card deck stored as a singly linked list of Card objects.

    Cards are pushed onto ``self.top`` at construction, suit by suit, so the
    last card pushed (King of Spades) is the first one dealt.  ``self.size``
    is an int attribute tracking the remaining card count.
    """

    # Push order matches the original four copy-pasted loops exactly.
    SUITS = ('Clubs', 'Hearts', 'Diamonds', 'Spades')

    def __init__(self):
        self.top = None
        self.size = 0
        for suit in self.SUITS:
            for rank in range(1, 14):
                card = Card(rank, suit)
                card.next = self.top
                self.top = card
                self.size += 1

    def deal(self):
        """Pop and return the top card; raise if the deck is empty."""
        if self.top is None:
            raise Exception('There are no cards left in the deck!')
        self.size -= 1
        dealt = self.top
        self.top = self.top.next
        return dealt

    def __iter__(self):
        # Iteration walks from the top of the deck without dealing.
        self.current = self.top
        return self

    def next(self):
        """Return the next card in top-to-bottom order."""
        if self.current is None:
            raise StopIteration('There are no more cards in the deck!')
        card = self.current
        self.current = self.current.next
        return card

    # Python 3 iterator protocol alias; harmless under Python 2.
    __next__ = next

    def size(self):
        # BUG (pre-existing, kept for interface compatibility): this method
        # is shadowed by the ``size`` int attribute assigned in __init__, so
        # ``deck.size`` yields the count and ``deck.size()`` raises
        # TypeError.  Prefer the attribute.
        return self.size

    def shuffle(self):
        """Shuffle in place by drawing cards uniformly at random."""
        # Flatten the linked list into a working pile.
        pile = []
        node = self.top
        while node is not None:
            pile.append(node)
            node = node.next
        # Draw random cards to form the new order.
        reordered = []
        while len(pile) > 0:
            reordered.append(pile.pop(random.randint(0, len(pile) - 1)))
        # Relink the nodes in the new order, terminating the list.
        self.top = reordered.pop(0)
        node = self.top
        while len(reordered) > 0:
            node.next = reordered.pop(0)
            node = node.next
        node.next = None
| caseymacphee/deck_of_cards | Deck.py | Python | mit | 2,790 |
# -*- coding: utf-8 -*-
import pytest
import time
import unittest.mock
from girder import events
class EventsHelper:
    """Stateful bundle of event handlers used by the tests below."""

    def __init__(self):
        self.ctr = 0
        self.responses = None

    def _raiseException(self, event):
        """Handler that always fails."""
        raise Exception('Failure condition')

    def _increment(self, event):
        """Add the event's 'amount' payload to the counter."""
        self.ctr = self.ctr + event.info['amount']

    def _incrementWithResponse(self, event):
        """Increment the counter, then record a response on the event."""
        self._increment(event)
        event.addResponse('foo')

    def _eatEvent(self, event):
        """Respond, then block both propagation and the default action."""
        event.addResponse({'foo': 'bar'})
        event.stopPropagation()
        event.preventDefault()

    def _shouldNotBeCalled(self, event):
        """Sentinel handler; reaching it means propagation was not stopped."""
        pytest.fail('This should not be called due to stopPropagation().')
@pytest.fixture
def eventsHelper():
    # Fresh helper per test so counters and responses never leak between tests.
    helper = EventsHelper()
    yield helper
def testSynchronousEvents(eventsHelper):
    """Exercise bind/trigger/unbind semantics of the synchronous event bus."""
    name, failname = '_test.event', '_test.failure'
    handlerName = '_test.handler'
    with events.bound(name, handlerName, eventsHelper._increment), \
            events.bound(failname, handlerName, eventsHelper._raiseException):
        # Make sure our exception propagates out of the handler
        with pytest.raises(Exception, match='^Failure condition$'):
            events.trigger(failname)
        # Bind an event to increment the counter
        assert eventsHelper.ctr == 0
        event = events.trigger(name, {'amount': 2})
        assert eventsHelper.ctr == 2
        assert event.propagate
        assert not event.defaultPrevented
        assert event.responses == []
        # The event should still be bound here if another handler unbinds
        # (unbinding a name that was never bound must be a no-op).
        events.unbind(name, 'not the handler name')
        events.trigger(name, {'amount': 2})
        assert eventsHelper.ctr == 4
    # Actually unbind the event, by going out of scope of "bound";
    # the counter must no longer move.
    events.trigger(name, {'amount': 2})
    assert eventsHelper.ctr == 4
    # Bind an event that prevents the default action and passes a response;
    # the second handler must never run because propagation was stopped.
    with events.bound(name, handlerName, eventsHelper._eatEvent), \
            events.bound(name, 'other handler name',
                         eventsHelper._shouldNotBeCalled):
        event = events.trigger(name)
        assert event.defaultPrevented
        assert not event.propagate
        assert event.responses == [{'foo': 'bar'}]
    # Test that the context manager unbinds after an unhandled exception
    try:
        with events.bound(failname, handlerName, eventsHelper._raiseException):
            events.trigger(failname)
    except Exception:
        # The event should be unbound at this point, so this must not raise
        events.trigger(failname)
@unittest.mock.patch.object(events, 'daemon', new=events.AsyncEventsThread())
def testAsyncEvents(eventsHelper):
    """Verify the background AsyncEventsThread queues events, runs them,
    and only fires the callback for handlers that succeed."""
    name, failname = '_test.event', '_test.failure'
    handlerName = '_test.handler'
    def callback(event):
        # Runs after a successful async handler: +1 to the counter and
        # capture the handler's responses.
        eventsHelper.ctr += 1
        eventsHelper.responses = event.responses
    with events.bound(failname, handlerName, eventsHelper._raiseException), \
            events.bound(name, handlerName, eventsHelper._incrementWithResponse):
        # Make sure an async handler that fails does not break the event
        # loop and that its callback is not triggered.
        assert events.daemon.eventQueue.qsize() == 0
        events.daemon.trigger(failname, handlerName, callback)
        # Triggering the event before the daemon starts should do nothing
        assert events.daemon.eventQueue.qsize() == 1
        events.daemon.trigger(name, {'amount': 2}, callback)
        assert events.daemon.eventQueue.qsize() == 2
        assert eventsHelper.ctr == 0
        # Now run the asynchronous event handler, which should eventually
        # cause our counter to be incremented.
        events.daemon.start()
        # Ensure that all of our events have been started within a
        # reasonable amount of time. Also check the results in the loop,
        # since the qsize only indicates if all events were started, not
        # finished.
        startTime = time.time()
        while True:
            # Expected final ctr is 3: +2 from _incrementWithResponse and
            # +1 from callback; the failing event contributes nothing.
            if events.daemon.eventQueue.qsize() == 0:
                if eventsHelper.ctr == 3:
                    break
            if time.time() - startTime > 15:
                break
            time.sleep(0.1)
        assert events.daemon.eventQueue.qsize() == 0
        assert eventsHelper.ctr == 3
        assert eventsHelper.responses == ['foo']
        events.daemon.stop()
@unittest.mock.patch.object(events, 'daemon', new=events.ForegroundEventsDaemon())
def testForegroundDaemon(eventsHelper):
    """The foreground daemon runs handlers synchronously in the caller."""
    assert isinstance(events.daemon, events.ForegroundEventsDaemon)
    # Should still be able to call start
    events.daemon.start()
    def callback(event):
        eventsHelper.ctr += 1
        eventsHelper.responses = event.responses
    with events.bound('_test.event', '_test.handler', eventsHelper._raiseException):
        # Synchronous execution: handler exceptions surface immediately.
        with pytest.raises(Exception, match='^Failure condition$'):
            events.daemon.trigger('_test.event', None, callback)
    with events.bound('_test.event', '_test.handler', eventsHelper._incrementWithResponse):
        events.daemon.trigger('_test.event', {'amount': 2}, callback)
    # ctr is 3: +2 from the handler, +1 from the callback.
    assert eventsHelper.ctr == 3
    assert eventsHelper.responses == ['foo']
    events.daemon.stop()
| girder/girder | test/test_events.py | Python | apache-2.0 | 5,259 |
#!/usr/bin/env python
#
# Copyright 2009,2010,2012 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import math
from gnuradio import gr, fft, blocks
from . import optfir
from . import filter_swig as filter
class channelizer_ccf(gr.hier_block):
    '''
    Make a Polyphase Filter channelizer (complex in, complex out, floating-point taps)
    This simplifies the interface by allowing a single input stream to connect to this block.
    It will then output a stream for each channel.
    '''
    def __init__(self, numchans, taps=None, oversample_rate=1, atten=100):
        """One complex stream in, numchans complex streams out.

        If no taps are given, a prototype lowpass is designed with optfir;
        atten is the stopband attenuation (dB) used in that design.
        """
        gr.hier_block.__init__(self, "pfb_channelizer_ccf",
                               gr.io_signature(1, 1, gr.sizeof_gr_complex),
                               gr.io_signature(numchans, numchans, gr.sizeof_gr_complex))
        self._nchans = numchans
        self._oversample_rate = oversample_rate
        if (taps is not None) and (len(taps) > 0):
            self._taps = taps
        else:
            # Create a filter that covers the full bandwidth of the input signal
            bw = 0.4
            tb = 0.2
            ripple = 0.1
            made = False
            # optfir can fail to converge; relax the passband ripple in
            # 0.01 dB steps until it succeeds or the 1.0 dB cap is hit.
            while not made:
                try:
                    self._taps = optfir.low_pass(1, self._nchans, bw, bw+tb, ripple, atten)
                    made = True
                except RuntimeError:
                    ripple += 0.01
                    made = False
                    print("Warning: set ripple to %.4f dB. If this is a problem, adjust the attenuation or create your own filter taps." % (ripple))
                    # Build in an exit strategy; if we've come this far, it ain't working.
                    if(ripple >= 1.0):
                        raise RuntimeError("optfir could not generate an appropriate filter.")
        # Deinterleave the single input into nchans streams that feed the
        # filterbank; each PFB output maps to one block output port.
        self.s2ss = blocks.stream_to_streams(gr.sizeof_gr_complex, self._nchans)
        self.pfb = filter.pfb_channelizer_ccf(self._nchans, self._taps,
                                              self._oversample_rate)
        self.connect(self, self.s2ss)
        for i in range(self._nchans):
            self.connect((self.s2ss,i), (self.pfb,i))
            self.connect((self.pfb,i), (self,i))
    def set_channel_map(self, newmap):
        # Remap which channel appears on which output port.
        self.pfb.set_channel_map(newmap)
    def set_taps(self, taps):
        self.pfb.set_taps(taps)
    def taps(self):
        return self.pfb.taps()
    def declare_sample_delay(self, delay):
        self.pfb.declare_sample_delay(delay)
class interpolator_ccf(gr.hier_block):
    '''
    Make a Polyphase Filter interpolator (complex in, complex out, floating-point taps)

    The block takes a single complex stream in and outputs a single complex
    stream out. As such, it requires no extra glue to handle the input/output
    streams. This block is provided to be consistent with the interface to the
    other PFB block.
    '''
    def __init__(self, interp, taps=None, atten=100):
        """Interpolate by integer factor *interp*.

        If no taps are given, a prototype lowpass is designed with optfir;
        atten is the stopband attenuation (dB) used in that design.
        (Fix: removed a dead ``self._taps = taps`` assignment that was
        immediately superseded by the if/else below.)
        """
        gr.hier_block.__init__(self, "pfb_interpolator_ccf",
                               gr.io_signature(1, 1, gr.sizeof_gr_complex),
                               gr.io_signature(1, 1, gr.sizeof_gr_complex))
        self._interp = interp
        if (taps is not None) and (len(taps) > 0):
            self._taps = taps
        else:
            # Create a filter that covers the full bandwidth of the input signal
            bw = 0.4
            tb = 0.2
            # NOTE(review): the sibling PFB wrappers start at ripple = 0.1;
            # starting at 0.99 dB leaves the retry loop almost no headroom
            # before the >= 1.0 exit check fires -- confirm this is intended.
            ripple = 0.99
            made = False
            # Relax the passband ripple until optfir converges, capped at 1.0 dB.
            while not made:
                try:
                    self._taps = optfir.low_pass(self._interp, self._interp, bw, bw+tb, ripple, atten)
                    made = True
                except RuntimeError:
                    ripple += 0.01
                    made = False
                    print("Warning: set ripple to %.4f dB. If this is a problem, adjust the attenuation or create your own filter taps." % (ripple))
                    # Build in an exit strategy; if we've come this far, it ain't working.
                    if(ripple >= 1.0):
                        raise RuntimeError("optfir could not generate an appropriate filter.")
        self.pfb = filter.pfb_interpolator_ccf(self._interp, self._taps)
        self.connect(self, self.pfb)
        self.connect(self.pfb, self)
    def set_taps(self, taps):
        self.pfb.set_taps(taps)
    def declare_sample_delay(self, delay):
        self.pfb.declare_sample_delay(delay)
class decimator_ccf(gr.hier_block):
    '''
    Make a Polyphase Filter decimator (complex in, complex out, floating-point taps)
    This simplifies the interface by allowing a single input stream to connect to this block.
    It will then output a stream that is the decimated output stream.
    '''
    def __init__(self, decim, taps=None, channel=0, atten=100,
                 use_fft_rotators=True, use_fft_filters=True):
        """Decimate by integer factor *decim*, keeping the given channel.

        If no taps are given, a prototype lowpass is designed with optfir;
        atten is the stopband attenuation (dB) used in that design.
        """
        gr.hier_block.__init__(self, "pfb_decimator_ccf",
                               gr.io_signature(1, 1, gr.sizeof_gr_complex),
                               gr.io_signature(1, 1, gr.sizeof_gr_complex))
        self._decim = decim
        self._channel = channel
        if (taps is not None) and (len(taps) > 0):
            self._taps = taps
        else:
            # Create a filter that covers the full bandwidth of the input signal
            bw = 0.4
            tb = 0.2
            ripple = 0.1
            made = False
            # Relax the passband ripple until optfir converges, capped at 1.0 dB.
            while not made:
                try:
                    self._taps = optfir.low_pass(1, self._decim, bw, bw+tb, ripple, atten)
                    made = True
                except RuntimeError:
                    ripple += 0.01
                    made = False
                    print("Warning: set ripple to %.4f dB. If this is a problem, adjust the attenuation or create your own filter taps." % (ripple))
                    # Build in an exit strategy; if we've come this far, it ain't working.
                    if(ripple >= 1.0):
                        raise RuntimeError("optfir could not generate an appropriate filter.")
        # Deinterleave the input into decim streams feeding the filterbank;
        # the PFB combines them into the single decimated output.
        self.s2ss = blocks.stream_to_streams(gr.sizeof_gr_complex, self._decim)
        self.pfb = filter.pfb_decimator_ccf(self._decim, self._taps, self._channel,
                                            use_fft_rotators, use_fft_filters)
        self.connect(self, self.s2ss)
        for i in range(self._decim):
            self.connect((self.s2ss,i), (self.pfb,i))
        self.connect(self.pfb, self)
    def set_taps(self, taps):
        self.pfb.set_taps(taps)
    def set_channel(self, chan):
        # Select which channel of the input band the output follows.
        self.pfb.set_channel(chan)
    def declare_sample_delay(self, delay):
        self.pfb.declare_sample_delay(delay)
class arb_resampler_ccf(gr.hier_block):
    '''
    Convenience wrapper for the polyphase filterbank arbitrary resampler.
    The block takes a single complex stream in and outputs a single complex
    stream out. As such, it requires no extra glue to handle the input/output
    streams. This block is provided to be consistent with the interface to the
    other PFB block.
    '''
    def __init__(self, rate, taps=None, flt_size=32, atten=100):
        """Resample by arbitrary (possibly fractional) *rate*.

        flt_size is the number of filters in the bank; atten is the
        stopband attenuation (dB) used when designing default taps.
        """
        gr.hier_block.__init__(self, "pfb_arb_resampler_ccf",
                               gr.io_signature(1, 1, gr.sizeof_gr_complex), # Input signature
                               gr.io_signature(1, 1, gr.sizeof_gr_complex)) # Output signature
        self._rate = rate
        self._size = flt_size
        if (taps is not None) and (len(taps) > 0):
            self._taps = taps
        else:
            # Create a filter that covers the full bandwidth of the output signal
            # If rate >= 1, we need to prevent images in the output,
            # so we have to filter it to less than half the channel
            # width of 0.5. If rate < 1, we need to filter to less
            # than half the output signal's bw to avoid aliasing, so
            # the half-band here is 0.5*rate.
            percent = 0.80
            if(self._rate < 1):
                halfband = 0.5*self._rate
                bw = percent*halfband
                tb = (percent / 2.0)*halfband
                ripple = 0.1
                # As we drop the bw factor, the optfir filter has a harder time converging;
                # using the firdes method here for better results.
                self._taps = filter.firdes.low_pass_2(self._size, self._size, bw, tb, atten,
                                                      filter.firdes.WIN_BLACKMAN_HARRIS)
            else:
                halfband = 0.5
                bw = percent*halfband
                tb = (percent / 2.0)*halfband
                ripple = 0.1
                made = False
                # Relax the passband ripple until optfir converges, capped at 1.0 dB.
                while not made:
                    try:
                        self._taps = optfir.low_pass(self._size, self._size, bw, bw+tb, ripple, atten)
                        made = True
                    except RuntimeError:
                        ripple += 0.01
                        made = False
                        print("Warning: set ripple to %.4f dB. If this is a problem, adjust the attenuation or create your own filter taps." % (ripple))
                        # Build in an exit strategy; if we've come this far, it ain't working.
                        if(ripple >= 1.0):
                            raise RuntimeError("optfir could not generate an appropriate filter.")
        self.pfb = filter.pfb_arb_resampler_ccf(self._rate, self._taps, self._size)
        #print("PFB has %d taps\n" % (len(self._taps),))
        self.connect(self, self.pfb)
        self.connect(self.pfb, self)
    # Note -- set_taps not implemented in base class yet
    def set_taps(self, taps):
        self.pfb.set_taps(taps)
    def set_rate(self, rate):
        # Change the resampling rate at runtime.
        self.pfb.set_rate(rate)
    def declare_sample_delay(self, delay):
        self.pfb.declare_sample_delay(delay)
class arb_resampler_fff(gr.hier_block):
    '''
    Convenience wrapper for the polyphase filterbank arbitrary resampler.

    The block takes a single float stream in and outputs a single float
    stream out. As such, it requires no extra glue to handle the input/output
    streams. This block is provided to be consistent with the interface to the
    other PFB block.
    '''
    def __init__(self, rate, taps=None, flt_size=32, atten=100):
        """
        Args:
            rate: resampling rate (output rate / input rate); may be < 1.
            taps: prototype filter taps; auto-generated when None or empty.
            flt_size: number of filters in the polyphase filterbank.
            atten: stop-band attenuation in dB for the generated filter.
        """
        gr.hier_block.__init__(self, "pfb_arb_resampler_fff",
                               gr.io_signature(1, 1, gr.sizeof_float),  # Input signature
                               gr.io_signature(1, 1, gr.sizeof_float))  # Output signature

        self._rate = rate
        self._size = flt_size

        if (taps is not None) and (len(taps) > 0):
            self._taps = taps
        else:
            # Create a filter that covers the full bandwidth of the input signal
            # If rate >= 1, we need to prevent images in the output,
            # so we have to filter it to less than half the channel
            # width of 0.5. If rate < 1, we need to filter to less
            # than half the output signal's bw to avoid aliasing, so
            # the half-band here is 0.5*rate.
            percent = 0.80
            if self._rate < 1:
                halfband = 0.5 * self._rate
                bw = percent * halfband
                tb = (percent / 2.0) * halfband
                # As we drop the bw factor, the optfir filter has a harder time
                # converging; using the firdes method here for better results.
                # (No ripple parameter needed: low_pass_2 is a windowed design,
                # so the previously-assigned unused 'ripple' local was removed.)
                self._taps = filter.firdes.low_pass_2(self._size, self._size, bw, tb, atten,
                                                      filter.firdes.WIN_BLACKMAN_HARRIS)
            else:
                halfband = 0.5
                bw = percent * halfband
                tb = (percent / 2.0) * halfband
                ripple = 0.1
                made = False
                while not made:
                    try:
                        self._taps = optfir.low_pass(self._size, self._size, bw, bw + tb, ripple, atten)
                        made = True
                    except RuntimeError:
                        # optfir failed to converge; relax the pass-band ripple and retry.
                        ripple += 0.01
                        made = False
                        print("Warning: set ripple to %.4f dB. If this is a problem, adjust the attenuation or create your own filter taps." % (ripple))

                        # Build in an exit strategy; if we've come this far, it ain't working.
                        if ripple >= 1.0:
                            raise RuntimeError("optfir could not generate an appropriate filter.")

        self.pfb = filter.pfb_arb_resampler_fff(self._rate, self._taps, self._size)

        self.connect(self, self.pfb)
        self.connect(self.pfb, self)

    # Note -- set_taps not implemented in base class yet
    def set_taps(self, taps):
        """Replace the resampler's prototype filter taps."""
        self.pfb.set_taps(taps)

    def set_rate(self, rate):
        """Set a new (arbitrary) resampling rate."""
        self.pfb.set_rate(rate)

    def declare_sample_delay(self, delay):
        """Declare the filter's sample delay for tag propagation."""
        self.pfb.declare_sample_delay(delay)
class arb_resampler_ccc(gr.hier_block):
    '''
    Convenience wrapper for the polyphase filterbank arbitrary resampler.

    The block takes a single complex stream in and outputs a single complex
    stream out. As such, it requires no extra glue to handle the input/output
    streams. This block is provided to be consistent with the interface to the
    other PFB block.
    '''
    def __init__(self, rate, taps=None, flt_size=32, atten=100):
        gr.hier_block.__init__(self, "pfb_arb_resampler_ccc",
                               gr.io_signature(1, 1, gr.sizeof_gr_complex),  # Input signature
                               gr.io_signature(1, 1, gr.sizeof_gr_complex))  # Output signature

        self._rate = rate
        self._size = flt_size

        if taps is not None and len(taps) > 0:
            # Caller supplied a prototype filter; use it unchanged.
            self._taps = taps
        else:
            # No taps given: design a low-pass covering the input bandwidth,
            # relaxing the pass-band ripple until optfir converges.
            passband_edge = 0.4
            transition = 0.2
            ripple = 0.1
            while True:
                try:
                    self._taps = optfir.low_pass(self._size, self._size,
                                                 passband_edge,
                                                 passband_edge + transition,
                                                 ripple, atten)
                    break
                except RuntimeError:
                    ripple += 0.01
                    print("Warning: set ripple to %.4f dB. If this is a problem, adjust the attenuation or create your own filter taps." % (ripple))

                    # Build in an exit strategy; if we've come this far, it ain't working.
                    if ripple >= 1.0:
                        raise RuntimeError("optfir could not generate an appropriate filter.")

        self.pfb = filter.pfb_arb_resampler_ccc(self._rate, self._taps, self._size)

        self.connect(self, self.pfb)
        self.connect(self.pfb, self)

    # Note -- set_taps not implemented in base class yet
    def set_taps(self, taps):
        """Forward new prototype taps to the underlying resampler."""
        self.pfb.set_taps(taps)

    def set_rate(self, rate):
        """Forward a new resampling rate to the underlying resampler."""
        self.pfb.set_rate(rate)

    def declare_sample_delay(self, delay):
        """Forward the declared sample delay to the underlying resampler."""
        self.pfb.declare_sample_delay(delay)
class channelizer_hier_ccf(gr.hier_block):
    """
    Make a Polyphase Filter channelizer (complex in, complex out, floating-point taps)

    Args:
        n_chans: The number of channels to split into.
        n_filterbanks: The number of filterbank blocks to use (default=1).
        taps: The taps to use. If this is `None` then taps are generated using optfir.low_pass.
        outchans: Which channels to output streams for (a list of integers) (default is all channels).
        atten: Stop band attenuation.
        bw: The fraction of the channel you want to keep.
        tb: Transition band with as fraction of channel width.
        ripple: Pass band ripple in dB.
    """
    def __init__(self, n_chans, n_filterbanks=1, taps=None, outchans=None,
                 atten=100, bw=1.0, tb=0.2, ripple=0.1):
        # Cannot use more filterbanks than there are channels to process.
        if n_filterbanks > n_chans:
            n_filterbanks = n_chans
        if outchans is None:
            outchans = list(range(n_chans))
        gr.hier_block.__init__(
            self, "pfb_channelizer_hier_ccf",
            gr.io_signature(1, 1, gr.sizeof_gr_complex),
            gr.io_signature(len(outchans), len(outchans), gr.sizeof_gr_complex))
        if taps is None:
            taps = optfir.low_pass(1, n_chans, bw, bw+tb, ripple, atten)
        taps = list(taps)
        # Zero-pad the prototype so its length is an exact multiple of n_chans,
        # allowing an even split of taps across the channels below.
        extra_taps = int(math.ceil(1.0*len(taps)/n_chans)*n_chans - len(taps))
        taps = taps + [0] * extra_taps
        # Make taps for each channel
        chantaps = [list(reversed(taps[i: len(taps): n_chans])) for i in range(0, n_chans)]
        # Convert the input stream into a stream of vectors.
        self.s2v = blocks.stream_to_vector(gr.sizeof_gr_complex, n_chans)
        # Create a mapping to separate out each filterbank (a group of channels to be processed together)
        # And a list of sets of taps for each filterbank.
        # Channels are distributed as evenly as possible: 'extra' filterbanks
        # get one more channel than the rest.
        low_cpp = int(n_chans / n_filterbanks)
        extra = n_chans - low_cpp*n_filterbanks
        cpps = [low_cpp+1]*extra + [low_cpp]*(n_filterbanks-extra)
        splitter_mapping = []
        filterbanktaps = []
        total = 0
        for cpp in cpps:
            splitter_mapping.append([(0, i) for i in range(total, total+cpp)])
            filterbanktaps.append(chantaps[total: total+cpp])
            total += cpp
        assert(total == n_chans)
        # Split the stream of vectors in n_filterbanks streams of vectors.
        self.splitter = blocks.vector_map(gr.sizeof_gr_complex, [n_chans], splitter_mapping)
        # Create the filterbanks
        self.fbs = [filter.filterbank_vcvcf(taps) for taps in filterbanktaps]
        # Combine the streams of vectors back into a single stream of vectors.
        combiner_mapping = [[]]
        for i, cpp in enumerate(cpps):
            for j in range(cpp):
                combiner_mapping[0].append((i, j))
        self.combiner = blocks.vector_map(gr.sizeof_gr_complex, cpps, combiner_mapping)
        # Add the final FFT to the channelizer.
        self.fft = fft.fft_vcc(n_chans, forward=True, window=[1.0]*n_chans)
        # Select the desired channels
        # The selector is only instantiated when a strict subset is requested.
        if outchans != list(range(n_chans)):
            selector_mapping = [[(0, i) for i in outchans]]
            self.selector = blocks.vector_map(gr.sizeof_gr_complex, [n_chans], selector_mapping)
        # Convert stream of vectors to a normal stream.
        self.v2ss = blocks.vector_to_streams(gr.sizeof_gr_complex, len(outchans))
        self.connect(self, self.s2v, self.splitter)
        for i in range(0, n_filterbanks):
            self.connect((self.splitter, i), self.fbs[i], (self.combiner, i))
        self.connect(self.combiner, self.fft)
        if outchans != list(range(n_chans)):
            self.connect(self.fft, self.selector, self.v2ss)
        else:
            self.connect(self.fft, self.v2ss)
        for i in range(0, len(outchans)):
            self.connect((self.v2ss, i), (self, i))
# Source: bastibl/gnuradio — gr-filter/python/filter/pfb.py (Python, GPL-3.0, 20,042 bytes)
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui_files\structure_dialog.ui'
#
# Created: Tue May 09 09:41:16 2017
# by: pyside-uic 0.2.14 running on PySide 1.1.1
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_Dialog(object):
    """Auto-generated (pyside-uic) UI wiring for the 'Create Structure' dialog.

    NOTE: this file is generated from structure_dialog.ui; per the header
    warning, hand edits to the code itself will be lost on regeneration.
    """

    def setupUi(self, Dialog):
        """Build the widget tree, layouts, and signal connections on *Dialog*."""
        Dialog.setObjectName("Dialog")
        Dialog.resize(754, 662)
        # Top-level vertical layout holding title, separator, form, buttons.
        self.verticalLayout = QtGui.QVBoxLayout(Dialog)
        self.verticalLayout.setObjectName("verticalLayout")
        # Dialog title label (blue, 18pt).
        self.dialog_label = QtGui.QLabel(Dialog)
        self.dialog_label.setStyleSheet("color: rgb(71, 143, 202);\n" "font: 18pt;")
        self.dialog_label.setObjectName("dialog_label")
        self.verticalLayout.addWidget(self.dialog_label)
        # Horizontal separator line under the title.
        self.line = QtGui.QFrame(Dialog)
        self.line.setFrameShape(QtGui.QFrame.HLine)
        self.line.setFrameShadow(QtGui.QFrame.Sunken)
        self.line.setObjectName("line")
        self.verticalLayout.addWidget(self.line)
        # Form layout: right-aligned labels, field widgets on the right.
        self.formLayout = QtGui.QFormLayout()
        self.formLayout.setLabelAlignment(
            QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter
        )
        self.formLayout.setObjectName("formLayout")
        # Row 0: "Name" label plus a sub-layout carrying the validator message
        # (shown in red when the entered name is invalid).
        self.name_label = QtGui.QLabel(Dialog)
        self.name_label.setObjectName("name_label")
        self.formLayout.setWidget(0, QtGui.QFormLayout.LabelRole, self.name_label)
        self.name_fields_verticalLayout = QtGui.QVBoxLayout()
        self.name_fields_verticalLayout.setObjectName("name_fields_verticalLayout")
        self.name_validator_label = QtGui.QLabel(Dialog)
        self.name_validator_label.setStyleSheet("color: rgb(255, 0, 0);")
        self.name_validator_label.setObjectName("name_validator_label")
        self.name_fields_verticalLayout.addWidget(self.name_validator_label)
        self.formLayout.setLayout(
            0, QtGui.QFormLayout.FieldRole, self.name_fields_verticalLayout
        )
        # Row 1: "Filename Templates" label plus an (initially empty) layout;
        # presumably populated at runtime by the dialog logic — confirm.
        self.filenmate_templates_label = QtGui.QLabel(Dialog)
        self.filenmate_templates_label.setObjectName("filenmate_templates_label")
        self.formLayout.setWidget(
            1, QtGui.QFormLayout.LabelRole, self.filenmate_templates_label
        )
        self.filename_template_fields_verticalLayout = QtGui.QVBoxLayout()
        self.filename_template_fields_verticalLayout.setObjectName(
            "filename_template_fields_verticalLayout"
        )
        self.formLayout.setLayout(
            1, QtGui.QFormLayout.FieldRole, self.filename_template_fields_verticalLayout
        )
        # Row 2: "Custom Template" label with a plain-text editor field.
        self.custom_template_label = QtGui.QLabel(Dialog)
        self.custom_template_label.setObjectName("custom_template_label")
        self.formLayout.setWidget(
            2, QtGui.QFormLayout.LabelRole, self.custom_template_label
        )
        self.custom_template_plainTextEdit = QtGui.QPlainTextEdit(Dialog)
        self.custom_template_plainTextEdit.setObjectName(
            "custom_template_plainTextEdit"
        )
        self.formLayout.setWidget(
            2, QtGui.QFormLayout.FieldRole, self.custom_template_plainTextEdit
        )
        self.verticalLayout.addLayout(self.formLayout)
        # Standard OK/Cancel button box wired to Dialog.accept/reject below.
        self.buttonBox = QtGui.QDialogButtonBox(Dialog)
        self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
        self.buttonBox.setStandardButtons(
            QtGui.QDialogButtonBox.Cancel | QtGui.QDialogButtonBox.Ok
        )
        self.buttonBox.setObjectName("buttonBox")
        self.verticalLayout.addWidget(self.buttonBox)
        # Let the form area (index 2) absorb extra vertical space.
        self.verticalLayout.setStretch(2, 1)

        self.retranslateUi(Dialog)
        QtCore.QObject.connect(
            self.buttonBox, QtCore.SIGNAL("accepted()"), Dialog.accept
        )
        QtCore.QObject.connect(
            self.buttonBox, QtCore.SIGNAL("rejected()"), Dialog.reject
        )
        QtCore.QMetaObject.connectSlotsByName(Dialog)

    def retranslateUi(self, Dialog):
        """Set all user-visible strings (kept separate for translation support)."""
        Dialog.setWindowTitle(
            QtGui.QApplication.translate(
                "Dialog", "Dialog", None, QtGui.QApplication.UnicodeUTF8
            )
        )
        self.dialog_label.setText(
            QtGui.QApplication.translate(
                "Dialog", "Create Structure", None, QtGui.QApplication.UnicodeUTF8
            )
        )
        self.name_label.setText(
            QtGui.QApplication.translate(
                "Dialog", "Name", None, QtGui.QApplication.UnicodeUTF8
            )
        )
        self.name_validator_label.setText(
            QtGui.QApplication.translate(
                "Dialog", "Validator Message", None, QtGui.QApplication.UnicodeUTF8
            )
        )
        self.filenmate_templates_label.setText(
            QtGui.QApplication.translate(
                "Dialog",
                '<html><head/><body><p align="right">Filename<br/>Templates</p></body></html>',
                None,
                QtGui.QApplication.UnicodeUTF8,
            )
        )
        self.custom_template_label.setText(
            QtGui.QApplication.translate(
                "Dialog",
                '<html><head/><body><p align="right">Custom<br/>Template</p></body></html>',
                None,
                QtGui.QApplication.UnicodeUTF8,
            )
        )
# Source: eoyilmaz/anima — anima/ui/ui_compiled/structure_dialog_UI_pyside.py (Python, MIT, 5,265 bytes)
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2015 SciFabric LTD.
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
"""
This module exports all the extensions used by PyBossa.
The objects are:
* sentinel: for caching data, ratelimiting, etc.
* signer: for signing emails, cookies, etc.
* mail: for sending emails,
* login_manager: to handle account sigin/signout
* facebook: for Facebook signin
* twitter: for Twitter signin
* google: for Google signin
* misaka: for app.long_description markdown support,
* babel: for i18n support,
* uploader: for file uploads support,
* csrf: for CSRF protection
* newsletter: for subscribing users to Mailchimp newsletter
* assets: for assets management (SASS, etc.)
"""
# Public names re-exported by this module; each extension object is created
# below and bound to the Flask app elsewhere.
__all__ = ['sentinel', 'db', 'signer', 'mail', 'login_manager', 'facebook',
           'twitter', 'google', 'misaka', 'babel', 'uploader', 'debug_toolbar',
           'csrf', 'timeouts', 'ratelimits', 'user_repo', 'project_repo',
           'task_repo', 'blog_repo', 'auditlog_repo', 'newsletter', 'importer',
           'flickr', 'plugin_manager', 'assets']

# CACHE
from pybossa.sentinel import Sentinel
sentinel = Sentinel()

# DB
from flask.ext.sqlalchemy import SQLAlchemy
db = SQLAlchemy()
# No separate read replica is wired here: the slave session is an alias of
# the master session.
db.slave_session = db.session

# Repositories
# NOTE(review): set to None here; presumably assigned during application
# setup — confirm against the app factory before relying on them at import time.
user_repo = None
project_repo = None
blog_repo = None
task_repo = None
auditlog_repo = None

# Signer
from pybossa.signer import Signer
signer = Signer()

# Mail
from flask.ext.mail import Mail
mail = Mail()

# Login Manager
from flask.ext.login import LoginManager
login_manager = LoginManager()

# Debug Toolbar
from flask.ext.debugtoolbar import DebugToolbarExtension
debug_toolbar = DebugToolbarExtension()

# OAuth providers
from pybossa.oauth_providers import Facebook
facebook = Facebook()
from pybossa.oauth_providers import Twitter
twitter = Twitter()
from pybossa.oauth_providers import Google
google = Google()
from pybossa.oauth_providers import Flickr
flickr = Flickr()

# Markdown support
from flask.ext.misaka import Misaka
misaka = Misaka()

# Babel
from flask.ext.babel import Babel
babel = Babel()

# Uploader
uploader = None

# Exporters
# NOTE(review): not listed in __all__ above; also appear to be assigned later.
json_exporter = None
csv_exporter = None

# CSRF protection
from flask_wtf.csrf import CsrfProtect
csrf = CsrfProtect()

# Timeouts
timeouts = dict()

# Ratelimits
ratelimits = dict()

# Newsletter
from newsletter import Newsletter
newsletter = Newsletter()

# Importer
from importers import Importer
importer = Importer()

# Plugins
from flask.ext.plugins import PluginManager
plugin_manager = PluginManager()

# Assets (SASS, etc.)
from flask.ext.assets import Environment
assets = Environment()
# Source: geotagx/pybossa — pybossa/extensions.py (Python, AGPL-3.0, 3,249 bytes)
__author__ = 'Stefan Contiu'
# RESULTS LOG : July 7th, 2015
# Accuracy : 0.993837304848
# Confusion Matrix :
# [[3044 11 2 1]
# [ 2 83 0 0]
# [ 0 0 766 13]
# [ 0 0 1 945]]
from time import time
###############################################
# load from csv training and testing sets
from numpy import genfromtxt
features_test = genfromtxt('d:/CODE/ml-crops/preproc/dataset/features_train.csv', delimiter=',')
classes_test = genfromtxt('d:/CODE/ml-crops/preproc/dataset/classes_train.csv', delimiter=',')
features_train = genfromtxt('d:/CODE/ml-crops/preproc/dataset/features_test.csv', delimiter=',')
classes_train = genfromtxt('d:/CODE/ml-crops/preproc/dataset/classes_test.csv', delimiter=',')
###############################################
# perform DecisionTree classification
from sklearn import tree
clf = tree.DecisionTreeClassifier()
fit_start_time = time()
clf.fit(features_train, classes_train)
fit_end_time = time()
print "\nTraining time : ", round(fit_end_time - fit_start_time, 3), "s"
###############################################
# predict
predict_start_time = time()
classes_predicted = clf.predict(features_test)
predict_end_time = time()
print "Preciting time : ", round(predict_end_time - predict_start_time, 3), "s"
###############################################
# get accuracy
from sklearn.metrics import accuracy_score
from sklearn.metrics import confusion_matrix
print "\nAccuracy : ", accuracy_score(classes_test, classes_predicted)
print "Confusion Matrix : \n", confusion_matrix(classes_test, classes_predicted) | stefan-contiu/ml-crops | DecisionTrees/dt_ml_crops.py | Python | mit | 1,585 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'HouseholdMemberGroup.order'
db.alter_column(u'survey_householdmembergroup', 'order', self.gf('django.db.models.fields.PositiveIntegerField')(unique=True, max_length=5))
def backwards(self, orm):
# Changing field 'HouseholdMemberGroup.order'
db.alter_column(u'survey_householdmembergroup', 'order', self.gf('django.db.models.fields.IntegerField')(max_length=5, unique=True))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'locations.location': {
'Meta': {'object_name': 'Location'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'parent_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'parent_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
'point': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['locations.Point']", 'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': u"orm['locations.Location']"}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locations'", 'null': 'True', 'to': u"orm['locations.LocationType']"})
},
u'locations.locationtype': {
'Meta': {'object_name': 'LocationType'},
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'primary_key': 'True'})
},
u'locations.point': {
'Meta': {'object_name': 'Point'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latitude': ('django.db.models.fields.DecimalField', [], {'max_digits': '13', 'decimal_places': '10'}),
'longitude': ('django.db.models.fields.DecimalField', [], {'max_digits': '13', 'decimal_places': '10'})
},
'survey.answerrule': {
'Meta': {'object_name': 'AnswerRule'},
'action': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'condition': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'next_question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parent_question_rules'", 'null': 'True', 'to': "orm['survey.Question']"}),
'question': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'rule'", 'unique': 'True', 'null': 'True', 'to': "orm['survey.Question']"}),
'validate_with_option': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.QuestionOption']", 'null': 'True'}),
'validate_with_question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.Question']", 'null': 'True'}),
'validate_with_value': ('django.db.models.fields.PositiveIntegerField', [], {'max_length': '2', 'null': 'True'})
},
'survey.backend': {
'Meta': {'object_name': 'Backend'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20'})
},
'survey.batch': {
'Meta': {'object_name': 'Batch'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'max_length': '2', 'null': 'True'})
},
'survey.batchlocationstatus': {
'Meta': {'object_name': 'BatchLocationStatus'},
'batch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'open_locations'", 'null': 'True', 'to': "orm['survey.Batch']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'open_batches'", 'null': 'True', 'to': u"orm['locations.Location']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'})
},
'survey.formula': {
'Meta': {'object_name': 'Formula'},
'batch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'formula'", 'null': 'True', 'to': "orm['survey.Batch']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'denominator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'as_denominator'", 'to': "orm['survey.Question']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
'numerator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'as_numerator'", 'to': "orm['survey.Question']"})
},
'survey.groupcondition': {
'Meta': {'object_name': 'GroupCondition'},
'attribute': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'condition': ('django.db.models.fields.CharField', [], {'default': "'EQUALS'", 'max_length': '20'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'conditions'", 'symmetrical': 'False', 'to': "orm['survey.HouseholdMemberGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'survey.household': {
'Meta': {'object_name': 'Household'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'investigator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'households'", 'null': 'True', 'to': "orm['survey.Investigator']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'uid': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'unique': 'True'})
},
'survey.householdbatchcompletion': {
'Meta': {'object_name': 'HouseholdBatchCompletion'},
'batch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'completed_households'", 'null': 'True', 'to': "orm['survey.Batch']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'household': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'completed_batches'", 'null': 'True', 'to': "orm['survey.Household']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'investigator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'completed_batches'", 'null': 'True', 'to': "orm['survey.Investigator']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'})
},
'survey.householdhead': {
'Meta': {'object_name': 'HouseholdHead'},
'age': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True'}),
'household': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'head'", 'unique': 'True', 'null': 'True', 'to': "orm['survey.Household']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level_of_education': ('django.db.models.fields.CharField', [], {'default': "'Primary'", 'max_length': '100', 'null': 'True'}),
'male': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'occupation': ('django.db.models.fields.CharField', [], {'default': "'16'", 'max_length': '100'}),
'resident_since_month': ('django.db.models.fields.PositiveIntegerField', [], {'default': '5'}),
'resident_since_year': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1984'}),
'surname': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True'})
},
'survey.householdmember': {
'Meta': {'object_name': 'HouseholdMember'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {}),
'household': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'household_member'", 'to': "orm['survey.Household']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'male': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'survey.householdmembergroup': {
'Meta': {'object_name': 'HouseholdMemberGroup'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'unique': 'True', 'max_length': '5'})
},
'survey.investigator': {
'Meta': {'object_name': 'Investigator'},
'age': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'backend': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.Backend']", 'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'default': "'English'", 'max_length': '100', 'null': 'True'}),
'level_of_education': ('django.db.models.fields.CharField', [], {'default': "'Primary'", 'max_length': '100', 'null': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['locations.Location']", 'null': 'True'}),
'male': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'mobile_number': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '10'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'weights': ('django.db.models.fields.FloatField', [], {'default': '0'})
},
'survey.locationautocomplete': {
'Meta': {'object_name': 'LocationAutoComplete'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['locations.Location']", 'null': 'True'}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '500'})
},
'survey.multichoiceanswer': {
'Meta': {'object_name': 'MultiChoiceAnswer'},
'answer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.QuestionOption']", 'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'household': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.Household']", 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'investigator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.Investigator']", 'null': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.Question']", 'null': 'True'}),
'rule_applied': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.AnswerRule']", 'null': 'True'})
},
'survey.numericalanswer': {
'Meta': {'object_name': 'NumericalAnswer'},
'answer': ('django.db.models.fields.PositiveIntegerField', [], {'max_length': '5', 'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'household': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.Household']", 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'investigator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.Investigator']", 'null': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.Question']", 'null': 'True'}),
'rule_applied': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.AnswerRule']", 'null': 'True'})
},
'survey.question': {
'Meta': {'object_name': 'Question'},
'answer_type': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
'batch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questions'", 'null': 'True', 'to': "orm['survey.Batch']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'max_length': '2', 'null': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'children'", 'null': 'True', 'to': "orm['survey.Question']"}),
'subquestion': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '150'})
},
'survey.questionoption': {
'Meta': {'object_name': 'QuestionOption'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'max_length': '2', 'null': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'null': 'True', 'to': "orm['survey.Question']"}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '150'})
},
'survey.randomhouseholdselection': {
'Meta': {'object_name': 'RandomHouseHoldSelection'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mobile_number': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '10'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'no_of_households': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'selected_households': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'survey.textanswer': {
'Meta': {'object_name': 'TextAnswer'},
'answer': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'household': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.Household']", 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'investigator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.Investigator']", 'null': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.Question']", 'null': 'True'}),
'rule_applied': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['survey.AnswerRule']", 'null': 'True'})
},
'survey.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mobile_number': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '10'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'userprofile'", 'unique': 'True', 'to': u"orm['auth.User']"})
}
}
complete_apps = ['survey'] | antsmc2/mics | survey/migrations/0057_auto__chg_field_householdmembergroup_order.py | Python | bsd-3-clause | 24,137 |
# Self-spawning MPI check for veros.distributed.gather: run on a single
# process it re-launches itself on 4 ranks; the spawned workers gather their
# local tiles and rank 0 sends the assembled global array back to the parent,
# which verifies it.
import numpy as np
from mpi4py import MPI

from veros import runtime_settings as rs, runtime_state as rst
from veros.distributed import gather

if rst.proc_num == 1:
    # Parent process: spawn 4 copies of this same script under mpi4py and
    # receive the gathered result from worker rank 0.
    import sys
    comm = MPI.COMM_SELF.Spawn(sys.executable, args=["-m", "mpi4py", sys.argv[-1]], maxprocs=4)
    res = np.empty((8, 8))
    comm.Recv(res, 0)
    # Each 4x4 quadrant of the 8x8 result must hold the rank that owned it:
    # ranks 0/2 in the top half, 1/3 in the bottom half (per the 2x2 grid).
    np.testing.assert_array_equal(
        res,
        np.array(
            [
                [0.0, 0.0, 0.0, 0.0, 2.0, 2.0, 2.0, 2.0],
                [0.0, 0.0, 0.0, 0.0, 2.0, 2.0, 2.0, 2.0],
                [0.0, 0.0, 0.0, 0.0, 2.0, 2.0, 2.0, 2.0],
                [0.0, 0.0, 0.0, 0.0, 2.0, 2.0, 2.0, 2.0],
                [1.0, 1.0, 1.0, 1.0, 3.0, 3.0, 3.0, 3.0],
                [1.0, 1.0, 1.0, 1.0, 3.0, 3.0, 3.0, 3.0],
                [1.0, 1.0, 1.0, 1.0, 3.0, 3.0, 3.0, 3.0],
                [1.0, 1.0, 1.0, 1.0, 3.0, 3.0, 3.0, 3.0],
            ]
        ),
    )
else:
    # Spawned worker: 2x2 process grid over a 4x4 global domain; local arrays
    # are 6x6 (presumably 4x4 interior plus 1-cell halos -- TODO confirm
    # against veros.distributed conventions).
    rs.num_proc = (2, 2)
    assert rst.proc_num == 4
    from veros.core.operators import numpy as npx
    dimensions = dict(xt=4, yt=4)
    a = rst.proc_rank * npx.ones((6, 6))
    b = gather(a, dimensions, ("xt", "yt"))
    if rst.proc_rank == 0:
        # Only rank 0 holds the full gathered array; send it to the parent.
        rs.mpi_comm.Get_parent().Send(np.array(b), 0)
| dionhaefner/veros | test/distributed/gather_kernel.py | Python | mit | 1,219 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models, fields, api, _
class AccountPayment(models.Model):
    """Expose EDI document actions on payments by delegating to the move."""

    _inherit = 'account.payment'

    def action_process_edi_web_services(self):
        """Process pending EDI web-service jobs of the underlying journal entry."""
        move = self.move_id
        return move.action_process_edi_web_services()

    def action_retry_edi_documents_error(self):
        """Retry EDI documents in error on the underlying journal entry.

        Only valid on a single payment record.
        """
        self.ensure_one()
        move = self.move_id
        return move.action_retry_edi_documents_error()
| jeremiahyan/odoo | addons/account_edi/models/account_payment.py | Python | gpl-3.0 | 459 |
import collections
import logging
import os
import re
import subprocess
import sys
from scltests import collection, settings, cfg
from scltests.misc import createrepo, rename_logs
logger = logging.getLogger(__name__)
class BuildCollection(object):
    """Builds every package of a software collection (SCL) with mock.

    The collection metapackage is built first; its ``<scl>-build`` subpackage
    is then appended to mock's ``chroot_setup_cmd`` so the remaining packages
    build inside the collection environment.  When the collection is marked
    ``dependant``, the mock configuration is stashed on the class so the next
    instantiated build can reuse it.
    """

    # Mock configuration handed over from a previously built collection that
    # this one depends on; consumed (and cleared) in __init__.
    dep_config = None

    def __init__(self, scl_name, config_name, local_scl):
        if BuildCollection.dep_config:
            # Reuse the dependency collection's mock configuration.
            self._mock_config = BuildCollection.dep_config
            BuildCollection.dep_config = None
        self.config_name = config_name
        self.scl_name = scl_name
        self.built = False
        self.local_scl = local_scl
        self._srpms = []  # SRPM paths created by make_srpm(); removed by delete_srpms()

    @property
    def mock_config(self):
        # Lazily prepare the mock configuration and initialise its result
        # directory as a yum/dnf repository.
        if not hasattr(self, '_mock_config'):
            self._mock_config = cfg.prepare(self.config_name, self.local_scl)
            createrepo(self._mock_config.result_dir)
            logger.info('Creating repo at {0}'.format(self._mock_config.result_dir))
        return self._mock_config

    @property
    def scl(self):
        # Lazily loaded collection definition (metapackage, package order, ...).
        if not hasattr(self, '_scl'):
            self._scl = collection.Collection(self.scl_name)
        return self._scl

    @property
    def rpms(self):
        """
        Returns list of built rpms.
        """
        if self.built:
            dir_content = os.listdir(self.mock_config.result_dir)
            # Binary rpms of this collection only; the lookbehind skips *.src.rpm.
            return [f for f in dir_content if re.search("^{0}.*(?<!src)\.rpm$".format(self.scl_name), f)]
        return []

    @property
    def rpms_dict(self):
        """
        Returns dictionary consisting of built rpms in format:
        {rpm_stripped_of_arch_and_dist: (rpm, full_path_to_rpm)}
        {foo-2.7-1: (foo-2.7-1.f19.noarch.rpm, /path/to/foo-2.7-1.f19.noarch.rpm)}
        """
        if not hasattr(self, '_rpms_dict'):
            rpm_info = collections.namedtuple('rpm_info', 'rpm rpm_path')
            self._rpms_dict = {self.rpm_strip(rpm): rpm_info(rpm, self.rpm_path(rpm)) for rpm in self.rpms}
        return self._rpms_dict

    def rpm_strip(self, package):
        """
        Strip information about arch and dist from rpm name.
        foo-2.7-1.f19.noarch.rpm -> foo-2.7-1
        """
        arch = self.mock_config['target_arch']
        if '.noarch.' in package:
            arch = 'noarch'
        return package.replace('.{0}.{1}.rpm'.format(self.mock_config.dist, arch), '')

    def rpm_path(self, package):
        """
        Returns full path to given package.
        """
        return os.path.join(self.mock_config.result_dir, package)

    def _build_rpm(self, path_to_specfile):
        """
        Build single srpm with mock.
        """
        # Returns mock's exit code: 0 on success, non-zero on failure.
        path_to_srpm = self.make_srpm(path_to_specfile)
        srpm = os.path.basename(path_to_srpm)
        logger.info('Starting build of {0}'.format(srpm))
        # Mock's own output is discarded; only our log messages are kept.
        with open('/dev/null', 'w') as devnull:
            code = subprocess.call(['mock', '-r', self.mock_config.name, '--configdir',
                                    self.mock_config.config_dir, '--resultdir', self.mock_config.result_dir,
                                    path_to_srpm], stdout=devnull, stderr=devnull)
        if not code:
            logger.info('Package {0} was built successfully'.format(srpm))
            # Refresh repo metadata so later builds can depend on this package.
            createrepo(self.mock_config.result_dir)
            logger.info('Updating repository')
        else:
            logger.info('{0} build failed'.format(srpm))
        rename_logs(path_to_srpm, self.mock_config.result_dir)
        return code

    def make_srpm(self, specfile):
        """
        Create srpm from specfile in SRPMS folder.
        """
        scl_dir = '{0}/{1}'.format(settings.SRPMS_DIR, self.scl_name)
        msg = subprocess.check_output(['rpmbuild',
                                       '--define', '_sourcedir {0}'.format(scl_dir),
                                       '--define', '_builddir {0}'.format(scl_dir),
                                       '--define', '_srcrpmdir {0}'.format(scl_dir),
                                       '--define', '_rpmdir {0}'.format(scl_dir),
                                       '-bs', specfile])
        # rpmbuild prints the path of the written SRPM; extract it from stdout.
        srpm = re.search(r'\/.*\.rpm', msg.strip().decode('utf-8')).group()
        self._srpms.append(srpm)
        logger.info('Created {0} from {1}'.format(os.path.basename(srpm), os.path.basename(specfile)))
        return srpm

    def delete_srpms(self):
        # Remove every SRPM created during this build.
        for srpm in self._srpms:
            os.remove(srpm)
        logger.info('SRPMS deleted')

    def build(self):
        """
        Build whole collection.

        Metapackage is built as first. Metapackage and build order is defined
        in the yaml file of collection.
        """
        try:
            meta_return_code = self._build_rpm(self.scl.meta)
            if meta_return_code:
                # Without the metapackage no other package can build.
                sys.exit('ERROR: Metapackage wasn\'t built, exiting.')
            # Pull the collection's -build package into the chroot for the rest.
            self.mock_config.edit_opt('chroot_setup_cmd', ' {0}-build'.format(self.scl.name))
            for package in self.scl.packages:
                self._build_rpm(package)
            if self.scl.dependant:
                # Hand this mock config over to the next collection build.
                BuildCollection.dep_config = self.mock_config
            self.built = True
        except KeyboardInterrupt:
            self.built = False
            raise
        finally:
            self.mock_config.reset()
            self.delete_srpms()
| sclorg/scltests | scltests/build.py | Python | gpl-2.0 | 5,282 |
"""
A sliding window of the sample readings.
"""
from collections import namedtuple
import numpy as np
# A fixed-capacity sliding window over sample readings: ``size`` is the count
# of filled rows, ``data`` a (capacity, attrs_count) array of readings.
Window = namedtuple("Window", ["size", "data"])


def create_window(capacity, attrs_count):
    """Return an empty sliding window for ``capacity`` readings of ``attrs_count`` attributes."""
    return Window(0, np.zeros((capacity, attrs_count)))


def update_window(window, point):
    """Return a Window with ``point`` appended, evicting the oldest row when full."""
    capacity = len(window.data)
    if window.size < capacity:
        # Still room: fill the next empty row (the backing array is reused).
        count = window.size + 1
        rows = window.data
    else:
        # Full: shift everything one row up and open a fresh slot at the end.
        count = window.size
        rows = np.vstack([window.data[1:], np.zeros((1, window.data.shape[1]))])
    rows[count - 1] = point
    return Window(count, rows)
| simonrozsival/mff-ai-anomaly-detector | detector/src/window.py | Python | mit | 769 |
#Copyright ReportLab Europe Ltd. 2000-2012
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/graphics/renderbase.py
__version__=''' $Id $ '''
__doc__='''Superclass for renderers to factor out common functionality and default implementations.'''
from reportlab.graphics.shapes import *
from reportlab.lib.validators import DerivedValue
from reportlab import rl_config
def inverse(A):
    """Return the inverse of the affine 2D transform A, both as 6-tuples (a, b, c, d, e, f)."""
    a, b, c, d, e, f = A
    det = float(a * d - c * b)
    # Inverse of the 2x2 linear part...
    ai, bi, ci, di = d / det, -b / det, -c / det, a / det
    # ...followed by the transformed negative translation.
    return (ai, bi, ci, di, -ai * e - ci * f, -bi * e - di * f)
def mmult(A, B):
    """Return A postmultiplied by B, affine 2D transforms as 6-vectors.

        [a0 a2 a4]   [b0 b2 b4]
        [a1 a3 a5] * [b1 b3 b5]
        [      1 ]   [      1 ]
    """
    a0, a1, a2, a3, a4, a5 = A
    b0, b1, b2, b3, b4, b5 = B
    return (a0 * b0 + a2 * b1,
            a1 * b0 + a3 * b1,
            a0 * b2 + a2 * b3,
            a1 * b2 + a3 * b3,
            a0 * b4 + a2 * b5 + a4,
            a1 * b4 + a3 * b5 + a5)
def getStateDelta(shape):
    """Return the subset of ``shape``'s properties that affect graphics state.

    Used to compute when we need to change the graphics state.  For example,
    two adjacent red shapes need no pen-colour change in between: only the
    properties present in STATE_DEFAULTS matter to the renderer.
    """
    props = shape.getProperties()
    return {prop: value for prop, value in props.items() if prop in STATE_DEFAULTS}
class StateTracker:
    """Keeps a stack of transforms and state
    properties. It can contain any properties you
    want, but the keys 'transform' and 'ctm' have
    special meanings. The getCTM()
    method returns the current transformation
    matrix at any point, without needing to
    invert matrixes when you pop."""
    def __init__(self, defaults=None):
        # one stack to keep track of what changes...
        self._deltas = []
        # and another to keep track of cumulative effects. Last one in
        # list is the current graphics state. We put one in to simplify
        # loops below.
        self._combined = []
        if defaults is None:
            defaults = STATE_DEFAULTS.copy()
        #ensure that if we have a transform, we have a CTM
        if 'transform' in defaults:
            defaults['ctm'] = defaults['transform']
        self._combined.append(defaults)

    def push(self,delta):
        """Take a new state dictionary of changes and push it onto
        the stack. After doing this, the combined state is accessible
        through getState()"""
        # Start from a copy of the current combined state and overlay delta;
        # 'transform' is special-cased so 'ctm' accumulates via mmult.
        newstate = self._combined[-1].copy()
        for (key, value) in delta.items():
            if key == 'transform':  #do cumulative matrix
                newstate['transform'] = delta['transform']
                newstate['ctm'] = mmult(self._combined[-1]['ctm'], delta['transform'])
                #print 'statetracker transform = (%0.2f, %0.2f, %0.2f, %0.2f, %0.2f, %0.2f)' % tuple(newstate['transform'])
                #print 'statetracker ctm = (%0.2f, %0.2f, %0.2f, %0.2f, %0.2f, %0.2f)' % tuple(newstate['ctm'])
            else:  #just overwrite it
                newstate[key] = value
        self._combined.append(newstate)
        self._deltas.append(delta)

    def pop(self):
        """steps back one, and returns a state dictionary with the
        deltas to reverse out of wherever you are. Depending
        on your back end, you may not need the return value,
        since you can get the complete state afterwards with getState()"""
        # Note the order: the combined state is popped FIRST, so the diff
        # below compares the popped delta against the now-current state.
        del self._combined[-1]
        newState = self._combined[-1]
        lastDelta = self._deltas[-1]
        del self._deltas[-1]
        #need to diff this against the last one in the state
        reverseDelta = {}
        #print 'pop()...'
        for key, curValue in lastDelta.items():
            #print '   key=%s, value=%s' % (key, curValue)
            prevValue = newState[key]
            if prevValue != curValue:
                #print '    state popping "%s"="%s"' % (key, curValue)
                if key == 'transform':
                    # The reverse of a transform delta is its matrix inverse.
                    reverseDelta[key] = inverse(lastDelta['transform'])
                else:  #just return to previous state
                    reverseDelta[key] = prevValue
        return reverseDelta

    def getState(self):
        "returns the complete graphics state at this point"
        return self._combined[-1]

    def getCTM(self):
        "returns the current transformation matrix at this point"""
        return self._combined[-1]['ctm']

    def __getitem__(self,key):
        "returns the complete graphics state value of key at this point"
        return self._combined[-1][key]

    def __setitem__(self,key,value):
        "sets the complete graphics state value of key to value"
        self._combined[-1][key] = value
def testStateTracker():
    # Ad-hoc smoke test (Python 2 print statements, like the rest of this
    # module): pushes a series of state deltas onto a StateTracker and pops
    # them back, printing the combined state at every step for eyeballing.
    print 'Testing state tracker'
    defaults = {'fillColor':None, 'strokeColor':None,'fontName':None, 'transform':[1,0,0,1,0,0]}
    from reportlab.graphics.shapes import _baseGFontName
    deltas = [
        {'fillColor':'red'},
        {'fillColor':'green', 'strokeColor':'blue','fontName':_baseGFontName},
        {'transform':[0.5,0,0,0.5,0,0]},
        {'transform':[0.5,0,0,0.5,2,3]},
        {'strokeColor':'red'}
        ]
    st = StateTracker(defaults)
    print 'initial:', st.getState()
    print
    for delta in deltas:
        print 'pushing:', delta
        st.push(delta)
        print 'state:  ',st.getState(),'\n'
    for delta in deltas:
        print 'popping:',st.pop()
        print 'state:  ',st.getState(),'\n'
def _expandUserNode(node, canvas):
    """If node is a UserNode, replace it with the node it provides.

    The canvas is temporarily attached as ``node._canvas`` so provideNode()
    can consult it; the attribute is removed again unless the node already
    had one of its own.  Non-UserNode nodes are returned unchanged.
    """
    if isinstance(node, UserNode):
        had_canvas = hasattr(node, '_canvas')
        try:
            if not had_canvas:
                node._canvas = canvas
            provider = node
            node = provider.provideNode()
        finally:
            if not had_canvas:
                del provider._canvas
    return node
def renderScaledDrawing(d):
    """Return d itself, or a scaled copy when d.renderScale is not 1.0.

    The copy is built via d's own class, inherits d's attributes (so width,
    height and the rest match the original), has the scale applied, and its
    renderScale reset to 1.0 so it will not be scaled again.
    """
    factor = d.renderScale
    if factor == 1.0:
        return d
    original = d
    d = original.__class__(original.width * factor, original.height * factor)
    # Copy all attributes across -- this restores the original width/height.
    d.__dict__ = original.__dict__.copy()
    d.scale(factor, factor)
    d.renderScale = 1.0
    return d
class Renderer:
    """Virtual superclass for graphics renderers.

    Subclasses implement the draw* primitives; this class supplies the
    recursive traversal, state tracking and shape-type dispatch.  Note this
    module uses Python 2 syntax (print statements, old-style raise).
    """
    def __init__(self):
        # Cumulative graphics state (colours, fonts, CTM) during traversal.
        self._tracker = StateTracker()
        self._nodeStack = []  #track nodes visited

    def undefined(self, operation):
        # Called by the default draw* stubs; concrete renderers must override.
        raise ValueError, "%s operation not defined at superclass class=%s" %(operation, self.__class__)

    def draw(self, drawing, canvas, x=0, y=0, showBoundary=rl_config._unset_):
        """This is the top level function, which draws the drawing at the given
        location. The recursive part is handled by drawNode."""
        #stash references for ease of communication
        if showBoundary is rl_config._unset_: showBoundary=rl_config.showBoundary
        self._canvas = canvas
        canvas.__dict__['_drawing'] = self._drawing = drawing
        drawing._parent = None
        try:
            #bounding box
            if showBoundary: canvas.rect(x, y, drawing.width, drawing.height)
            canvas.saveState()
            self.initState(x,y)  #this is the push()
            self.drawNode(drawing)
            self.pop()
            canvas.restoreState()
        finally:
            #remove any circular references
            del self._canvas, self._drawing, canvas._drawing, drawing._parent

    def initState(self,x,y):
        # Push the default graphics state translated to the draw origin.
        deltas = STATE_DEFAULTS.copy()
        deltas['transform'] = [1,0,0,1,x,y]
        self._tracker.push(deltas)
        self.applyStateChanges(deltas, {})

    def pop(self):
        self._tracker.pop()

    def drawNode(self, node):
        """This is the recursive method called for each node
        in the tree"""
        # Undefined here, but with closer analysis probably can be handled in superclass
        self.undefined("drawNode")

    def getStateValue(self, key):
        """Return current state parameter for given key"""
        currentState = self._tracker._combined[-1]
        return currentState[key]

    def fillDerivedValues(self, node):
        """Examine a node for any values which are Derived,
        and replace them with their calculated values.
        Generally things may look at the drawing or their
        parent.
        """
        for (key, value) in node.__dict__.items():
            if isinstance(value, DerivedValue):
                #just replace with default for key?
                #print '    fillDerivedValues(%s)' % key
                newValue = value.getValue(self, key)
                #print '   got value of %s' % newValue
                node.__dict__[key] = newValue

    def drawNodeDispatcher(self, node):
        """dispatch on the node's (super) class: shared code"""
        canvas = getattr(self,'_canvas',None)
        # replace UserNode with its contents
        # NOTE(review): if _expandUserNode returns a falsy node, the early
        # return below reaches the finally with 'ocanvas' unbound -> NameError.
        try:
            node = _expandUserNode(node,canvas)
            if not node: return
            if hasattr(node,'_canvas'):
                ocanvas = 1
            else:
                node._canvas = canvas
                ocanvas = None
            self.fillDerivedValues(node)
            dtcb = getattr(node,'_drawTimeCallback',None)
            if dtcb:
                dtcb(node,canvas=canvas,renderer=self)
            #draw the object, or recurse
            if isinstance(node, Line):
                self.drawLine(node)
            elif isinstance(node, Image):
                self.drawImage(node)
            elif isinstance(node, Rect):
                self.drawRect(node)
            elif isinstance(node, Circle):
                self.drawCircle(node)
            elif isinstance(node, Ellipse):
                self.drawEllipse(node)
            elif isinstance(node, PolyLine):
                self.drawPolyLine(node)
            elif isinstance(node, Polygon):
                self.drawPolygon(node)
            elif isinstance(node, Path):
                self.drawPath(node)
            elif isinstance(node, String):
                self.drawString(node)
            elif isinstance(node, Group):
                self.drawGroup(node)
            elif isinstance(node, Wedge):
                self.drawWedge(node)
            else:
                print 'DrawingError','Unexpected element %s in drawing!' % str(node)
        finally:
            if not ocanvas: del node._canvas

    # Mapping of style attribute names to renderer-internal slots; presumably
    # used by subclasses when restoring state -- not referenced in this class.
    _restores = {'stroke':'_stroke','stroke_width': '_lineWidth','stroke_linecap':'_lineCap',
                'stroke_linejoin':'_lineJoin','fill':'_fill','font_family':'_font',
                'font_size':'_fontSize'}

    def drawGroup(self, group):
        # just do the contents. Some renderers might need to override this
        # if they need a flipped transform
        canvas = getattr(self,'_canvas',None)
        for node in group.getContents():
            node = _expandUserNode(node,canvas)
            if not node: continue
            #here is where we do derived values - this seems to get everything. Touch wood.
            self.fillDerivedValues(node)
            try:
                if hasattr(node,'_canvas'):
                    ocanvas = 1
                else:
                    node._canvas = canvas
                    ocanvas = None
                node._parent = group
                self.drawNode(node)
            finally:
                del node._parent
                if not ocanvas: del node._canvas

    def drawWedge(self, wedge):
        # by default ask the wedge to make a polygon of itself and draw that!
        #print "drawWedge"
        polygon = wedge.asPolygon()
        self.drawPolygon(polygon)

    def drawPath(self, path):
        # Default: approximate the path by polygons and draw those.
        polygons = path.asPolygons()
        for polygon in polygons:
            self.drawPolygon(polygon)

    def drawRect(self, rect):
        # could be implemented in terms of polygon
        self.undefined("drawRect")

    def drawLine(self, line):
        self.undefined("drawLine")

    def drawCircle(self, circle):
        self.undefined("drawCircle")

    def drawPolyLine(self, p):
        self.undefined("drawPolyLine")

    def drawEllipse(self, ellipse):
        self.undefined("drawEllipse")

    def drawPolygon(self, p):
        self.undefined("drawPolygon")

    def drawString(self, stringObj):
        self.undefined("drawString")

    def applyStateChanges(self, delta, newState):
        """This takes a set of states, and outputs the operators
        needed to set those properties"""
        self.undefined("applyStateChanges")
if __name__=='__main__':
    # Library module: no command-line behaviour beyond printing the docstring.
    print "this file has no script interpretation"
    print __doc__
| TaskEvolution/Task-Coach-Evolution | taskcoach/taskcoachlib/thirdparty/src/reportlab/graphics/renderbase.py | Python | gpl-3.0 | 12,798 |
import json
from askmath.entities import TextMessage
from askmath.models.discipline import Discipline as DisciplineModel
from askmath.models.lesson import Lesson as LessonModel
from askmath.models.question import Question as QuestionModel
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.utils.decorators import method_decorator
from .iquestion import IQuestion
from .question import Question
class ProxyQuestion(IQuestion):
    """Permission- and existence-checking proxy in front of ``Question``.

    Every action follows the same pattern (Python 2 syntax throughout):
    1. verify the request user holds the required ``askmath`` permission
       plus manager access;
    2. resolve the discipline/lesson (and question, where relevant) by id,
       redirecting back with an error message if any lookup fails;
    3. delegate to the wrapped ``Question`` view implementation.
    """

    def __init__(self):
        # The wrapped view implementation all actions delegate to.
        self.__question = Question()

    @method_decorator(login_required)
    def view_questions(self, request, id_lesson, id_discipline, id_question=None):
        """List questions of a lesson, or show one question when id_question is given."""
        if request.user.has_perm("askmath.read_question") and request.user.has_perm("askmath.access_manager"):
            try:
                discipline = DisciplineModel.objects.get(id=id_discipline)
            except Exception, e:
                print e
                messages.error(request, TextMessage.DISCIPLINE_NOT_FOUND)
                return HttpResponseRedirect( reverse('askmath:manager_lesson_view',  kwargs={'id_lesson': id_lesson}))
            try:
                lesson = LessonModel.objects.get(id=id_lesson)
            except Exception, e:
                print e
                messages.error(request, TextMessage.LESSON_NOT_FOUND)
                return HttpResponseRedirect( reverse('askmath:manager_lesson_view',  kwargs={'id_lesson': id_lesson}))
            if id_question:
                try:
                    question = QuestionModel.objects.get(id=id_question)
                except Exception, e:
                    print e
                    messages.error(request, TextMessage.QUESTION_NOT_FOUND)
                    return HttpResponseRedirect(reverse('askmath:manager_question_view', kwargs={'id_discipline': id_discipline, 'id_lesson': id_lesson}))
            else:
                question = None
            try:
                return self.__question.view_questions(request, lesson, discipline, question)
            except Exception, e:
                print e
                messages.error(request, TextMessage.ERROR)
        else:
            messages.error(request, TextMessage.USER_NOT_PERMISSION)
        return HttpResponseRedirect( reverse('askmath:manager_lesson_view',  kwargs={'id_lesson': id_lesson}))

    @method_decorator(login_required)
    def view_questions_removed(self, request, id_lesson, id_discipline):
        """List logically removed questions of a lesson."""
        if request.user.has_perm("askmath.read_question") and request.user.has_perm("askmath.access_manager"):
            try:
                discipline = DisciplineModel.objects.get(id=id_discipline)
            except Exception, e:
                print e
                messages.error(request, TextMessage.DISCIPLINE_NOT_FOUND)
                return HttpResponseRedirect( reverse('askmath:manager_lesson_view',  kwargs={'id_lesson': id_lesson}))
            try:
                lesson = LessonModel.objects.get(id=id_lesson)
            except Exception, e:
                print e
                messages.error(request, TextMessage.LESSON_NOT_FOUND)
                return HttpResponseRedirect( reverse('askmath:manager_lesson_view',  kwargs={'id_lesson': id_lesson}))
            try:
                return self.__question.view_questions_removed(request, lesson, discipline)
            except Exception, e:
                print e
                messages.error(request, TextMessage.ERROR)
        else:
            messages.error(request, TextMessage.USER_NOT_PERMISSION)
        return HttpResponseRedirect( reverse('askmath:manager_lesson_view',  kwargs={'id_lesson': id_lesson}))

    @method_decorator(login_required)
    def add_question(self, request, id_lesson, id_discipline, quantity_items=5):
        """Create a question with ``quantity_items`` answer options (write permission)."""
        if request.user.has_perm("askmath.write_question") and request.user.has_perm("askmath.access_manager"):
            try:
                discipline = DisciplineModel.objects.get(id=id_discipline)
            except Exception, e:
                print e
                messages.error(request, TextMessage.DISCIPLINE_NOT_FOUND)
                return HttpResponseRedirect( reverse('askmath:manager_lesson_view',  kwargs={'id_lesson': id_lesson}))
            try:
                lesson = LessonModel.objects.get(id=id_lesson)
            except Exception, e:
                print e
                messages.error(request, TextMessage.LESSON_NOT_FOUND)
                return HttpResponseRedirect( reverse('askmath:manager_lesson_view',  kwargs={'id_lesson': id_lesson}))
            try:
                return self.__question.add_question(request, lesson, discipline, int(quantity_items))
            except Exception, e:
                print e
                messages.error(request, TextMessage.QUESTION_ERROR_ADD)
        else:
            messages.error(request, TextMessage.USER_NOT_PERMISSION)
        return HttpResponseRedirect( reverse('askmath:manager_lesson_view',  kwargs={'id_lesson': id_lesson}))

    @method_decorator(login_required)
    def remove_question(self, request, id_question, id_lesson, id_discipline):
        """Logically remove a question (write permission)."""
        if request.user.has_perm("askmath.write_question") and request.user.has_perm("askmath.access_manager"):
            try:
                discipline = DisciplineModel.objects.get(id=id_discipline)
            except Exception, e:
                print e
                messages.error(request, TextMessage.DISCIPLINE_NOT_FOUND)
                return HttpResponseRedirect( reverse('askmath:manager_lesson_view',  kwargs={'id_lesson': id_lesson}))
            try:
                lesson = LessonModel.objects.get(id=id_lesson)
            except Exception, e:
                print e
                messages.error(request, TextMessage.LESSON_NOT_FOUND)
                return HttpResponseRedirect( reverse('askmath:manager_lesson_view',  kwargs={'id_lesson': id_lesson}))
            try:
                question = QuestionModel.objects.get(id=id_question)
            except Exception, e:
                print e
                messages.error(request, TextMessage.QUESTION_NOT_FOUND)
                return HttpResponseRedirect(reverse('askmath:manager_question_view', kwargs={'id_discipline': id_discipline, 'id_lesson': id_lesson}))
            try:
                return self.__question.remove_question(request, question, lesson, discipline)
            except Exception, e:
                print e
                messages.error(request, TextMessage.QUESTION_ERROR_REM)
        else:
            messages.error(request, TextMessage.USER_NOT_PERMISSION)
        return HttpResponseRedirect( reverse('askmath:manager_lesson_view',  kwargs={'id_lesson': id_lesson}))

    @method_decorator(login_required)
    def edit_question(self, request, id_question, id_lesson, id_discipline):
        """Edit an existing question (write permission)."""
        if request.user.has_perm("askmath.write_question") and request.user.has_perm("askmath.access_manager"):
            try:
                discipline = DisciplineModel.objects.get(id=id_discipline)
            except Exception, e:
                print e
                messages.error(request, TextMessage.DISCIPLINE_NOT_FOUND)
                return HttpResponseRedirect( reverse('askmath:manager_lesson_view',  kwargs={'id_lesson': id_lesson}))
            try:
                lesson = LessonModel.objects.get(id=id_lesson)
            except Exception, e:
                print e
                messages.error(request, TextMessage.LESSON_NOT_FOUND)
                return HttpResponseRedirect( reverse('askmath:manager_lesson_view',  kwargs={'id_lesson': id_lesson}))
            try:
                question = QuestionModel.objects.get(id=id_question)
            except Exception, e:
                print e
                messages.error(request, TextMessage.QUESTION_NOT_FOUND)
                return HttpResponseRedirect(reverse('askmath:manager_question_view', kwargs={'id_discipline': id_discipline, 'id_lesson': id_lesson}))
            try:
                return self.__question.edit_question(request, question, lesson, discipline)
            except Exception, e:
                print e
                messages.error(request, TextMessage.QUESTION_ERROR_EDIT)
        else:
            messages.error(request, TextMessage.USER_NOT_PERMISSION)
        return HttpResponseRedirect( reverse('askmath:manager_lesson_view',  kwargs={'id_lesson': id_lesson}))

    @method_decorator(login_required)
    def restore_question(self, request, id_question, id_lesson, id_discipline):
        """Restore a previously removed question (write permission).

        Unlike the other write actions, the fallback redirect goes to the
        question list, not the lesson view.
        """
        if request.user.has_perm("askmath.write_question") and request.user.has_perm("askmath.access_manager"):
            try:
                discipline = DisciplineModel.objects.get(id=id_discipline)
            except Exception, e:
                print e
                messages.error(request, TextMessage.DISCIPLINE_NOT_FOUND)
                return HttpResponseRedirect( reverse('askmath:manager_lesson_view',  kwargs={'id_lesson': id_lesson}))
            try:
                lesson = LessonModel.objects.get(id=id_lesson)
            except Exception, e:
                print e
                messages.error(request, TextMessage.LESSON_NOT_FOUND)
                return HttpResponseRedirect( reverse('askmath:manager_lesson_view',  kwargs={'id_lesson': id_lesson}))
            try:
                question = QuestionModel.objects.get(id=id_question)
            except Exception, e:
                print e
                messages.error(request, TextMessage.QUESTION_NOT_FOUND)
                return HttpResponseRedirect(reverse('askmath:manager_question_view', kwargs={'id_discipline': id_discipline, 'id_lesson': id_lesson}))
            try:
                return self.__question.restore_question(request, question, lesson, discipline)
            except Exception, e:
                print e
                messages.error(request, TextMessage.QUESTION_ERROR_RESTORE)
        else:
            messages.error(request, TextMessage.USER_NOT_PERMISSION)
        return HttpResponseRedirect(reverse('askmath:manager_question_view', kwargs={'id_discipline': id_discipline, 'id_lesson': id_lesson}))

    @method_decorator(login_required)
    def sort_questions(self, request, id_lesson, id_discipline):
        """Reorder a lesson's questions; POST carries the new order as JSON."""
        if request.user.has_perm("askmath.read_question") and request.user.has_perm("askmath.access_manager"):
            try:
                discipline = DisciplineModel.objects.get(id=id_discipline)
            except Exception, e:
                print e
                messages.error(request, TextMessage.DISCIPLINE_NOT_FOUND)
                return HttpResponseRedirect( reverse('askmath:manager_lesson_view',  kwargs={'id_lesson': id_lesson}))
            try:
                lesson = LessonModel.objects.get(id=id_lesson)
            except Exception, e:
                print e
                messages.error(request, TextMessage.LESSON_NOT_FOUND)
                return HttpResponseRedirect( reverse('askmath:manager_lesson_view',  kwargs={'id_lesson': id_lesson}))
            try:
                if request.method == "POST":
                    # The new ordering is posted as a JSON-encoded payload.
                    new_order = json.loads(request.POST['new_order'])
                    return self.__question.sort_questions(request, lesson, discipline, new_order)
                else:
                    return self.__question.sort_questions(request, lesson, discipline, None)
            except Exception, e:
                print e
                messages.error(request, TextMessage.QUESTION_ERROR_SORT)
        else:
            messages.error(request, TextMessage.USER_NOT_PERMISSION)
        return HttpResponseRedirect(reverse('askmath:manager_question_view', kwargs={'id_discipline': id_discipline, 'id_lesson': id_lesson}))
return HttpResponseRedirect(reverse('askmath:manager_question_view', kwargs={'id_discipline': id_discipline, 'id_lesson': id_lesson})) | saraivaufc/askMathPlus | askmath/views/manager/question/proxyquestion.py | Python | gpl-2.0 | 9,799 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10a1 on 2016-06-21 09:08
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration (Django 1.10a1).

    Re-declares ``Post.author`` as a ForeignKey to the project's configured
    user model with CASCADE deletion of posts when the user is removed.
    """

    dependencies = [
        ('discussions', '0004_post_author'),
    ]

    operations = [
        migrations.AlterField(
            model_name='post',
            name='author',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
    ]
| Udayraj123/dashboard_IITG | Binder/discussions/migrations/0005_auto_20160621_1438.py | Python | mit | 572 |
#!/usr/bin/env python3
import fea
import numpy as np
import stltovoxel
import stltovoxel.stl_reader
from stltovoxel import slice
from stltovoxel import util
def readVoxels(inputFilePath, resolution=90):
    """Voxelise an STL file.

    :param inputFilePath: path to the STL file to read
    :param resolution: target voxel resolution passed to stltovoxel
    :returns: ``(vol, bounding_box, scale, shift)`` where ``vol`` is a
        boolean voxel array addressed as ``vol[z][x][y]``
    """
    triangles = list(stltovoxel.stl_reader.read_stl_verticies(inputFilePath))
    scale, shift, bounding_box = stltovoxel.slice.calculateScaleAndShift(triangles, resolution)
    triangles = list(slice.scaleAndShiftMesh(triangles, scale, shift))
    # Note: vol should be addressed with vol[z][x][y].
    vol = np.zeros((bounding_box[2], bounding_box[0], bounding_box[1]), dtype=bool)
    layer_count = bounding_box[2]
    for layer in range(layer_count):
        print('Processing layer %d/%d' % (layer + 1, layer_count))
        segments = slice.toIntersectingLines(triangles, layer)
        layer_pixels = np.zeros((bounding_box[0], bounding_box[1]), dtype=bool)
        stltovoxel.perimeter.linesToVoxels(segments, layer_pixels)
        vol[layer] = layer_pixels
    vol, bounding_box = util.padVoxelArray(vol)
    return (vol, bounding_box, scale, shift)
def writeMesh(mesh, filename):
    """Write *mesh* to an Abaqus/CalculiX ``.inp`` include file.

    Emits, in order: all nodes, all elements (C3D8 for linear meshes,
    C3D20R otherwise), every node set and element set, and finally a bed
    of weak B31 beam elements hung below the ``interface`` node set.
    """
    file = open(filename, 'w')
    file.write('*NODE,nset=all\n')
    for node in mesh.nodes.values():
        file.write(str(node.num))
        for c in node.coord:
            file.write(',' + str(c))
        file.write('\n')
    if mesh.linear:
        file.write('*ELEMENT, type=C3D8, elset=all\n')
    else:
        file.write('*ELEMENT, type=C3D20R, elset=all\n')
    for element in mesh.elements.values():
        file.write(str(element.num))
        # i counts ids since the last line break (wrap after 15 entries);
        # j counts total ids so no comma is written after the 20th node.
        i = 0
        j = 0
        file.write(',')
        for n in element.nodes:
            file.write(str(n))
            i += 1
            j += 1
            if j < 20:
                file.write(',')
            if i > 14:
                file.write('\n')
                i = 0
        file.write('\n')
    for sset in mesh.nsets.keys():  # print all node sets
        file.write('*nset,nset=' + sset + '\n')
        for num in mesh.nsets[sset]:
            file.write(str(num) + '\n')
    for sset in mesh.esets.keys():  # print all element sets
        file.write('*elset,elset=' + sset + '\n')
        for num in mesh.esets[sset]:
            file.write(str(num) + '\n')
    # Add a bed of weak beams under the plate: for every interface node,
    # create a twin node ``shift`` units below it and join the pair later
    # with a beam element.
    beamNodePairs = []
    shift = 30
    file.write('*node,nset=nodeBed\n')
    for n in mesh.nsets['interface']:
        node = mesh.getNode(n)
        newNum = mesh.addNode((node.coord[0], node.coord[1], node.coord[2] - shift))
        newNode = mesh.getNode(newNum)
        beamNodePairs.append([n, newNum])
        file.write(str(newNum) + ',' + str(newNode.coord[0]) + ',' + str(newNode.coord[1]) + ',' + str(newNode.coord[2]) + '\n')
    file.write('*element,TYPE=B31,elset=beamBed\n')
    enumStart = mesh.highestElementNumber + 1
    i = 0
    for p in beamNodePairs:
        file.write(str(enumStart + i) + ',' + str(p[0]) + ',' + str(p[1]) + '\n')
        i += 1
    # The triple-quoted block below is disabled code kept by the original
    # author (pins three nodes L1/L2/L3 at the centre of the build plate).
    '''
    #lock three nodes L1,L2,L3 in the center of the build plate
    X=set()
    Y=set()
    for n in mesh.nsets['buildBottom']:
    node=mesh.getNode(n)
    X.add(node.coord[0])
    Y.add(node.coord[1])
    if len(X)%2==0: #if the sets of coordinates contain an even number of elements remove the last one
    X.remove(max(X))
    if len(Y)%2==0:
    Y.remove(max(Y))
    OneNodeCoord=[np.median(list(X)),np.median(list(Y))]
    print('OneNodeCoord',OneNodeCoord)
    Z=node.coord[2]
    tol=1e-5
    nodeswithin=mesh.getNodesWithIn(OneNodeCoord[0]-tol,OneNodeCoord[0]+tol,OneNodeCoord[1]-tol,OneNodeCoord[1]+tol,Z-tol,Z+tol)[0]
    #nodeswithin=mesh.getNodesWithIn(OneNodeCoord[0]-tol,OneNodeCoord[0]+tol,OneNodeCoord[1]-tol,OneNodeCoord[1]+tol,-10000000,100000000)[0]
    OneNode=mesh.getNode(nodeswithin)
    belongsToElem=mesh.getElement(mesh.getElementsWithNode(OneNode.num)[0]) #first element connected to L1
    bottomNodes=belongsToElem.getFace(2)
    file.write('*NSET,NSET=L1\n')
    file.write(str(bottomNodes[0])+'\n')
    file.write('*NSET,NSET=L2\n')
    file.write(str(bottomNodes[1])+'\n')
    file.write('*NSET,NSET=L3\n')
    file.write(str(bottomNodes[2])+'\n')
    '''
    file.close()
def writeSteps(layers, startLayer, filename, dwell, temp, mesh, creep=False):
    """Write the CalculiX step definitions for the layer-by-layer build.

    One *STATIC step is emitted per layer from *startLayer* to *layers*+1:
    each step activates the next layer's elements strain-free and applies
    *temp* to the layer deposited in the previous step.  When *creep* is
    true, every static step is followed by a *VISCO (creep) step.

    NOTE(review): the *mesh* argument is never used in this body — presumably
    kept for signature symmetry; confirm before removing.
    """
    file = open(filename, 'w')
    step = 1
    for i in range(startLayer, layers + 2):
        step += 1
        file.write("""**------------------------- Step """ + str(step + 1) + """---------------------------------------------
**
**
*Step,INC=1000
*STATIC,NLGEOM
1e-8,""" + str(dwell) + """,1e-10,
**""")
        if i < layers + 1:  # dont do this for more layers than exist in model
            file.write("""
*MODEL CHANGE,TYPE=ELEMENT,ADD=STRAINFREE
layer_""" + str(i + 1))
            file.write("""
""")
        if i > startLayer:
            file.write("""
*TEMPERATURE
layer_""" + str(i) + ',' + str(temp))
        file.write("""
*End Step
""")
        if creep:
            file.write("""*Step,INC=1000
*VISCO,CETOL=1e-3
1e-3,""" + str(dwell) + """,1e-10,
**
*End Step
""")
        # Keep the step counter in sync with the extra creep step.
        if creep:
            step += 1
    file.close()
def readParameters(filename):
    """Parse a ``key=value`` parameter file into a dict.

    ``resolution`` is coerced to ``int``; ``comment`` is kept verbatim
    (including its trailing newline); every other value is coerced to
    ``float``.  Lines that do not parse (no ``=``, bad number, ...) are
    skipped silently — the parse is deliberately best-effort.

    :param filename: path of the parameter file
    :returns: dict of parsed parameters
    """
    parameters = {}
    # ``with`` guarantees the handle is closed (the original leaked it).
    with open(filename, 'r') as file:
        for line in file:
            try:
                p = line.split('=')[0]
                v = line.split('=')[1]
                if p == 'resolution':
                    parameters['resolution'] = int(v)
                elif p == 'comment':
                    parameters[p] = v
                else:
                    parameters[p] = float(v)
            except Exception:
                # Malformed line: ignore and continue (was a bare ``except:``,
                # which also swallowed KeyboardInterrupt/SystemExit).
                pass
    return parameters
def run(parameters, name, dir_path, creep=True):
    """Voxelise the STL *name*, build an FE mesh, and write a CalculiX job.

    Creates a unique job directory ``<stl basename>_<uuid>`` containing
    ``am.inp`` (templated), the STL, ``geom.inp`` (mesh) and ``steps.inp``
    (build steps).  Returns ``(directory, mesh)``.
    """
    import uuid
    uid = str(uuid.uuid4())
    (vol, bounding_box, scale, shift) = readVoxels(name, parameters['resolution'])
    layerThickness = 1 / scale[2]
    # Dwell time per layer = layer thickness / build speed.
    dwell = layerThickness / parameters['speed']
    mesh = fea.mesh([], [])  # create empty mesh
    mesh.createEmptyWebofSectors([0, bounding_box[0] / scale[0]], [0, bounding_box[1] / scale[1]], [0, bounding_box[2] / scale[2]])
    totalLayers = 0
    print('Generating mesh...')
    # One hexahedral element per filled voxel; elements and their nodes are
    # grouped into per-layer sets named ``layer_<k-1>``.
    for k in range(bounding_box[2]):
        totalLayers += 1
        print('layer ' + str(k + 1) + ' / ' + str(bounding_box[2]))
        for j in range(bounding_box[1]):
            for i in range(bounding_box[0]):
                if vol[k][i][j] == True:
                    x = (i - 1) / scale[0] - shift[0]
                    y = (j - 1) / scale[1] - shift[1]
                    z = (k - 1) / scale[2] - shift[2]
                    num = mesh.createAndAddElement([x, y, z], [1 / scale[0], 1 / scale[1], 1 / scale[2]])
                    mesh.add2elset(num, 'layer_' + str(k - 1))
                    enodes = mesh.getElement(num).nodes
                    for n in enodes:
                        mesh.add2nset(n, 'layer_' + str(k - 1))
    quad = False  # hard-wired: keep linear elements
    if quad:
        print("Replacing linear elements with quadratic ones")
        mesh.quad()
    mesh.update()
    print(('nodes', len(mesh.nodes)))
    print(('elements', len(mesh.elements)))
    # NOTE(review): this zmin scan is immediately overwritten by min(Zs) below.
    zmin = 1e9
    for n in mesh.nodes:
        node = mesh.getNode(n)
        if zmin > node.coord[2]:
            zmin = node.coord[2]
    # Identify the build plate
    Zs = [n.coord[2] for n in mesh.nodes.values()]
    zmin = min(Zs)
    Ys = [n.coord[1] for n in mesh.nodes.values()]
    ymin = min(Ys)
    ymax = max(Ys)
    Xs = [n.coord[0] for n in mesh.nodes.values()]
    xmin = min(Xs)
    xmax = max(Xs)
    xmins = mesh.getNodesWithIn(xmin - 1e-5, xmin + 1e-5, -1000000000, 100000000, -1000000000, 100000000000)
    zmax = -100000000
    for node in xmins:  # find the largest z in the xmins nodes
        x = mesh.getNode(node).coord[0]
        z = mesh.getNode(node).coord[2]
        if z > zmax:
            zmax = z
    boxSize = [xmax - xmin, ymax - ymin, zmax - zmin]
    topOfBuild = -10000000000
    for n in mesh.nodes:
        node = mesh.getNode(n)
        if topOfBuild < node.coord[2]:
            topOfBuild = node.coord[2]
    # Node/element sets: plate, plate bottom, top of build, plate/build interface.
    buildBottom = mesh.getNodesWithIn(-1000000000, 100000000000, -1000000000, 100000000000, +zmin + boxSize[2] - 1e-5, +zmin + boxSize[2] + 1e-5)
    buildPlate = mesh.getNodesWithIn(-1000000000, 100000000000, -1000000000, 100000000000, zmin - 1e-5, zmin + boxSize[2] + 1e-5)
    buildPlateElements = mesh.getElementsWithNodes(buildPlate, any=True)
    top = mesh.getNodesWithIn(-1000000000, 100000000000, -1000000000, 100000000000, topOfBuild - 1e-5, topOfBuild + 1e-5)
    bottomNodes = mesh.getNodesWithIn(-1000000000, 100000000000, -1000000000, 100000000000, zmin - 1e-5, zmin + 1e-5)  # the very bottom nodes of the plate
    layersInPlate = int(np.round((zmax - zmin) * scale[2]))
    bottomElements = mesh.getElementsWithNodes(bottomNodes, any=True)
    interface = list(set(buildPlate) & set(mesh.nsets['layer_' + str(layersInPlate)]))
    build = list(set(mesh.nodes) - set(buildPlate))
    buildElements = mesh.getElementsWithNodes(build, any=True)
    # Plate elements = elements touching the plate but not the build.
    buildPlateElements = list(set(mesh.getElementsWithNodes(buildPlate, any=True)) - set(mesh.getElementsWithNodes(build, any=True)))
    for n in buildBottom:
        mesh.add2nset(n, 'buildBottom')
    for n in top:
        mesh.add2nset(n, 'top')
    for n in buildPlate:
        mesh.add2nset(n, 'buildPlate')
    for e in buildPlateElements:
        mesh.add2elset(e, 'buildPlateElements')
    for e in bottomElements:
        mesh.add2elset(e, 'bottomElements')
    for n in build:
        mesh.add2nset(n, 'build')
    for e in buildElements:
        mesh.add2elset(e, 'buildElements')
    for n in interface:
        mesh.add2nset(n, 'interface')
    for n in bottomNodes:
        mesh.add2nset(n, 'bottomNodes')
    mesh.createWebofSectors(d1=37, d2=43)
    import shutil
    import os
    filename = os.path.basename(name)
    print('filename', filename)
    directory = filename + '_' + uid
    if not os.path.exists(directory):
        os.makedirs(directory)
    cwd = os.getcwd()
    parameters['dwell'] = dwell
    parseInput(os.path.normpath(dir_path + '/am.inp'), os.path.normpath(directory + '/am.inp'), parameters)
    shutil.copy(os.path.normpath(name), os.path.normpath(directory + '/'))
    try:  # parameter file may not exist
        shutil.copy(os.path.normpath(name + '.par'), os.path.normpath(directory + '/' + name + '.par'))
    except:
        pass
    writeMesh(mesh, os.path.normpath(directory + '/geom.inp'))
    writeSteps(layers=totalLayers - 4, startLayer=layersInPlate - 1, filename=os.path.normpath(directory + '/steps.inp'), dwell=dwell, temp=parameters['sinkTemp'], mesh=mesh, creep=creep)
    return (directory, mesh)
def parseInput(infilename, outfilename, parameters):
    """Instantiate a template: copy *infilename* to *outfilename* with
    every ``#<key>`` marker replaced by ``str(parameters[key])``.

    Each substitution is echoed to stdout as ``key,value``.
    """
    with open(infilename, 'r') as template:
        text = template.read()
    # replace paremeters with actual numbers
    for key, value in parameters.items():
        print(key + ',' + str(value))
        text = text.replace('#' + key, str(value))
    # Write the file
    with open(outfilename, 'w') as out:
        out.write(text)
def calc(directory, cpus=1):  # Run calculix.
    """Run the CalculiX solver (``ccx_fame``) on the prepared job.

    Changes into *directory*, sets ``OMP_NUM_THREADS`` to *cpus*, runs the
    solver (stdout captured in ``ccx_output.txt``), then returns to the
    parent directory.
    """
    print('Solving..')
    import os
    os.chdir(directory)
    os.environ['OMP_NUM_THREADS'] = str(cpus)
    os.system('ccx_fame am > ccx_output.txt')
    os.chdir('../')
if __name__ == "__main__":
    print("""
------------------------------------------------
| FAME v.0.8 |
------------------------------------------------
""")
    import getopt, sys, os, shutil
    creep = False
    try:
        opts, args = getopt.gnu_getopt(sys.argv[1:], 'i:p:c:')
    except getopt.GetoptError as err:
        print(str(err))
    cpus = 1
    # BUGFIX: the original assigned ``run = False`` here, shadowing the
    # ``run()`` function and making the ``run(...)`` call below a TypeError.
    # The variable was never read, so it is removed.
    for o, a in opts:
        print(o, a)
        # BUGFIX: exact comparison instead of ``o in '-i'`` (a substring
        # test, true for o == '-' as well).
        if o == '-i':
            name = a
        if o == '-p':
            parameterFilename = a
        if o == '-c':
            cpus = int(a)
    if 'creep' in args:
        creep = True
        # BUGFIX: the original printed 'No creep steps' here, contradicting
        # the fact that creep steps were just enabled.
        print('Creep steps enabled')
    if 'noadjust' in args:
        scale = -1
    else:
        scale = 1
    dir_path = os.path.dirname(os.path.realpath(__file__))  # directory where the FAME.py file resides
    parameters = readParameters(parameterFilename)
    (directory, mesh) = run(parameters, name, dir_path, creep)
    calc(directory=directory, cpus=cpus)
    import post
    post.readResults(os.path.normpath(directory + '/am.frd'), mesh)
    stlmesh = post.readSTL(name)
    print('Adjusting STL')
    resultPath = os.path.relpath(directory + '/' + os.path.basename(name)[:-4])
    post.adjustSTL(resultPath, mesh, stlmesh, scale=scale, power=3)
    shutil.copy(os.path.relpath(directory + '/' + os.path.basename(name)[:-4] + '_adjusted.stl'), os.path.relpath(os.path.basename(name)[:-4] + '_adjusted.stl'))
| wredenberg/FAME | FAME.py | Python | gpl-3.0 | 12,571 |
import cProfile
import json
import os
import unittest
from pstats import Stats
from typing import Callable
from draftjs_exporter.constants import BLOCK_TYPES, ENTITY_TYPES
from draftjs_exporter.defaults import BLOCK_MAP, STYLE_MAP
from draftjs_exporter.dom import DOM
from draftjs_exporter.html import HTML
from draftjs_exporter.types import ContentState
from tests.test_composite_decorators import (
BR_DECORATOR,
HASHTAG_DECORATOR,
LINKIFY_DECORATOR,
)
from tests.test_entities import hr, image, link
# Shared fixtures: each entry has a label, a Draft.js content_state and the
# expected HTML output per rendering engine.
fixtures_path = os.path.join(os.path.dirname(__file__), "test_exports.json")
with open(fixtures_path, "r") as f:
    fixtures = json.loads(f.read())

# One exporter instance shared by all generated test cases, configured with
# entity decorators, composite decorators and customised block/style maps.
exporter = HTML(
    {
        "entity_decorators": {
            ENTITY_TYPES.LINK: link,
            ENTITY_TYPES.HORIZONTAL_RULE: hr,
            ENTITY_TYPES.IMAGE: image,
            ENTITY_TYPES.EMBED: None,
        },
        "composite_decorators": [
            BR_DECORATOR,
            LINKIFY_DECORATOR,
            HASHTAG_DECORATOR,
        ],
        # Default block map, with unordered lists wrapped in a classed <ul>.
        "block_map": dict(
            BLOCK_MAP,
            **{
                BLOCK_TYPES.UNORDERED_LIST_ITEM: {
                    "element": "li",
                    "wrapper": "ul",
                    "wrapper_props": {"class": "bullet-list"},
                }
            },
        ),
        # Default style map, extended with KBD and HIGHLIGHT styles.
        "style_map": dict(
            STYLE_MAP,
            **{
                "KBD": "kbd",
                "HIGHLIGHT": {
                    "element": "strong",
                    "props": {"style": {"textDecoration": "underline"}},
                },
            },
        ),
    }
)
class TestExportsMeta(type):
    """
    Generates test cases dynamically.
    See http://stackoverflow.com/a/20870875/1798491
    """

    pr: cProfile.Profile = None  # type: ignore

    def __new__(mcs, name, bases, tests):
        # Factory so each generated test binds its own content/html values
        # (avoids the late-binding-closure pitfall of a plain loop closure).
        def gen_test(
            content: ContentState, html: str
        ) -> Callable[[None], None]:
            def test(self):
                self.assertEqual(exporter.render(content), html)

            return test

        # Engine name is derived from the class name,
        # e.g. ``TestExportsLXML`` -> ``"lxml"``.
        engine = name.replace("TestExports", "").lower()

        # One test method per fixture, named test_export_<engine>_<label>.
        for export in fixtures:
            test_label = export["label"].lower().replace(" ", "_")
            test_name = f"test_export_{engine}_{test_label}"
            content = export["content_state"]
            html = export["output"][engine]
            tests[test_name] = gen_test(content, html)

        return type.__new__(mcs, name, bases, tests)
class TestExportsHTML5LIB(unittest.TestCase, metaclass=TestExportsMeta):
    """Fixture-driven export tests rendered with the html5lib engine."""

    @classmethod
    def setUpClass(cls):
        DOM.use(DOM.HTML5LIB)
        # Profile the whole test class; stats are printed in tearDownClass.
        cls.pr = cProfile.Profile()
        cls.pr.enable()
        print("\nhtml5lib")

    @classmethod
    def tearDownClass(cls):
        cls.pr.disable()
        Stats(cls.pr).strip_dirs().sort_stats("cumulative").print_stats(0)


class TestExportsLXML(unittest.TestCase, metaclass=TestExportsMeta):
    """Fixture-driven export tests rendered with the lxml engine."""

    @classmethod
    def setUpClass(cls):
        DOM.use(DOM.LXML)
        cls.pr = cProfile.Profile()
        cls.pr.enable()
        print("\nlxml")

    @classmethod
    def tearDownClass(cls):
        cls.pr.disable()
        Stats(cls.pr).strip_dirs().sort_stats("cumulative").print_stats(0)


class TestExportsSTRING(unittest.TestCase, metaclass=TestExportsMeta):
    """Fixture-driven export tests rendered with the string engine."""

    @classmethod
    def setUpClass(cls):
        DOM.use(DOM.STRING)
        cls.pr = cProfile.Profile()
        cls.pr.enable()
        print("\nstring")

    @classmethod
    def tearDownClass(cls):
        cls.pr.disable()
        Stats(cls.pr).strip_dirs().sort_stats("cumulative").print_stats(0)
if __name__ == "__main__":
unittest.main()
| springload/draftjs_exporter | tests/test_exports.py | Python | mit | 3,660 |
#!/usr/bin/env python3
"""Thin launcher script for the acm-cli command line tool."""
import acm_cli.main

if __name__ == '__main__':
    acm_cli.main.main()
| actics/acmcli | acmcli.py | Python | mit | 96 |
#----------------------------------------------------------------------
# Copyright (c) 2010 Raytheon BBN Technologies
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and/or hardware specification (the "Work") to
# deal in the Work without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Work, and to permit persons to whom the Work
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Work.
#
# THE WORK IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE WORK OR THE USE OR OTHER DEALINGS
# IN THE WORK.
#----------------------------------------------------------------------
"""
Reference GENI GCF Clearinghouse. Uses SFA Certificate and credential objects.
Run from gcf-ch.py
Will produce signed user credentials from a GID, return a
list of aggregates read from a config file, and create a new Slice Credential.
"""
import datetime
import traceback
import uuid
import os
import dateutil.parser
from SecureXMLRPCServer import SecureXMLRPCServer
from expedient.common.federation.geni.util import cred_util
from expedient.common.federation.geni.util import cert_util
from expedient.common.federation.geni.util import urn_util
from expedient.common.federation.sfa.trust import gid
# Substitute eg "openflow//stanford"
# Be sure this matches init-ca.py:CERT_AUTHORITY
# This is in publicid format
SLICE_AUTHORITY = "geni//gpo//gcf"
# Credential lifetimes in seconds
# Extend slice lifetimes to actually use the resources
USER_CRED_LIFE = 86400
SLICE_CRED_LIFE = 3600
# Make the max life of a slice 30 days (an arbitrary length).
SLICE_MAX_LIFE_SECS = 30 * 24 * 60 * 60
# The list of Aggregates that this Clearinghouse knows about
# should be defined in the gcf_config file in the am_* properties.
# ListResources will refer the client to these aggregates
# Clearinghouse.runserver currently does the register_aggregate_pair
# calls for each row in that file
# but this should be doable dynamically
# Some sample pairs:
# GPOMYPLC = ('urn:publicid:IDN+plc:gpo1+authority+sa',
# 'http://myplc1.gpolab.bbn.com:12348')
# TESTGCFAM = ('urn:publicid:IDN+geni.net:gpo+authority+gcf',
# 'https://127.0.0.1:8001')
# OTHERGPOMYPLC = ('urn:publicid:IDN+plc:gpo+authority+site2',
# 'http://128.89.81.74:12348')
# ELABINELABAM = ('urn:publicid:IDN+elabinelab.geni.emulab.net',
# 'https://myboss.elabinelab.geni.emulab.net:443/protogeni/xmlrpc/am')
class SampleClearinghouseServer(object):
    """A sample clearinghouse with barebones functionality.

    Thin XML-RPC facade: every public method simply forwards to the
    wrapped delegate object.
    """

    def __init__(self, delegate):
        self._delegate = delegate

    def GetVersion(self):
        """Forward GetVersion to the delegate."""
        return self._delegate.GetVersion()

    def CreateSlice(self, urn=None):
        """Forward CreateSlice; the delegate takes the URN as ``urn_req``."""
        return self._delegate.CreateSlice(urn_req=urn)

    def RenewSlice(self, urn, expire_str):
        """Forward RenewSlice, logging any failure before re-raising."""
        try:
            return self._delegate.RenewSlice(urn, expire_str)
        except:
            self._delegate.logger.error(traceback.format_exc())
            raise

    def DeleteSlice(self, urn):
        """Forward DeleteSlice to the delegate."""
        return self._delegate.DeleteSlice(urn)

    def ListAggregates(self):
        """Forward ListAggregates to the delegate."""
        return self._delegate.ListAggregates()

    def CreateUserCredential(self, cert):
        """Forward CreateUserCredential to the delegate."""
        return self._delegate.CreateUserCredential(cert)
class Clearinghouse(object):
    """Reference GENI clearinghouse.

    Issues user and slice credentials, lists the aggregate managers read
    from the config file, and serves these operations over secure XML-RPC.
    All Python-2-only constructs (``except E, e`` and ``dict.has_key``)
    have been replaced with forms valid in both Python 2.6+ and Python 3.
    """

    def __init__(self):
        self.logger = cred_util.logging.getLogger('gcf-ch')
        self.slices = {}  # slice URN -> slice credential
        self.aggs = []    # list of (urn, url) aggregate pairs

    def load_aggregates(self):
        """Loads aggregates from the clearinghouse section of the config file.

        In the config section there are keys for each am, am_1, am_2, ..., am_n
        The value for each key is the urn and url of the aggregate separated by a comma.
        NOTE(review): despite the original docstring, this returns None, not
        True/False.
        """
        for (key, val) in self.config['clearinghouse'].items():
            if not key.startswith('am_'):
                continue
            (urn, url) = val.split(',')
            urn = urn.strip()
            url = url.strip()
            if not urn:
                self.logger.warn('Empty URN for aggregate %s in gcf_config' % key)
                continue
            if not url:
                self.logger.warn('Empty URL for aggregate %s in gcf_config' % key)
                continue
            if urn in [x for (x, _) in self.aggs]:
                self.logger.warn('Duplicate URN %s in gcf_config' % key)
                continue
            self.logger.info("Registering AM %s at %s", urn, url)
            self.aggs.append((urn, url))

    def runserver(self, addr, keyfile=None, certfile=None,
                  ca_certs=None, authority=None,
                  user_len=None, slice_len=None, config=None):
        """Run the clearinghouse server."""
        # ca_certs is a dir of several certificates for peering
        # If not supplied just use the certfile as the only trusted root
        self.keyfile = keyfile
        self.certfile = certfile
        self.config = config

        # Error check the keyfile, certfile all exist
        if keyfile is None or not os.path.isfile(os.path.expanduser(keyfile)):
            raise Exception("Missing CH key file %s" % keyfile)
        if certfile is None or not os.path.isfile(os.path.expanduser(certfile)):
            raise Exception("Missing CH cert file %s" % certfile)

        if ca_certs is None:
            ca_certs = certfile
            self.logger.info("Using only my CH cert as a trusted root cert")

        self.trusted_root_files = cred_util.CredentialVerifier(ca_certs).root_cert_files

        if not os.path.exists(os.path.expanduser(ca_certs)):
            raise Exception("Missing CA cert(s): %s" % ca_certs)

        global SLICE_AUTHORITY, USER_CRED_LIFE, SLICE_CRED_LIFE
        SLICE_AUTHORITY = authority
        USER_CRED_LIFE = int(user_len)
        SLICE_CRED_LIFE = int(slice_len)

        # Load up the aggregates
        self.load_aggregates()

        # This is the arg to _make_server
        ca_certs_onefname = cred_util.CredentialVerifier.getCAsFileFromDir(ca_certs)

        # This is used below by CreateSlice
        self.ca_cert_fnames = []
        if os.path.isfile(os.path.expanduser(ca_certs)):
            self.ca_cert_fnames = [os.path.expanduser(ca_certs)]
        elif os.path.isdir(os.path.expanduser(ca_certs)):
            self.ca_cert_fnames = [os.path.join(os.path.expanduser(ca_certs), name) for name in os.listdir(os.path.expanduser(ca_certs)) if name != cred_util.CredentialVerifier.CATEDCERTSFNAME]

        # Create the xmlrpc server, load the rootkeys and do the ssl thing.
        self._server = self._make_server(addr, keyfile, certfile,
                                         ca_certs_onefname)
        self._server.register_instance(SampleClearinghouseServer(self))
        self.logger.info('GENI CH Listening on port %d...' % (addr[1]))
        self._server.serve_forever()

    def _make_server(self, addr, keyfile=None, certfile=None,
                     ca_certs=None):
        """Creates the XML RPC server."""
        # ca_certs is a file of concatenated certs
        return SecureXMLRPCServer(addr, keyfile=keyfile, certfile=certfile,
                                  ca_certs=ca_certs)

    def GetVersion(self):
        """Return a dict describing the CH API version."""
        self.logger.info("Called GetVersion")
        version = dict()
        version['gcf-ch_api'] = 1
        return version

    # FIXME: Change that URN to be a name and non-optional
    # Currently gcf-test.py doesnt supply it, and
    # Omni takes a name and constructs a URN to supply
    def CreateSlice(self, urn_req=None):
        """Create (or renew) a slice and return its credential as a string."""
        self.logger.info("Called CreateSlice URN REQ %r" % urn_req)
        slice_gid = None

        # was self.slices.has_key(urn_req) -- removed in Python 3
        if urn_req and urn_req in self.slices:
            # If the Slice has expired, treat this as
            # a request to renew
            slice_cred = self.slices[urn_req]
            if slice_cred.expiration <= datetime.datetime.utcnow():
                # Need to renew this slice
                self.logger.info("CreateSlice on %r found existing cred that expired at %r - will renew", urn_req, slice_cred.expiration)
                slice_gid = slice_cred.get_gid_object()
            else:
                self.logger.debug("Slice cred is still valid at %r until %r - return it", datetime.datetime.utcnow(), slice_cred.expiration)
                return slice_cred.save_to_string()

        # First ensure we have a slice_urn
        if urn_req:
            # FIXME: Validate urn_req has the right form
            # to be issued by this CH
            if not urn_util.is_valid_urn(urn_req):
                # FIXME: make sure it isnt empty, etc...
                urn = urn_util.publicid_to_urn(urn_req)
            else:
                urn = urn_req
        else:
            # Generate a unique URN for the slice
            # based on this CH location and a UUID

            # Where was the slice created?
            (ipaddr, port) = self._server.socket._sock.getsockname()
            # FIXME: Get public_id start from a properties file
            # Create a unique name for the slice based on uuid
            slice_name = uuid.uuid4().__str__()[4:12]
            public_id = 'IDN %s slice %s//%s:%d' % (SLICE_AUTHORITY, slice_name,
                                                    ipaddr,
                                                    port)
            # this func adds the urn:publicid:
            # and converts spaces to +'s, and // to :
            urn = urn_util.publicid_to_urn(public_id)

        # Now create a GID for the slice (signed credential)
        if slice_gid is None:
            try:
                slice_gid = cert_util.create_cert(urn, self.keyfile, self.certfile)[0]
            except Exception as exc:  # was ``except Exception, exc`` (py2-only)
                self.logger.error("Cant create slice gid for slice urn %s: %s", urn, traceback.format_exc())
                raise Exception("Failed to create slice %s. Cant create slice gid" % urn, exc)

        # Now get the user GID which will have permissions on this slice.
        # Get client x509 cert from the SSL connection
        # It doesnt have the chain but should be signed
        # by this CHs cert, which should also be a trusted
        # root at any federated AM. So everyone can verify it as is.
        # Note that if a user from a different CH (installed
        # as trusted by this CH for some reason) called this method,
        # that user would be used here - and can still get a valid slice
        try:
            user_gid = gid.GID(string=self._server.pem_cert)
        except Exception as exc:
            self.logger.error("CreateSlice failed to create user_gid from SSL client cert: %s", traceback.format_exc())
            raise Exception("Failed to create slice %s. Cant get user GID from SSL client certificate." % urn, exc)

        # OK have a user_gid so can get a slice credential
        # authorizing this user on the slice
        try:
            slice_cred = self.create_slice_credential(user_gid,
                                                      slice_gid,
                                                      SLICE_CRED_LIFE)
        except Exception as exc:
            self.logger.error('CreateSlice failed to get slice credential for user %r, slice %r: %s', user_gid.get_hrn(), slice_gid.get_hrn(), traceback.format_exc())
            raise Exception('CreateSlice failed to get slice credential for user %r, slice %r' % (user_gid.get_hrn(), slice_gid.get_hrn()), exc)
        self.logger.info('Created slice %r' % (urn))

        self.slices[urn] = slice_cred
        return slice_cred.save_to_string()

    def RenewSlice(self, slice_urn, expire_str):
        """Renew an existing slice until *expire_str*; return True on success."""
        self.logger.info("Called RenewSlice(%s, %s)", slice_urn, expire_str)
        if slice_urn not in self.slices:  # was has_key
            self.logger.warning('Slice %s was not found', slice_urn)
            return False
        try:
            in_expiration = dateutil.parser.parse(expire_str)
        except Exception:  # was a bare ``except:`` which also caught SystemExit
            self.logger.warning('Unable to parse date "%s"', expire_str)
            return False

        # Is requested expiration valid? It must be in the future,
        # but not too far into the future.
        now = datetime.datetime.utcnow()
        now = now.replace(tzinfo=in_expiration.tzinfo)
        if in_expiration < now:
            self.logger.warning('Expiration "%s" is in the past.', expire_str)
            return False
        duration = in_expiration - now
        max_duration = datetime.timedelta(seconds=SLICE_MAX_LIFE_SECS)
        if duration > max_duration:
            self.logger.warning('Expiration %s is too far in the future.',
                                expire_str)
            return False

        # Everything checks out, so create a new slice cred and tuck it away.
        user_gid = gid.GID(string=self._server.pem_cert)
        slice_cred = self.slices[slice_urn]
        slice_gid = slice_cred.get_gid_object()
        duration_secs = duration.seconds + duration.days * 24 * 3600
        slice_cred = self.create_slice_credential(user_gid, slice_gid,
                                                  duration_secs)
        self.logger.info("Slice %s renewed to %s", slice_urn, expire_str)
        return True

    def DeleteSlice(self, urn_req):
        """Delete the slice with the given URN; return True if it existed."""
        self.logger.info("Called DeleteSlice %r" % urn_req)
        if urn_req in self.slices:  # was has_key
            self.slices.pop(urn_req)
            self.logger.info("Deleted slice")
            return True
        self.logger.info('Slice was not found')
        # Slice not found!
        # FIXME: Raise an error so client knows why this failed?
        return False

    def ListAggregates(self):
        """Return the list of (urn, url) aggregate pairs known to this CH."""
        self.logger.info("Called ListAggregates")
        # TODO: Allow dynamic registration of aggregates
        return self.aggs

    def CreateUserCredential(self, user_gid):
        '''Return string representation of a user credential
        issued by this CH with caller/object this user_gid (string)
        with user privileges'''
        # FIXME: Validate arg - non empty, my user
        user_gid = gid.GID(string=user_gid)
        self.logger.info("Called CreateUserCredential for GID %s" % user_gid.get_hrn())
        try:
            ucred = cred_util.create_credential(user_gid, user_gid, USER_CRED_LIFE, 'user', self.keyfile, self.certfile, self.trusted_root_files)
        except Exception as exc:
            self.logger.error("Failed to create user credential for %s: %s", user_gid.get_hrn(), traceback.format_exc())
            raise Exception("Failed to create user credential for %s" % user_gid.get_hrn(), exc)
        return ucred.save_to_string()

    def create_slice_credential(self, user_gid, slice_gid, duration):
        '''Create a Slice credential object for this user_gid (object) on given slice gid (object)'''
        # FIXME: Validate the user_gid and slice_gid
        # are my user and slice
        return cred_util.create_credential(user_gid, slice_gid, duration, 'slice', self.keyfile, self.certfile, self.trusted_root_files)
| dana-i2cat/felix | expedient/src/python/expedient/common/federation/geni/ch.py | Python | apache-2.0 | 15,839 |
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf import settings
from ckeditor.views import upload, browse
from utils.decorators import subjudge_auth_required
from django.views.decorators.csrf import csrf_exempt
import autocomplete_light
# OP autodiscover
autocomplete_light.autodiscover()

urlpatterns = patterns(
    '',
    # CKEditor file endpoints: sub-judge auth required, CSRF-exempt.
    url(r'^ckeditor/upload/', csrf_exempt(subjudge_auth_required(upload)),
        name='ckeditor_upload'),
    url(r'^ckeditor/browse/', csrf_exempt(subjudge_auth_required(browse)),
        name='ckeditor_browse'),
    # Serve uploaded media directly (development-style static serving).
    url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
        {'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
    url(r'^autocomplete/', include('autocomplete_light.urls')),
    url(r'^admin/', include(admin.site.urls)),
    # Application URL namespaces.
    url(r'^', include('index.urls', namespace='index')),
    url(r'^problem/', include('problem.urls', namespace='problem')),
    url(r'^contest/', include('contest.urls', namespace='contest')),
    url(r'^users/', include('users.urls', namespace='users')),
    url(r'^team/', include('team.urls', namespace='team')),
    url(r'^group/', include('group.urls', namespace='group')),
    url(r'^status/', include('status.urls', namespace='status')),
)

# Custom error-page handlers (dotted paths resolved by Django).
handler400 = 'index.views.custom_400'
handler403 = 'index.views.custom_403'
handler404 = 'index.views.custom_404'
handler500 = 'index.views.custom_500'
| bruce3557/NTHUOJ_web | nthuoj/urls.py | Python | mit | 1,441 |
# This file is part of Indico.
# Copyright (C) 2002 - 2020 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from flask import render_template, session
from indico.modules.events.management.views import WPEventManagement
from indico.modules.events.papers.forms import (PaperCommentForm, PaperJudgmentForm, PaperSubmissionForm,
build_review_form)
from indico.modules.events.views import WPConferenceDisplayBase
from indico.util.mathjax import MathjaxMixin
class WPManagePapers(MathjaxMixin, WPEventManagement):
    """Management-area page for the event's paper peer-reviewing module."""

    template_prefix = 'events/papers/'
    sidemenu_option = 'papers'
    bundles = ('markdown.js', 'module_events.papers.js')

    def _get_head_content(self):
        # Combine the standard management <head> content with MathJax setup.
        return WPEventManagement._get_head_content(self) + MathjaxMixin._get_head_content(self)
class WPDisplayPapersBase(WPConferenceDisplayBase):
    """Base class for display-side (attendee-facing) paper pages."""

    template_prefix = 'events/papers/'
    bundles = ('markdown.js', 'module_events.management.js', 'module_events.papers.js')


class WPDisplayJudgingArea(WPDisplayPapersBase):
    """Display page for the paper judging area."""

    menu_entry_name = 'paper_judging_area'
def render_paper_page(paper, view_class=None):
    """Render the paper display page with all its forms.

    Builds the comment form (only while the paper is not in a final state),
    a review form when the user reviews exactly one group, plus the
    judgment and revision forms.  Renders through *view_class* when given,
    otherwise as a standalone template.
    """
    comment_form = None
    if not paper.is_in_final_state:
        comment_form = PaperCommentForm(paper=paper, user=session.user, formdata=None)
    groups = list(paper.last_revision.get_reviewed_for_groups(session.user))
    review_form = build_review_form(paper.last_revision, groups[0]) if len(groups) == 1 else None
    params = {
        'paper': paper,
        'comment_form': comment_form,
        'review_form': review_form,
        'judgment_form': PaperJudgmentForm(formdata=None, paper=paper),
        'revision_form': PaperSubmissionForm(formdata=None),
    }
    if not view_class:
        return render_template('events/papers/paper.html', no_javascript=True, standalone=True, **params)
    return view_class.render_template('paper.html', paper.event, **params)
class WPDisplayReviewingArea(WPDisplayPapersBase):
    """Display page for the paper reviewing area."""

    menu_entry_name = 'paper_reviewing_area'


class WPDisplayCallForPapers(WPDisplayPapersBase):
    """Display page for the call for papers."""

    menu_entry_name = 'call_for_papers'


class WPNewDisplayCallForPapers(WPDisplayPapersBase):
    """Call-for-papers display page variant with its own CSS bundle."""

    menu_entry_name = 'call_for_papers'
    bundles = ('module_events.papers.css',)
| mic4ael/indico | indico/modules/events/papers/views.py | Python | mit | 2,534 |
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import os
import json
import logging
import bpy
from bpy_extras.io_utils import ExportHelper
from bpy.props import (
EnumProperty,
BoolProperty,
FloatProperty,
IntProperty,
StringProperty
)
from . import constants
# Addon-wide logger: every message is prefixed with THREE so it can be
# spotted in Blender's console output.
logging.basicConfig(
    format='%(levelname)s:THREE:%(message)s',
    level=logging.DEBUG)

# Name of the JSON file (in Blender's temp dir) used to persist the last
# export settings between export dialog invocations.
SETTINGS_FILE_EXPORT = 'three_settings_export.js'

# Standard Blender addon metadata block; read by Blender's addon manager.
bl_info = {
    'name': "Three.js Format",
    'author': "repsac, mrdoob, yomotsu, mpk, jpweeks",
    'version': (1, 2, 3),
    'blender': (2, 7, 3),
    'location': "File > Export",
    'description': "Export Three.js formatted JSON files.",
    'warning': "Importer not included.",
    'wiki_url': "https://github.com/mrdoob/three.js/tree/"\
                "master/utils/exporters/blender",
    'tracker_url': "https://github.com/mrdoob/three.js/issues",
    'category': 'Import-Export'
}
def _geometry_types():
    """The valid geometry types that are supported by Three.js

    Each entry is an (identifier, title-cased label, description) tuple
    in the shape Blender's EnumProperty expects.

    :return: list of tuples
    """
    supported = (constants.GLOBAL,
                 constants.GEOMETRY,
                 constants.BUFFER_GEOMETRY)
    return [(name, name.title(), name) for name in supported]
# Attach a per-mesh enum so each mesh can pick its Three.js geometry
# representation; the addon-wide default is the "global" setting.
bpy.types.Mesh.THREE_geometry_type = EnumProperty(
    name="Geometry type",
    description="Geometry type",
    items=_geometry_types(),
    default=constants.GLOBAL)
class ThreeMesh(bpy.types.Panel):
    """Creates custom properties on a mesh node"""
    bl_label = 'THREE'
    bl_space_type = 'PROPERTIES'
    bl_region_type = 'WINDOW'
    bl_context = 'data'

    def draw(self, context):
        """Draw the geometry-type selector for the active mesh.

        :param context:
        """
        prop_row = self.layout.row()
        mesh = context.mesh
        if mesh:
            prop_row.prop(mesh,
                          'THREE_geometry_type',
                          text="Type")
def _blending_types(index):
    """Supported blending types for Three.js

    :param index:
    :type index: int
    :returns: tuple if types (str, str, str)
    """
    blend_modes = (constants.BLENDING_TYPES.NONE,
                   constants.BLENDING_TYPES.NORMAL,
                   constants.BLENDING_TYPES.ADDITIVE,
                   constants.BLENDING_TYPES.SUBTRACTIVE,
                   constants.BLENDING_TYPES.MULTIPLY,
                   constants.BLENDING_TYPES.CUSTOM)
    selected = blend_modes[index]
    return (selected, selected, selected)
# Material-level custom properties for the exporter.
# NOTE(review): range(5) exposes only the first five blending types, so
# BLENDING_TYPES.CUSTOM (index 5) is never selectable here -- confirm
# whether excluding CUSTOM is intentional.
bpy.types.Material.THREE_blending_type = EnumProperty(
    name="Blending type",
    description="Blending type",
    items=[_blending_types(x) for x in range(5)],
    default=constants.BLENDING_TYPES.NORMAL)
# Depth buffer flags forwarded to the Three.js material.
bpy.types.Material.THREE_depth_write = BoolProperty(default=True)
bpy.types.Material.THREE_depth_test = BoolProperty(default=True)
class ThreeMaterial(bpy.types.Panel):
    """Adds custom properties to the Materials of an object"""
    bl_label = 'THREE'
    bl_space_type = 'PROPERTIES'
    bl_region_type = 'WINDOW'
    bl_context = 'material'

    def draw(self, context):
        """Draw the Three.js blending/depth controls for the material.

        :param context:
        """
        panel = self.layout
        material = context.material
        if material is None:
            return
        panel.row().label(text="Selected material: %s" % material.name)
        panel.row().prop(material, 'THREE_blending_type',
                         text="Blending type")
        panel.row().prop(material, 'THREE_depth_write',
                         text="Enable depth writing")
        panel.row().prop(material, 'THREE_depth_test',
                         text="Enable depth testing")
def _mag_filters(index):
    """Three.js mag filters

    :param index:
    :type index: int
    :returns: tuple with the filter values
    """
    choice = (constants.LINEAR_FILTERS.LINEAR,
              constants.NEAREST_FILTERS.NEAREST)[index]
    return (choice, choice, choice)
# Per-texture magnification filter, defaulting to linear filtering.
bpy.types.Texture.THREE_mag_filter = EnumProperty(
    name="Mag Filter",
    items=[_mag_filters(x) for x in range(2)],
    default=constants.LINEAR_FILTERS.LINEAR)
def _min_filters(index):
    """Three.js min filters

    :param index:
    :type index: int
    :returns: tuple with the filter values
    """
    choice = (constants.LINEAR_FILTERS.LINEAR,
              constants.LINEAR_FILTERS.MIP_MAP_NEAREST,
              constants.LINEAR_FILTERS.MIP_MAP_LINEAR,
              constants.NEAREST_FILTERS.NEAREST,
              constants.NEAREST_FILTERS.MIP_MAP_NEAREST,
              constants.NEAREST_FILTERS.MIP_MAP_LINEAR)[index]
    return (choice, choice, choice)
# Per-texture minification filter, defaulting to trilinear (mipmap linear).
bpy.types.Texture.THREE_min_filter = EnumProperty(
    name="Min Filter",
    items=[_min_filters(x) for x in range(6)],
    default=constants.LINEAR_FILTERS.MIP_MAP_LINEAR)
def _mapping(index):
    """Three.js texture mappings types

    :param index:
    :type index: int
    :returns: tuple with the mapping values
    """
    choice = (constants.MAPPING_TYPES.UV,
              constants.MAPPING_TYPES.CUBE_REFLECTION,
              constants.MAPPING_TYPES.CUBE_REFRACTION,
              constants.MAPPING_TYPES.SPHERICAL_REFLECTION)[index]
    return (choice, choice, choice)
# Per-texture mapping mode (UV, cube, spherical), defaulting to UV.
bpy.types.Texture.THREE_mapping = EnumProperty(
    name="Mapping",
    items=[_mapping(x) for x in range(4)],
    default=constants.MAPPING_TYPES.UV)
class ThreeTexture(bpy.types.Panel):
    """Adds custom properties to a texture"""
    bl_label = 'THREE'
    bl_space_type = 'PROPERTIES'
    bl_region_type = 'WINDOW'
    bl_context = 'texture'

    #@TODO: possible to make cycles compatible?
    def draw(self, context):
        """Draw mapping and filter controls for the active texture.

        :param context:
        """
        texture = context.texture
        if texture is None:
            return
        panel = self.layout
        for prop_name, label in (('THREE_mapping', "Mapping"),
                                 ('THREE_mag_filter', "Mag Filter"),
                                 ('THREE_min_filter', "Min Filter")):
            panel.row().prop(texture, prop_name, text=label)
# Per-object toggle: untick in the object panel to exclude it from export.
bpy.types.Object.THREE_export = bpy.props.BoolProperty(default=True)
class ThreeObject(bpy.types.Panel):
    """Adds custom properties to an object"""
    bl_label = 'THREE'
    bl_space_type = 'PROPERTIES'
    bl_region_type = 'WINDOW'
    bl_context = 'object'

    def draw(self, context):
        """Draw the per-object export toggle.

        :param context:
        """
        export_row = self.layout.row()
        export_row.prop(context.object, 'THREE_export', text='Export')
def get_settings_fullpath():
    """
    :returns: Full path to the settings file (temp directory)
    """
    temp_dir = bpy.app.tempdir
    return os.path.join(temp_dir, SETTINGS_FILE_EXPORT)
def save_settings_export(properties):
    """Save the current export settings to disk.

    Collects every option from the export operator's *properties* into a
    flat dict keyed by the exporter's constants, dumps it as JSON into the
    temp-dir settings file, and returns the dict so the caller can pass it
    straight to the exporter.

    :param properties:
    :returns: settings
    :rtype: dict
    """
    settings = {
        constants.VERTICES: properties.option_vertices,
        constants.FACES: properties.option_faces,
        constants.NORMALS: properties.option_normals,
        constants.SKINNING: properties.option_skinning,
        constants.BONES: properties.option_bones,
        constants.GEOMETRY_TYPE: properties.option_geometry_type,
        constants.MATERIALS: properties.option_materials,
        constants.UVS: properties.option_uv_coords,
        constants.FACE_MATERIALS: properties.option_face_materials,
        constants.MAPS: properties.option_maps,
        constants.COLORS: properties.option_colors,
        constants.MIX_COLORS: properties.option_mix_colors,
        constants.SCALE: properties.option_scale,
        constants.ENABLE_PRECISION: properties.option_round_off,
        constants.PRECISION: properties.option_round_value,
        constants.LOGGING: properties.option_logging,
        constants.COMPRESSION: properties.option_compression,
        constants.INDENT: properties.option_indent,
        constants.COPY_TEXTURES: properties.option_copy_textures,
        constants.TEXTURE_FOLDER: properties.option_texture_folder,
        constants.SCENE: properties.option_export_scene,
        #constants.EMBED_GEOMETRY: properties.option_embed_geometry,
        constants.EMBED_ANIMATION: properties.option_embed_animation,
        constants.LIGHTS: properties.option_lights,
        constants.CAMERAS: properties.option_cameras,
        constants.MORPH_TARGETS: properties.option_animation_morph,
        constants.ANIMATION: properties.option_animation_skeletal,
        constants.FRAME_STEP: properties.option_frame_step,
        constants.FRAME_INDEX_AS_TIME: properties.option_frame_index_as_time,
        constants.INFLUENCES_PER_VERTEX: properties.option_influences
    }

    # Persist to the temp dir so the next invocation of the export dialog
    # can restore these values (see restore_settings_export).
    fname = get_settings_fullpath()
    logging.debug("Saving settings to %s", fname)
    with open(fname, 'w') as stream:
        json.dump(settings, stream)
    return settings
def restore_settings_export(properties):
    """Restore the settings (if settings file is found on disk)

    If not found then default to the parameters defined in
    constants.EXPORT_OPTIONS

    :param properties:
    """
    settings = {}

    fname = get_settings_fullpath()
    if os.path.exists(fname) and os.access(fname, os.R_OK):
        logging.debug("Settings cache found %s", fname)
        with open(fname, 'r') as fs:
            settings = json.load(fs)
    else:
        logging.debug("No settings file found, using defaults.")

    # Each option falls back to its EXPORT_OPTIONS default when the key is
    # missing from the cached settings dict.
    ## Geometry {
    properties.option_vertices = settings.get(
        constants.VERTICES,
        constants.EXPORT_OPTIONS[constants.VERTICES])

    properties.option_faces = settings.get(
        constants.FACES,
        constants.EXPORT_OPTIONS[constants.FACES])

    properties.option_normals = settings.get(
        constants.NORMALS,
        constants.EXPORT_OPTIONS[constants.NORMALS])

    properties.option_skinning = settings.get(
        constants.SKINNING,
        constants.EXPORT_OPTIONS[constants.SKINNING])

    properties.option_bones = settings.get(
        constants.BONES,
        constants.EXPORT_OPTIONS[constants.BONES])

    properties.option_influences = settings.get(
        constants.INFLUENCES_PER_VERTEX,
        constants.EXPORT_OPTIONS[constants.INFLUENCES_PER_VERTEX])

    properties.option_geometry_type = settings.get(
        constants.GEOMETRY_TYPE,
        constants.EXPORT_OPTIONS[constants.GEOMETRY_TYPE])
    ## }

    ## Materials {
    properties.option_materials = settings.get(
        constants.MATERIALS,
        constants.EXPORT_OPTIONS[constants.MATERIALS])

    properties.option_uv_coords = settings.get(
        constants.UVS,
        constants.EXPORT_OPTIONS[constants.UVS])

    properties.option_face_materials = settings.get(
        constants.FACE_MATERIALS,
        constants.EXPORT_OPTIONS[constants.FACE_MATERIALS])

    properties.option_maps = settings.get(
        constants.MAPS,
        constants.EXPORT_OPTIONS[constants.MAPS])

    properties.option_colors = settings.get(
        constants.COLORS,
        constants.EXPORT_OPTIONS[constants.COLORS])

    properties.option_mix_colors = settings.get(
        constants.MIX_COLORS,
        constants.EXPORT_OPTIONS[constants.MIX_COLORS])
    ## }

    ## Settings {
    properties.option_scale = settings.get(
        constants.SCALE,
        constants.EXPORT_OPTIONS[constants.SCALE])

    properties.option_round_off = settings.get(
        constants.ENABLE_PRECISION,
        constants.EXPORT_OPTIONS[constants.ENABLE_PRECISION])

    properties.option_round_value = settings.get(
        constants.PRECISION,
        constants.EXPORT_OPTIONS[constants.PRECISION])

    properties.option_logging = settings.get(
        constants.LOGGING,
        constants.EXPORT_OPTIONS[constants.LOGGING])

    # NOTE: compression deliberately falls back to constants.NONE rather
    # than an EXPORT_OPTIONS entry (no such default key exists).
    properties.option_compression = settings.get(
        constants.COMPRESSION,
        constants.NONE)

    properties.option_indent = settings.get(
        constants.INDENT,
        constants.EXPORT_OPTIONS[constants.INDENT])

    properties.option_copy_textures = settings.get(
        constants.COPY_TEXTURES,
        constants.EXPORT_OPTIONS[constants.COPY_TEXTURES])

    properties.option_texture_folder = settings.get(
        constants.TEXTURE_FOLDER,
        constants.EXPORT_OPTIONS[constants.TEXTURE_FOLDER])

    properties.option_embed_animation = settings.get(
        constants.EMBED_ANIMATION,
        constants.EXPORT_OPTIONS[constants.EMBED_ANIMATION])
    ## }

    ## Scene {
    properties.option_export_scene = settings.get(
        constants.SCENE,
        constants.EXPORT_OPTIONS[constants.SCENE])

    #properties.option_embed_geometry = settings.get(
    #    constants.EMBED_GEOMETRY,
    #    constants.EXPORT_OPTIONS[constants.EMBED_GEOMETRY])

    properties.option_lights = settings.get(
        constants.LIGHTS,
        constants.EXPORT_OPTIONS[constants.LIGHTS])

    properties.option_cameras = settings.get(
        constants.CAMERAS,
        constants.EXPORT_OPTIONS[constants.CAMERAS])
    ## }

    ## Animation {
    properties.option_animation_morph = settings.get(
        constants.MORPH_TARGETS,
        constants.EXPORT_OPTIONS[constants.MORPH_TARGETS])

    properties.option_animation_skeletal = settings.get(
        constants.ANIMATION,
        constants.EXPORT_OPTIONS[constants.ANIMATION])

    properties.option_frame_step = settings.get(
        constants.FRAME_STEP,
        constants.EXPORT_OPTIONS[constants.FRAME_STEP])

    properties.option_frame_index_as_time = settings.get(
        constants.FRAME_INDEX_AS_TIME,
        constants.EXPORT_OPTIONS[constants.FRAME_INDEX_AS_TIME])
    ## }
def compression_types():
    """Supported compression formats

    msgpack is only offered when the module can actually be imported.

    :rtype: tuple
    """
    formats = [(constants.NONE, constants.NONE, constants.NONE)]
    try:
        import msgpack  # noqa -- only probing availability
    except ImportError:
        pass
    else:
        formats.append((constants.MSGPACK, constants.MSGPACK,
                        constants.MSGPACK))
    return formats
def animation_options():
    """The supported skeletal animation types

    :returns: list of tuples
    """
    return [(mode, mode.title(), mode)
            for mode in (constants.OFF, constants.POSE, constants.REST)]
class ExportThree(bpy.types.Operator, ExportHelper):
    """Export operator: collects all Three.js export options and runs
    the io_three exporter when executed."""
    bl_idname = 'export.three'
    bl_label = 'Export THREE'
    filename_ext = constants.EXTENSION

    ## Geometry options
    option_vertices = BoolProperty(
        name="Vertices",
        description="Export vertices",
        default=constants.EXPORT_OPTIONS[constants.VERTICES])
    option_faces = BoolProperty(
        name="Faces",
        description="Export faces",
        default=constants.EXPORT_OPTIONS[constants.FACES])
    option_normals = BoolProperty(
        name="Normals",
        description="Export normals",
        default=constants.EXPORT_OPTIONS[constants.NORMALS])
    option_colors = BoolProperty(
        name="Vertex Colors",
        description="Export vertex colors",
        default=constants.EXPORT_OPTIONS[constants.COLORS])
    option_mix_colors = BoolProperty(
        name="Mix Colors",
        description="Mix material and vertex colors",
        default=constants.EXPORT_OPTIONS[constants.MIX_COLORS])
    option_uv_coords = BoolProperty(
        name="UVs",
        description="Export texture coordinates",
        default=constants.EXPORT_OPTIONS[constants.UVS])
    option_materials = BoolProperty(
        name="Materials",
        description="Export materials",
        default=constants.EXPORT_OPTIONS[constants.MATERIALS])
    option_face_materials = BoolProperty(
        name="Face Materials",
        description="Face mapping materials",
        default=constants.EXPORT_OPTIONS[constants.FACE_MATERIALS])
    option_maps = BoolProperty(
        name="Textures",
        description="Include texture maps",
        default=constants.EXPORT_OPTIONS[constants.MAPS])
    option_skinning = BoolProperty(
        name="Skinning",
        description="Export skin data",
        default=constants.EXPORT_OPTIONS[constants.SKINNING])
    option_bones = BoolProperty(
        name="Bones",
        description="Export bones",
        default=constants.EXPORT_OPTIONS[constants.BONES])
    option_scale = FloatProperty(
        name="Scale",
        description="Scale vertices",
        min=0.01,
        max=1000.0,
        soft_min=0.01,
        soft_max=1000.0,
        default=constants.EXPORT_OPTIONS[constants.SCALE])
    option_round_off = BoolProperty(
        name="Enable Precision",
        description="Round off floating point values",
        default=constants.EXPORT_OPTIONS[constants.ENABLE_PRECISION])
    option_round_value = IntProperty(
        name="Precision",
        min=0,
        max=16,
        description="Floating point precision",
        default=constants.EXPORT_OPTIONS[constants.PRECISION])
    ## Logging / misc options
    logging_types = [
        (constants.DEBUG, constants.DEBUG, constants.DEBUG),
        (constants.INFO, constants.INFO, constants.INFO),
        (constants.WARNING, constants.WARNING, constants.WARNING),
        (constants.ERROR, constants.ERROR, constants.ERROR),
        (constants.CRITICAL, constants.CRITICAL, constants.CRITICAL)]
    option_logging = EnumProperty(
        name="",
        description="Logging verbosity level",
        items=logging_types,
        default=constants.DEBUG)
    # [1:] drops the "global" entry -- an explicit type must be chosen here.
    option_geometry_type = EnumProperty(
        name="Type",
        description="Geometry type",
        items=_geometry_types()[1:],
        default=constants.GEOMETRY)
    ## Scene options
    option_export_scene = BoolProperty(
        name="Scene",
        description="Export scene",
        default=constants.EXPORT_OPTIONS[constants.SCENE])
    #@TODO: removing this option since the ObjectLoader doesn't have
    # support for handling external geometry data
    #option_embed_geometry = BoolProperty(
    #    name="Embed geometry",
    #    description="Embed geometry",
    #    default=constants.EXPORT_OPTIONS[constants.EMBED_GEOMETRY])
    option_embed_animation = BoolProperty(
        name="Embed animation",
        description="Embed animation data with the geometry data",
        default=constants.EXPORT_OPTIONS[constants.EMBED_ANIMATION])
    option_copy_textures = BoolProperty(
        name="Copy textures",
        description="Copy textures",
        default=constants.EXPORT_OPTIONS[constants.COPY_TEXTURES])
    option_texture_folder = StringProperty(name="Texture folder",
        description="add this folder to textures path", default="")
    option_lights = BoolProperty(
        name="Lights",
        description="Export default scene lights",
        default=False)
    option_cameras = BoolProperty(
        name="Cameras",
        description="Export default scene cameras",
        default=False)
    ## Animation options
    option_animation_morph = BoolProperty(
        name="Morph animation",
        description="Export animation (morphs)",
        default=constants.EXPORT_OPTIONS[constants.MORPH_TARGETS])
    option_animation_skeletal = EnumProperty(
        name="",
        description="Export animation (skeletal)",
        items=animation_options(),
        default=constants.OFF)
    option_frame_index_as_time = BoolProperty(
        name="Frame index as time",
        description="Use (original) frame index as frame time",
        default=constants.EXPORT_OPTIONS[constants.FRAME_INDEX_AS_TIME])
    option_frame_step = IntProperty(
        name="Frame step",
        description="Animation frame step",
        min=1,
        max=1000,
        soft_min=1,
        soft_max=1000,
        default=1)
    option_indent = BoolProperty(
        name="Indent JSON",
        description="Disable this to reduce the file size",
        default=constants.EXPORT_OPTIONS[constants.INDENT])
    option_compression = EnumProperty(
        name="",
        description="Compression options",
        items=compression_types(),
        default=constants.NONE)
    option_influences = IntProperty(
        name="Influences",
        description="Maximum number of bone influences",
        min=1,
        max=4,
        default=2)

    def invoke(self, context, event):
        """Restore the cached settings before showing the file dialog.

        :param context:
        :param event:
        """
        restore_settings_export(self.properties)
        return ExportHelper.invoke(self, context, event)

    @classmethod
    def poll(cls, context):
        """Operator is only available when there is an active object.

        :param context:
        """
        return context.active_object is not None

    def execute(self, context):
        """Persist the chosen settings and run the exporter.

        :param context:
        """
        if not self.properties.filepath:
            raise Exception("filename not set")

        settings = save_settings_export(self.properties)
        settings['addon_version'] = bl_info['version']

        filepath = self.filepath
        if settings[constants.COMPRESSION] == constants.MSGPACK:
            # NOTE(review): [:-4] assumes a four-character extension
            # (e.g. ".json") -- confirm constants.EXTENSION length.
            filepath = "%s%s" % (filepath[:-4], constants.PACK)

        from io_three import exporter
        if settings[constants.SCENE]:
            exporter.export_scene(filepath, settings)
        else:
            exporter.export_geometry(filepath, settings)

        return {'FINISHED'}

    def draw(self, context):
        """Lay out the export options in the file-browser sidebar.

        :param context:
        """
        layout = self.layout

        ## Geometry {
        row = layout.row()
        row.label(text="GEOMETRY:")

        row = layout.row()
        row.prop(self.properties, 'option_vertices')
        row.prop(self.properties, 'option_faces')

        row = layout.row()
        row.prop(self.properties, 'option_normals')
        row.prop(self.properties, 'option_uv_coords')

        row = layout.row()
        row.prop(self.properties, 'option_bones')
        row.prop(self.properties, 'option_skinning')

        row = layout.row()
        row.prop(self.properties, 'option_geometry_type')
        ## }

        layout.separator()

        ## Materials {
        row = layout.row()
        row.label(text="- Shading:")

        row = layout.row()
        row.prop(self.properties, 'option_face_materials')

        row = layout.row()
        row.prop(self.properties, 'option_colors')

        row = layout.row()
        row.prop(self.properties, 'option_mix_colors')
        ## }

        layout.separator()

        ## Animation {
        row = layout.row()
        row.label(text="- Animation:")

        row = layout.row()
        row.prop(self.properties, 'option_animation_morph')

        row = layout.row()
        row.label(text="Skeletal animations:")

        row = layout.row()
        row.prop(self.properties, 'option_animation_skeletal')
        layout.row()

        row = layout.row()
        row.prop(self.properties, 'option_influences')

        row = layout.row()
        row.prop(self.properties, 'option_frame_step')

        row = layout.row()
        row.prop(self.properties, 'option_frame_index_as_time')

        row = layout.row()
        row.prop(self.properties, 'option_embed_animation')
        ## }

        layout.separator()

        ## Scene {
        row = layout.row()
        row.label(text="SCENE:")

        row = layout.row()
        row.prop(self.properties, 'option_export_scene')
        row.prop(self.properties, 'option_materials')

        #row = layout.row()
        #row.prop(self.properties, 'option_embed_geometry')

        row = layout.row()
        row.prop(self.properties, 'option_lights')
        row.prop(self.properties, 'option_cameras')
        ## }

        layout.separator()

        ## Settings {
        row = layout.row()
        row.label(text="SETTINGS:")

        row = layout.row()
        row.prop(self.properties, 'option_maps')

        row = layout.row()
        row.prop(self.properties, 'option_copy_textures')

        row = layout.row()
        row.prop(self.properties, "option_texture_folder")

        row = layout.row()
        row.prop(self.properties, 'option_scale')
        layout.row()

        row = layout.row()
        row.prop(self.properties, 'option_round_off')

        row = layout.row()
        row.prop(self.properties, 'option_round_value')
        layout.row()

        row = layout.row()
        row.label(text="Logging verbosity:")

        row = layout.row()
        row.prop(self.properties, 'option_logging')

        row = layout.row()
        row.label(text="File compression format:")

        row = layout.row()
        row.prop(self.properties, 'option_compression')

        row = layout.row()
        row.prop(self.properties, 'option_indent')
        ## }
def menu_func_export(self, context):
    """Add the Three.js entry to the File > Export menu.

    :param self: the menu instance Blender passes in
    :param context: Blender context (unused)
    """
    blend_path = bpy.data.filepath
    # BUG FIX: str.replace('.blend', ...) also mangled a ".blend"
    # occurring anywhere else in the path; only strip a trailing suffix.
    if blend_path.endswith('.blend'):
        default_path = blend_path[:-len('.blend')] + constants.EXTENSION
    else:
        default_path = blend_path
    text = "Three.js (%s)" % constants.EXTENSION
    operator = self.layout.operator(ExportThree.bl_idname, text=text)
    operator.filepath = default_path
def register():
    """Registers the addon (Blender boilerplate)"""
    bpy.utils.register_module(__name__)
    # Hook our export entry into File > Export.
    bpy.types.INFO_MT_file_export.append(menu_func_export)
def unregister():
    """Unregisters the addon (Blender boilerplate)"""
    bpy.utils.unregister_module(__name__)
    # Remove our File > Export menu entry again.
    bpy.types.INFO_MT_file_export.remove(menu_func_export)
# Allow running this file directly from Blender's text editor.
if __name__ == '__main__':
    register()
| archilogic-com/three.js | utils/exporters/blender/addons/io_three/__init__.py | Python | mit | 25,909 |
# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r
# Template for setuptools "develop"-mode entry scripts; the %(...)r
# placeholders are substituted by easy_install at install time.
__requires__ = """%(spec)r"""
from pkg_resources import require; require("""%(spec)r""")
del require
__file__ = """%(dev_path)r"""
# NOTE(review): execfile exists only on Python 2 -- presumably this
# template targets Python 2; confirm before reusing on Python 3.
execfile(__file__)
| chriskuehl/kloudless-status | venv/lib/python2.7/site-packages/distribute-0.6.34-py2.7.egg/setuptools/script template (dev).py | Python | mit | 202 |
from django.conf.urls import url, include
from rest_framework.urlpatterns import format_suffix_patterns
from views import customer_views, inventory_views, \
shipment_views, user_views, auth_views, contact_view, \
workorder_views
# URL routes for the tracker API.
urlpatterns = [
    url(r'^users/(?P<pk>[0-9]+)/$', user_views.UserDetail.as_view(), name = 'user_detail'),
    # url(r'^shipments/$', shipment_views.ShipmentList.as_view(), name = 'shipment_list'),
    # url(r'^shipments/(?P<shipid>[0-9]+)/$', shipment_views.ShipmentDetail.as_view(), name = 'shipment_detail'),
    # url(r'^customers/$', customer_views.CustomerList.as_view(), name = 'customer_list'),
    url(r'^customers/(?P<acct>[0-9]+)/$', customer_views.CustomerDetail.as_view(), name = 'customer_detail'),
    # url(r'^inventory/$', inventory_views.InventoryList.as_view(), name = 'inventory_list'),
    # url(r'^inventory/(?P<itemid>[0-9]+)/$', inventory_views.InventoryDetail.as_view(), name = 'inventory_detail'),
    url(r'^submitorder/(?P<acct>[0-9]+)/$', user_views.receive_work_order, name = 'submit_order'),
    # url(r'^invoice/(?P<shipid>[0-9]+)/$', shipment_views.shipment_report, name = 'invoice'),
    # url(r'^invoice/(?P<shipid>[0-9]+)/$', shipment_views.ShipmentInvoice.as_view(), name = 'invoice'),
    # BUG FIX: was r'^workorders/(?P<id>[0-9])+/$' -- the '+' sat outside
    # the capture group, so `id` only received the LAST digit of the pk.
    url(r'^workorders/(?P<id>[0-9]+)/$', workorder_views.WorkOrderDetail.as_view()),
    url(r'^workorders/$', workorder_views.WorkOrderList.as_view(), name = 'workorder_list'),
    url(r'^contact/$', contact_view.contact_us, name = 'contact_us'),
    url(r'^auth/login/$', auth_views.CustomTokenLogin.as_view(), name = 'login'),
    url(r'^auth/', include('rest_auth.urls')) # This URL regex can't be terminated, so rest_auth.urls can take over
]

urlpatterns = format_suffix_patterns(urlpatterns)
| dannysellers/django_orders | tracker/api/urls.py | Python | gpl-2.0 | 1,763 |
# -*- coding: utf-8 -*-
# petgfunctions.py - This python "helper" script holds a lot of functions
# Copyright (c) 2012-2014 Harry van der Wolf. All rights reserved.
# This program or module is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public Licence as published
# by the Free Software Foundation, either version 2 of the Licence, or
# version 3 of the Licence, or (at your option) any later version. It is
# provided for educational purposes and is distributed in the hope that
# it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
# the GNU General Public Licence for more details.
# This file is part of pyexiftoolgui.
# pyexiftoolgui is a pySide script program that reads and writes
# gps tags from/to files. It can use a "reference" image to write the
# gps tags to a multiple set of files that are taken at the same
# location.
# pyexiftoolgui is a graphical frontend for the open source
# command line tool exiftool by Phil Harvey, but it's not
# a complete exiftool gui: not at all.
import os, sys, platform, shlex, subprocess, time, re, string, datetime, math
import PySide
from PySide.QtCore import *
from PySide.QtGui import *
import programinfo
import programstrings
import petgfilehandling
from ui_create_args import Ui_Dialog_create_args
from ui_export_metadata import Ui_Dialog_export_metadata
from ui_remove_metadata import Ui_Dialog_remove_metadata
from ui_modifydatetime import Ui_DateTimeDialog
from ui_syncdatetime import Ui_SyncDateTimeTagsDialog
#------------------------------------------------------------------------
# All kind of functions
###################################################################################################################
# Start of Startup checks
###################################################################################################################
def remove_workspace( self ):
    """Delete the temporary workspace directory and everything in it.

    Uses shutil.rmtree instead of shelling out to ``rm -rf`` /
    ``rmdir /S /Q``: it is portable, needs no per-platform branch, and
    cannot break (or worse) on paths containing spaces or shell
    metacharacters. The success/error console messages are preserved.
    """
    import shutil

    print(self.tmpworkdir)
    try:
        shutil.rmtree(self.tmpworkdir)
    except OSError as err:
        print(("Error removing " + self.tmpworkdir + " and it contents."))
        print(err)
    else:
        print(("Removed " + self.tmpworkdir + " and it contents."))
def is_executable(fpath):
    """Return True when *fpath* is an existing file with execute access."""
    if not os.path.isfile(fpath):
        return False
    return os.access(fpath, os.X_OK)
def check_for_program(program):
    """Return True when *program* resolves to an executable somewhere
    on the PATH environment variable."""
    search_dirs = os.environ["PATH"].split(os.pathsep)
    return any(is_executable(os.path.join(directory, program))
               for directory in search_dirs)
# End of function check_for_program and is_executable (mini sub for check_for_program)
def exiftool_version_level_text(self):
    """Update the GPano hint label and status bar for the detected
    exiftool version (feature thresholds at 9.07 and 9.09)."""
    version = float(self.exiftoolversion)
    if version < 9.07:
        # Too old to write GPano tags at all; reading still works.
        self.statusbar.showMessage("I will disable the GPano options as exiftool >=9.07 is required. You have " + str(self.exiftoolversion))
        leveltext = "Your exiftool version is " + str(self.exiftoolversion) + " . You need >=9.07 to write to images.\n"
        leveltext += "Exiftool and therefore pyExifToolGUI can read the tags. See the View Data tab."
        self.lbl_exiftool_leveltext.setText(leveltext)
        return
    leveltext = "Your exiftool version is " + str(self.exiftoolversion) + " . Tags marked with * are obligatory. "
    if version < 9.09:
        leveltext += "\"Pose Heading Degrees\" is necessary to make it also function in Google Maps.\n Tags marked with *** are only writable with exiftool >= 9.09"
    else:
        leveltext += "\"Pose Heading Degrees\" is necessary to make it also function in Google Maps. Tags marked with *** are only writable with exiftool >= 9.09"
    self.lbl_exiftool_leveltext.setText(leveltext)
    self.statusbar.showMessage("Your exiftoolversion is " + str(self.exiftoolversion))
#print "exiftoolversion : " + self.exiftoolversion
def find_on_path(tool):
    """ Find the first occurrence of a tool on the path."""
    for directory in os.environ["PATH"].split(os.pathsep):
        candidate = os.path.join(directory, tool)
        if os.path.exists(candidate):
            return candidate
    return None
def tool_check( self ):
    """Locate and validate the exiftool executable at startup.

    Resolves the exiftool path (user preference, PATH lookup, or a
    file-selection dialog as last resort), then queries its version and
    supported tag languages into self.exiftoolversion /
    self.exiftoollanguages and updates the GPano level text.

    BUG FIX: webbrowser was used on the "user canceled" path but never
    imported anywhere in this module, which raised a NameError instead
    of opening the exiftool website; imported locally here.
    """
    import webbrowser

    # We need this startup check as long as we don't have a package
    # that deals with dependencies
    if self.alternate_exiftool == True:
        self.exiftoolprog = self.exiftooloption.text()
    else:
        self.exiftoolprog = "exiftool"
        if (self.OSplatform in ("Windows", "win32")):
            self.exiftoolprog = find_on_path("exiftool.exe")
        elif self.OSplatform == "Darwin":
            self.exiftoolprog = find_on_path("exiftool")
        #else:
        #    self.exiftoolprog = find_on_path("exiftool")

    # Check for exiftool, based on the setting or no setting above
    if (self.OSplatform in ("Windows", "win32")):
        if ("exiftool.exe" in self.exiftoolprog) or ("Exiftool.exe" in self.exiftoolprog) or not self.exiftoolprog:
            #self.exiftool_dir = os.path.join(self.realfile_dir, "exiftool", "exiftool.exe")
            #self.exiftoolprog = self.exiftool_dir + "\exiftool.exe"
            if not os.path.isfile(self.exiftoolprog):
                configure_message = "exiftool is missing or incorrectly configured in Preferences!\n"
                configure_message += "This tool is an absolute must have!\nPlease set the correct location or install exiftool first.\n\n"
                configure_message += "If your exiftool is named \"exiftool(-k).exe\", rename it to \"exiftool.exe\""
                ret = QMessageBox.critical(self, "exiftool is missing or incorrectly configured", configure_message)
                result = self.select_exiftool()
                #print str(result)
                if result == "":
                    ret = QMessageBox.critical(self, "Canceled exiftool selection", "You canceled the exiftool selection.\nThe program will quit!\nFirst install exiftool or restart this program and select the correct exiftool.\nI will now (try to) open the exiftool website.")
                    try:
                        webbrowser.open("http://www.sno.phy.queensu.ca/~phil/exiftool/")
                    finally:
                        sys.exit()
                else:
                    self.exiftoolprog = result
        # Windows: run through the shell so the quoted path is honoured.
        #Check exiftool version
        args = '"' + self.exiftoolprog + '" -ver'
        self.exiftoolversion = subprocess.check_output(args, shell=True)
        # now check for the supported languages
        args = '"' + self.exiftoolprog + '" -lang'
        self.exiftoollanguages = subprocess.check_output(args, shell=True)
    else:
        if not check_for_program(self.exiftoolprog):
            configure_message = "exiftool is missing or incorrectly configured in Preferences!\n"
            configure_message += "This tool is an absolute must have!\nPlease set the correct location or install exiftool first."
            ret = QMessageBox.critical(self, "exiftool is missing or incorrectly configured", configure_message)
            result = self.select_exiftool()
            #print str(result)
            if result == "":
                ret = QMessageBox.critical(self, "Canceled exiftool selection", "You canceled the exiftool selection.\nThe program will quit!\nFirst install exiftool or restart this program and select the correct exiftool.\nI will now (try to) open the exiftool website.")
                try:
                    webbrowser.open("http://www.sno.phy.queensu.ca/~phil/exiftool/")
                finally:
                    sys.exit()
            else:
                self.exiftoolprog = result
        # Unix-likes: no shell needed, split the quoted command safely.
        #Check exiftool version
        command_line = '"' + self.exiftoolprog + '" -ver'
        args = shlex.split(command_line)
        self.exiftoolversion = subprocess.check_output(args)
        # now check for the supported languages
        command_line = '"' + self.exiftoolprog + '" -lang'
        args = shlex.split(command_line)
        self.exiftoollanguages = subprocess.check_output(args)

    # remove last character which is the final ending \n (where \ is only the escape character)
    self.exiftoolversion = self.exiftoolversion[:-1]
    exiftool_version_level_text(self)
# End of function tool_check
def exitool_languages(self):
    """Parse the output of ``exiftool -lang`` into the language combobox.

    Fills self.comboBox_languages with the short language codes and
    self.longlanguages (parallel list, offset by one leading blank entry)
    with the human-readable names.

    NOTE(review): longlang.decode('utf-8') implies the subprocess output
    is bytes (Python 2 str) -- on Python 3 these lines would need
    adjusting; confirm the targeted interpreter.
    """
    # First remove first line
    self.exiftoollanguages = self.exiftoollanguages.splitlines(True)[1:]
    dropdownlanguages = []
    self.longlanguages = []
    # Leading blank entry so combobox index 0 maps to "no language chosen".
    self.longlanguages.append(" ")
    for language in self.exiftoollanguages:
        try:
            # Each line looks like "<code> - <full name>".
            shortlang, longlang = re.split(' - ',language,1)
            shortlang = shortlang.strip()
            dropdownlanguages.append(shortlang)
            longlang = longlang.strip()
            self.longlanguages.append(longlang.decode('utf-8'))
            #print("shortlang: " + shortlang + "; longlang: " + longlang)
        except:
            print("last character doesn't work. Only here in case that happens.")

    self.comboBox_languages.addItems(dropdownlanguages)
###################################################################################################################
# End of Startup checks
###################################################################################################################
#------------------------------------------------------------------------
# General help messagebox
def help_mbox(self, helptitle, helptext):
    """Show a simple modal help dialog with the given title and text."""
    box = QMessageBox()
    box.setWindowTitle(helptitle)
    box.setText(helptext)
    # Keep a reference on self (as before) so the dialog isn't garbage
    # collected, then run it modally.
    self.helpmbox = box
    self.helpmbox.exec_()
#------------------------------------------------------------------------
# language combobox changed
def comboBox_languageschanged(self):
    """Show the long name of the language selected in the combobox."""
    idx = self.comboBox_languages.currentIndex()
    longname = self.longlanguages[idx]
    self.label_longlanguage.setText("Display language for tags and info: " + longname)
#------------------------------------------------------------------------
# image functions
def images_dialog(self, qApp):
    """Show a multi-select file-open dialog and return the chosen image paths.

    Returns the selected file list, or "" when the user cancels. The start
    directory is the user's configured startup folder, falling back to the
    platform's default pictures folder.
    """
    dialog = QFileDialog(self)
    qApp.processEvents()
    dialog.setFileMode(QFileDialog.ExistingFiles)
    startfolder = self.LineEdit_def_startupfolder.text()
    if startfolder == "":
        # No startup folder configured: use the platform default.
        default_dirs = {
            "Darwin": '~/Pictures',
            "Linux": '~/Pictures',
            "Windows": '~/My Pictures',
        }
        if self.OSplatform in default_dirs:
            dialog.setDirectory(os.path.expanduser(default_dirs[self.OSplatform]))
    else:
        # User has obviously specified a startup folder
        dialog.setDirectory(startfolder)
    qApp.processEvents()
    self.statusbar.showMessage("Loading images")
    qApp.processEvents()
    dialog.setNameFilter("image files (" + programstrings.SUPPORTEDIMAGES + ")\nsupported formats (" + programstrings.SUPPORTEDFORMATS + ")\nAll Files (*.*)")
    dialog.setViewMode(QFileDialog.Detail)
    if dialog.exec_():
        fileNames = dialog.selectedFiles()
        qApp.processEvents()
    else:
        # user canceled
        self.statusbar.showMessage("you canceled loading the images.")
        fileNames = ""
    return fileNames
def imagegridsizes(self, numImages):
    """Return (cols, colswidth) for the thumbnail grid.

    cols is the (float) number of 100px-wide columns, plus 8px padding each,
    that fit in the current main table width. numImages is accepted for
    interface compatibility but not used in the computation.
    """
    colswidth = 100
    table_width = self.MaintableWidget.width()
    cols = table_width / float(colswidth + 8.0)
    return cols, colswidth
def loadimages(self ,fileNames, qApp):
    """Populate the main table widget with the given image files.

    Depending on the "images view" combobox the table is filled either as a
    grid of thumbnail cells ("by cells") or as one row per image
    (thumbnail column + filename column). Thumbnails are only generated when
    the thumbnail-preview preference is ticked.

    Args:
        fileNames: iterable of image paths (may be empty / "" when the user
            canceled the file dialog).
        qApp: the application object; processEvents() keeps the UI
            responsive while thumbnails are built.
    """
    print("Loaded images = " + str(fileNames))
    print("Loaded %d images " % len(fileNames))
    if len(fileNames) < 1:
        # user canceled loading images
        if self.DebugMsg:
            print("user canceled loading images")
    else:
        # Grid geometry; cols is a float here and is only truncated to int
        # further down in the "by cells" branch.
        cols, colwidth = imagegridsizes(self, len(fileNames))
        print(imagegridsizes(self, len(fileNames)))
        self.fileNames = fileNames
        imagestring = ""
        rowcounter = 0
        total_images = len(fileNames)
        self.progressbar.setRange(0, total_images)
        self.progressbar.setValue(0)
        self.progressbar.show()
        qApp.processEvents()
        self.MaintableWidget.clearContents()
        if self.images_view.currentText() == "by cells":
            self.MaintableWidget.setSelectionBehavior(QAbstractItemView.SelectItems)
            # NOTE(review): cols is still a float here, so setRowCount /
            # setColumnCount receive non-int values — presumably tolerated
            # by the Qt bindings in use; confirm.
            self.MaintableWidget.setRowCount(math.ceil(len(fileNames)/cols))
            self.MaintableWidget.setColumnCount(cols)
            cols = int(cols)
        else:
            # Row view: one row per image, thumbnail in col 0, name in col 1.
            self.MaintableWidget.setSelectionBehavior(QAbstractItemView.SelectRows)
            self.MaintableWidget.setRowCount(len(fileNames))
            self.MaintableWidget.setColumnCount(2)
            self.MaintableWidget.setColumnWidth(0,100)
            self.MaintableWidget.setColumnWidth(1,225)
        for loadedimage in fileNames:
            if self.DebugMsg:
                print(rowcounter)
                print(loadedimage + "\n")
            folder,imagefile = os.path.split(loadedimage)
            #self.MaintableWidget.insertRow(rowcounter)
            if self.images_view.currentText() == "by cells":
                pass
            else:
                # Row view: show the bare file name in column 1.
                qtablefilename = QTableWidgetItem(imagefile)
                self.MaintableWidget.setItem(rowcounter, 1, qtablefilename)
            if self.pref_thumbnail_preview.isChecked():
                # Now create the thumbnail to be displayed
                thumbnail = QLabel(self)
                thumbnail.setMargin(8)
                image = QImage(loadedimage)
                thumbnail.setPixmap(QPixmap.fromImage(image))
                thumbnail.setScaledContents(True)
                thumbnail.setToolTip(imagefile)
                # Fill the table
                if self.images_view.currentText() == "by cells":
                    # row = rowcounter // cols, col = rowcounter % cols
                    self.MaintableWidget.setColumnWidth(int(rowcounter%cols),colwidth)
                    self.MaintableWidget.setRowHeight(int(rowcounter/cols),(colwidth*0.75))
                    self.MaintableWidget.setCellWidget(int(rowcounter/cols), int(rowcounter%cols), thumbnail)
                else:
                    self.MaintableWidget.setRowHeight(rowcounter,75)
                    self.MaintableWidget.setCellWidget(rowcounter, 0, thumbnail)
            else:
                # Fill the table when thumbs are disabled
                dis_thumb_string = QTableWidgetItem("disabled")
                if self.images_view.currentText() == "by cells":
                    self.MaintableWidget.setItem(int(rowcounter/cols), int(rowcounter%cols), dis_thumb_string)
                else:
                    self.MaintableWidget.setItem(rowcounter, 0, dis_thumb_string)
            rowcounter += 1
            self.progressbar.setValue(rowcounter)
            self.statusbar.showMessage("Creating thumbnail of: " + os.path.basename(loadedimage))
            qApp.processEvents()
            imagestring += loadedimage + " "
        # `folder` is the directory of the last processed image; all images
        # come from one file dialog so they share a folder.
        self.image_folder = folder
        self.MaintableWidget.setToolTip('image(s) folder: ' + folder)
        if self.allDebugMsg:
            QMessageBox.about(self, "file names", "images found \n %s" % imagestring)
        # After loading the photos we will enable buttons and events
        self.activate_buttons_events()
def imageinfo(self, qApp):
    """Run exiftool on the currently selected image and show the tags of the
    chosen category in the exif table widget.

    The checked radio button selects the tag group passed to exiftool; the
    language combobox optionally adds a ``-lang`` option so exiftool returns
    localized tag descriptions. Output lines of the form "Tag : value" are
    split into the two table columns.
    """
    self.statusbar.showMessage("")
    # Resolve the selected image path, depending on the active view mode.
    if self.images_view.currentText() == "by cells":
        selected_row = self.MaintableWidget.currentRow()
        selected_col = self.MaintableWidget.currentColumn()
        selected_image = "\"" + self.fileNames[int((self.MaintableWidget.columnCount()*selected_row)+selected_col)] + "\""
    else:
        selected_row = self.MaintableWidget.currentRow()
        selected_image = "\"" + self.fileNames[selected_row] + "\""
    # Map the checked radio button to exiftool tag-group parameters.
    # NOTE(review): `arguments` is assigned in some branches but never used
    # (the command below always passes "-a " + exiftool_params); and if no
    # radio button is checked, exiftool_params stays unbound — confirm the
    # UI guarantees one is always checked.
    if self.radioButton_all.isChecked():
        exiftool_params = ""
        arguments = " -a "
        header = "all tags"
    elif self.radioButton_exif.isChecked():
        exiftool_params = "-exif:all"
        header = "EXIF tags"
    elif self.radioButton_xmp.isChecked():
        exiftool_params = "-xmp:all"
        header = "XMP tags"
    elif self.radioButton_iptc.isChecked():
        exiftool_params = "-iptc:all"
        header = "IPTC tags"
    elif self.radioButton_iccprofile.isChecked():
        exiftool_params = "-icc_profile:all"
        header = "ICC profile tags"
    elif self.radioButton_gps.isChecked():
        exiftool_params = "-gps:all -xmp:GPSLatitude -xmp:GPSLongitude -xmp:Location -xmp:Country -xmp:State -xmp:City"
        arguments = " -a -gps:all -xmp:GPSLatitude -xmp:GPSLongitude -xmp:Location -xmp:Country -xmp:State -xmp:City"
        header = "GPS tags"
    elif self.radioButton_gpano.isChecked():
        exiftool_params = " -xmp:CroppedAreaImageHeightPixels -xmp:CroppedAreaImageWidthPixels -xmp:CroppedAreaLeftPixels -xmp:CroppedAreaTopPixels -xmp:FullPanoHeightPixels -xmp:FullPanoWidthPixels -xmp:ProjectionType -xmp:UsePanoramaViewer -xmp:PoseHeadingDegrees -xmp:InitialViewHeadingDegrees -xmp:InitialViewPitchDegrees -xmp:InitialViewRollDegrees -xmp:StitchingSoftware -xmp:InitialHorizontalFOVDegrees"
        arguments = " -xmp:CroppedAreaImageHeightPixels -xmp:CroppedAreaImageWidthPixels -xmp:CroppedAreaLeftPixels -xmp:CroppedAreaTopPixels -xmp:FullPanoHeightPixels -xmp:FullPanoWidthPixels -xmp:ProjectionType -xmp:UsePanoramaViewer -xmp:PoseHeadingDegrees -xmp:InitialViewHeadingDegrees -xmp:InitialViewPitchDegrees -xmp:InitialViewRollDegrees -xmp:StitchingSoftware -xmp:InitialHorizontalFOVDegrees"
        header = "GPano tags"
    elif self.radioButton_makernotes.isChecked():
        exiftool_params = "-makernotes:all"
        header = "makernotes tags"
    # Check if we need to display it in a specific language
    if (self.comboBox_languages.currentText() == " ") or (self.comboBox_languages.currentText() == ""):
        ETlang = ""
    else:
        ETlang = " -lang " + self.comboBox_languages.currentText() + " "
    # On Windows the command is run through the shell as one string; on
    # other platforms it is tokenized with shlex and run directly.
    if self.OSplatform == "Windows":
        selected_image = selected_image.replace("/", "\\")
        args = "\"" + self.exiftoolprog + "\" -a " + ETlang + exiftool_params + " " + selected_image
        p = subprocess.check_output(args, universal_newlines=True, shell=True)
    else:
        command_line = "\"" + self.exiftoolprog + "\" -a " + ETlang + exiftool_params + " " + selected_image
        args = shlex.split(command_line)
        p = subprocess.check_output(args, universal_newlines=True)
    if len(p) == 0:
        # No output from exiftool: show a placeholder row instead.
        p = header + " : No data available\n"
    # remove last character which is the final ending \n (where \ is only the escape character)
    p = p[:-1]
    p_lines = re.split('\n',p)
    self.exiftableWidget.clearContents()
    self.exiftableWidget.setRowCount(0)
    rowcounter = 0
    for line in p_lines:
        try:
            # Each exiftool output line is "Tag Name : value".
            descriptor, description = re.split(':', line,1)
            descriptor = descriptor.strip()
            # NOTE(review): .decode('utf-8') implies Python 2 byte strings;
            # with universal_newlines=True on Python 3 these are str and the
            # call raises AttributeError (caught below) — confirm target.
            descriptor = descriptor.decode('utf-8')
            description = description.strip()
            description = description.decode('utf-8')
            #print "descriptor " + descriptor + " ;description " + description
            self.exiftableWidget.insertRow(rowcounter)
            self.exiftableWidget.setColumnWidth(0,225)
            self.exiftableWidget.setColumnWidth(1,425)
            self.exiftableWidget.setItem(rowcounter, 0, QTableWidgetItem(descriptor))
            self.exiftableWidget.setItem(rowcounter, 1, QTableWidgetItem(description))
            rowcounter += 1
            qApp.processEvents()
        except:
            # Lines without a ":" (or decode failures) are skipped.
            print("always the last line that doesn't work")
def copy_defaults(self, qApp, category):
    """Copy the default creator/copyright preference values into the
    creator and copyright fields of the given tab ("exif", "xmp" or "iptc").

    qApp is unused but kept for interface compatibility.
    """
    targets = {
        "exif": (self.exif_Artist, self.exif_Copyright),
        "xmp": (self.xmp_creator, self.xmp_rights),
        "iptc": (self.iptc_creator, self.iptc_rights),
    }
    if category in targets:
        creator_field, rights_field = targets[category]
        creator_field.setText(self.def_creator.text())
        rights_field.setText(self.def_copyright.text())
#------------------------------------------------------------------------
# Edit -> Gps tab and actions
def convertLatLong(self, direction):
    """Convert between degrees/minutes/seconds and decimal GPS coordinates.

    Args:
        direction: 'dms2d' converts the deg/min/sec calculator fields to
            decimal latitude/longitude; any other value converts the decimal
            fields back to deg/min/sec.

    Reads and writes the calc_* widgets and pops a QMessageBox on range
    errors. "South" latitudes and "West" longitudes are represented as
    negative decimal numbers.
    """
    # only "int" at the latest moment or calculations go wrong
    if direction == 'dms2d':
        # first latitude
        # Note that "South" latitudes and "West" longitudes convert to negative decimal numbers
        if int(self.calc_lat_sec.text()) in range(0, 60):
            latd = float(self.calc_lat_sec.text()) / float(60)
        else:
            # NOTE(review): on a range error latd stays unbound and the next
            # step raises NameError — pre-existing behavior, left unchanged.
            ret = QMessageBox.critical(self, "seconds error", "seconds must fall in the range 0 to <60")
        if int(self.calc_lat_min.text()) in range(0, 60):
            latd = (int(self.calc_lat_min.text()) + latd) / float(60)
        else:
            ret = QMessageBox.critical(self, "minutes error", "minutes must fall in the range 0 to <60")
        # check whether lat degrees falls within 0 and 89
        if int(self.calc_lat_deg.text()) in range(0, 90):
            latd = latd + int(self.calc_lat_deg.text())
        else:
            ret = QMessageBox.critical(self, "degrees error", "Latitude degrees must fall in the range 0 to 89")
        if self.radioButton_calc_gpsS.isChecked():  # South
            # this means a negative decimal latitude
            latd = -(latd)
        self.calc_latitude.setText(str(round(latd, 6)))
        # now longitude
        if int(self.calc_lon_sec.text()) in range(0, 60):
            lond = float(self.calc_lon_sec.text()) / float(60)
        else:
            ret = QMessageBox.critical(self, "seconds error", "seconds must fall in the range 0 to <60")
        if int(self.calc_lon_min.text()) in range(0, 60):
            lond = (int(self.calc_lon_min.text()) + lond) / float(60)
        else:
            ret = QMessageBox.critical(self, "minutes error", "minutes must fall in the range 0 to <60")
        # check whether lon degrees falls within 0 and 179
        # BUGFIX: range(0, 179) excluded 179 even though the error message
        # (and longitude itself) allows degrees up to 179.
        if int(self.calc_lon_deg.text()) in range(0, 180):
            lond = lond + int(self.calc_lon_deg.text())
        else:
            ret = QMessageBox.critical(self, "degrees error", "Longitude degrees must fall in the range 0 to 179")
        if self.radioButton_calc_gpsW.isChecked():  # West
            lond = -(lond)
        # Update value in decimal longitude field
        self.calc_longitude.setText(str(round(lond, 6)))
    else:  # direction is d2dms
        # First latitude
        latitude = self.calc_latitude.text()
        # First check on "," in string (accept comma as decimal separator)
        if "," in latitude:
            latitude = latitude.replace(',', '.')
            self.calc_latitude.setText(latitude)
        # Now check whether we have a "." in our string. If not we have an integer and re is not necessary
        if "." in latitude:
            latint, latremain = re.split(r'\.', latitude)
        else:
            latint = latitude
        # BUGFIX: range(-89, 89) excluded +89 degrees even though the error
        # message allows -90 < degrees < 90.
        if int(latint) in range(-89, 90):
            if (int(latint)) < 0:
                # Negative decimal latitude means "South".
                latint = -(int(latint))
                latitude = -(float(latitude))
                self.radioButton_calc_gpsS.setChecked(1)
            else:
                self.radioButton_calc_gpsN.setChecked(1)
            deg = str(latint)
            self.calc_lat_deg.setText(deg)
            min = (float(latitude) - int(deg)) * 60
            self.calc_lat_min.setText(str(int(min)))
            sec = int(round(((float(min) - int(min)) * 60), 0))
            self.calc_lat_sec.setText(str(sec))
        else:
            ret = QMessageBox.critical(self, "degrees error", "Latitude decimal must fall in the range -90 < degr < 90")
        # Now longitude
        longitude = self.calc_longitude.text()
        # First check on "," in string
        if "," in longitude:
            longitude = longitude.replace(',', '.')
            self.calc_longitude.setText(longitude)
        # Now check whether we have a "." in our string. If not we have an integer and re is not necessary
        if "." in longitude:
            lonint, lonremain = re.split(r'\.', (self.calc_longitude.text()))
        else:
            lonint = longitude
        # BUGFIX: range(-179, 179) excluded +179 degrees even though the
        # error message allows -180 < degrees < 180.
        if int(lonint) in range(-179, 180):
            if (int(lonint)) < 0:
                # Negative decimal longitude means "West".
                lonint = -(int(lonint))
                longitude = -(float(longitude))
                self.radioButton_calc_gpsW.setChecked(1)
            else:
                self.radioButton_calc_gpsE.setChecked(1)
            deg = str(lonint)
            self.calc_lon_deg.setText(deg)
            min = (float(longitude) - int(deg)) * 60
            self.calc_lon_min.setText(str(int(min)))
            sec = int(round(((float(min) - int(min)) * 60), 0))
            self.calc_lon_sec.setText(str(sec))
        else:
            ret = QMessageBox.critical(self, "degrees error", "Longitude decimal must fall in the range -180 < degr < 180")
def clear_gps_fields(self):
    """Reset every input field on the GPS tab: blank all text fields and
    re-tick the default radio buttons and checkboxes (N/E hemisphere,
    above sea level, write-tag checkboxes)."""
    text_fields = (
        self.calc_lat_deg, self.calc_lat_min, self.calc_lat_sec,
        self.calc_latitude, self.calc_lon_deg, self.calc_lon_min,
        self.calc_lon_sec, self.calc_longitude, self.gps_lat_decimal,
        self.gps_lon_decimal, self.gps_altitude, self.gps_lat_deg,
        self.gps_lat_min, self.gps_lat_sec, self.gps_lon_deg,
        self.gps_lon_min, self.gps_lon_sec, self.xmp_location,
        self.xmp_country, self.xmp_state, self.xmp_city,
        self.gps_timestamp, self.gps_datestamp, self.gps_versionid,
        self.gps_mapdatum,
    )
    for field in text_fields:
        field.setText("")
    default_checked = (
        self.radioButton_calc_gpsN, self.radioButton_calc_gpsE,
        self.chk_AboveSeaLevel, self.radioButton_gpsN,
        self.radioButton_gpsE, self.chk_xmp_location,
        self.chk_xmp_country, self.chk_xmp_state, self.chk_xmp_city,
        self.chk_gps_timestamp, self.chk_gps_datestamp,
    )
    for box in default_checked:
        box.setChecked(1)
def copy_calc_to_gpsinput(self):
    """Copy the calculator latitude/longitude values (decimal and
    deg/min/sec) into the GPS input fields, including the N/S and E/W
    hemisphere radio buttons."""
    pairs = (
        (self.gps_lat_decimal, self.calc_latitude),
        (self.gps_lon_decimal, self.calc_longitude),
        (self.gps_lat_deg, self.calc_lat_deg),
        (self.gps_lat_min, self.calc_lat_min),
        (self.gps_lat_sec, self.calc_lat_sec),
        (self.gps_lon_deg, self.calc_lon_deg),
        (self.gps_lon_min, self.calc_lon_min),
        (self.gps_lon_sec, self.calc_lon_sec),
    )
    for dest, src in pairs:
        dest.setText(src.text())
    if self.radioButton_calc_gpsN.isChecked():
        self.radioButton_gpsN.setChecked(1)
    else:
        self.radioButton_gpsS.setChecked(1)
    if self.radioButton_calc_gpsE.isChecked():
        self.radioButton_gpsE.setChecked(1)
    else:
        self.radioButton_gpsW.setChecked(1)
def d2dms(self, value, sort):
    """Convert a decimal coordinate to deg/min/sec and write it into the
    read-only GPS input fields.

    This is a simplified one-way variant of convertLatLong for display
    purposes. The sign is dropped (abs) — hemisphere is handled elsewhere.

    Args:
        value: decimal coordinate (string or number).
        sort: "lat" fills the latitude fields, anything else the longitude
            fields.
    """
    decimal = abs(float(value))
    deg = int(decimal)
    minutes = (decimal - deg) * 60
    seconds = int(round((minutes - int(minutes)) * 60, 0))
    if sort == "lat":
        fields = (self.gps_lat_deg, self.gps_lat_min, self.gps_lat_sec)
    else:
        fields = (self.gps_lon_deg, self.gps_lon_min, self.gps_lon_sec)
    fields[0].setText(str(deg))
    fields[1].setText(str(int(minutes)))
    fields[2].setText(str(seconds))
def copygpsfromselected(self, qApp):
    """Read the gps/location tags from the selected source image and fill
    the GPS tab input fields.

    exiftool is run with -a so duplicated tags appear twice: the exif value
    first and the xmp value second. The second occurrence of GPS
    Latitude/Longitude (the xmp one, a signed decimal) is used to fill the
    decimal fields and the deg/min/sec display; the REF tags fix up the sign
    afterwards, since exif decimal values are always positive.
    """
    # First clean input fields
    clear_gps_fields(self)
    exiftool_params = ' -e -n -a -gps:all -xmp:Location -xmp:Country -xmp:State -xmp:City -xmp:GPSLatitude -xmp:GPSLongitude '
    data = True
    p = read_image_info(self, exiftool_params)
    if len(p) == 0:
        data = False
        message = ("<p>You are trying to copy the gps/location info from your source image, but your source image "
                   "doesn't contain data or doesn't seem to contain data (or you didn't select an image).</p>"
                   "<p>In case your camera has a GPS system, but only uses it's internal \"maker\" options "
                   "to store the gps data, I can't retrieve the data as it is stored differently "
                   "for every brand of camera.</p>"
                   "<p>If this is the case for your camera, your only option is to copy & paste the information out "
                   "of the table rows from the \"General\" tab.")
        ret = QMessageBox.warning(self, "Error copying gps info from source image", message)
    else:
        # remove last character which is the final ending \n (where \ is only the escape character)
        p = p[:-1]
        p_lines = re.split('\n', p)
        # BUGFIX: these counters/flags were previously re-initialised inside
        # the loop on every iteration, so the "second occurrence" detection
        # (gpslat/gpslon == 2) and the post-loop REF sign fix-up never
        # worked. They must persist across lines.
        gpslat = 0
        gpslon = 0
        latref = 0
        lonref = 0
        gpslatvalue = None
        gpslonvalue = None
        for line in p_lines:
            # Robustness: skip lines without the "tag : value" separator.
            if ':' not in line:
                continue
            descriptor, description = re.split(':', line, 1)
            descriptor = descriptor.strip()
            description = description.strip()
            if descriptor == "GPS Version ID":
                self.gps_versionid.setText(description)
            if descriptor == "GPS Latitude Ref":
                latref = 1
                latrefvalue = description
                if description == "N":
                    self.radioButton_gpsN.setChecked(1)
                else:
                    self.radioButton_gpsS.setChecked(1)
            if descriptor == "GPS Latitude":
                gpslat += 1
                if gpslat == 2:
                    # Second occurrence: the (signed) xmp decimal latitude.
                    gpslatvalue = description
                    self.gps_lat_decimal.setText(str(round(float(description), 6)))
            if descriptor == "GPS Longitude Ref":
                lonref = 1
                lonrefvalue = description
                if description == "E":
                    self.radioButton_gpsE.setChecked(1)
                else:
                    self.radioButton_gpsW.setChecked(1)
            if descriptor == "GPS Longitude":
                gpslon += 1
                if gpslon == 2:
                    # Second occurrence: the (signed) xmp decimal longitude.
                    gpslonvalue = description
                    self.gps_lon_decimal.setText(str(round(float(description), 6)))
            if descriptor == "GPS Altitude Ref":
                if description == "0":
                    self.chk_AboveSeaLevel.setChecked(1)
                else:
                    self.chk_AboveSeaLevel.setChecked(0)
            if descriptor == "GPS Altitude":
                self.gps_altitude.setText(str(round(float(description), 1)))
            if descriptor == "Location":
                self.xmp_location.setText(description)
            if descriptor == "Country":
                self.xmp_country.setText(description)
            if descriptor == "State":
                self.xmp_state.setText(description)
            if descriptor == "City":
                self.xmp_city.setText(description)
            if descriptor == "GPS Time Stamp":
                self.gps_timestamp.setText(description)
            if descriptor == "GPS Date Stamp":
                self.gps_datestamp.setText(description)
            if descriptor == "GPS Map Datum":
                self.gps_mapdatum.setText(description)
        # We bluntly walk through all tags as we don't know whether they are
        # complete. Now we need to check for neg/pos latitudes and
        # longitudes by REF values, as we do not know whether we have exif
        # decimal values (always positive) or xmp decimal values which can
        # be negative or positive. Not elegant, but much simpler than
        # building internal checks.
        if latref == 1:
            value = self.gps_lat_decimal.text()
            if latrefvalue == "N":
                self.gps_lat_decimal.setText(str(abs(float(value))))
            else:  # S = negative
                if value.count('-') == 0:  # doesn't contain a - but should contain it.
                    self.gps_lat_decimal.setText('-' + value)
        if lonref == 1:
            value = self.gps_lon_decimal.text()
            # BUGFIX: this previously compared latrefvalue (the latitude
            # ref) instead of the longitude ref.
            if lonrefvalue == "E":
                self.gps_lon_decimal.setText(str(abs(float(value))))
            else:  # W = negative
                if value.count('-') == 0:  # doesn't contain a - but should contain it.
                    self.gps_lon_decimal.setText('-' + value)
        # Fill the deg/min/sec display fields when xmp decimal values were
        # found (guarded so a missing xmp tag no longer raises NameError).
        if data and gpslatvalue is not None:
            d2dms(self, gpslatvalue, "lat")
        if data and gpslonvalue is not None:
            d2dms(self, gpslonvalue, "lon")
def savegpsdata(self, qApp):
    """Build the exiftool parameter string from the GPS tab fields and write
    it to the selected photo(s) via write_image_info.

    Only the tag groups whose checkboxes are ticked are written. GPSMapDatum
    defaults to "WGS-84" when the field is empty.
    """
    exiftool_params = ""
    # Exif and xmp gps data
    if self.chk_lat_lon_alt.isChecked():
        exiftool_params = ' -exif:GPSLatitude="' + self.gps_lat_decimal.text() + '" '
        value = float(self.gps_lat_decimal.text())
        # exif stores the latitude unsigned; the REF tag carries the sign.
        if value > 0:
            exiftool_params += ' -exif:GPSLatitudeREF="N" '
        else:
            exiftool_params += ' -exif:GPSLatitudeREF="S" '
        exiftool_params += ' -xmp:GPSLatitude="' + self.gps_lat_decimal.text() + '" '
        exiftool_params += ' -exif:GPSLongitude="' + self.gps_lon_decimal.text() + '" '
        value = float(self.gps_lon_decimal.text())
        if value > 0:
            exiftool_params += ' -exif:GPSLongitudeREF="E" '
        else:
            exiftool_params += ' -exif:GPSLongitudeREF="W" '
        exiftool_params += ' -xmp:GPSLongitude="' + self.gps_lon_decimal.text() + '" '
        exiftool_params += ' -exif:GPSAltitude="' + self.gps_altitude.text() + '" '
        exiftool_params += ' -xmp:GPSAltitude="' + self.gps_altitude.text() + '" '
        if self.chk_AboveSeaLevel.isChecked():
            exiftool_params += ' -exif:GPSAltitudeRef="above" '  # Above sea level
        else:
            exiftool_params += ' -exif:GPSAltitudeRef="below" '  # Below sea level
    # Location data for XMP and IPTC
    if self.chk_xmp_location.isChecked():
        exiftool_params += '-xmp:Location="' + self.xmp_location.text() + '" '
        exiftool_params += '-iptc:Sub-location="' + self.xmp_location.text() + '" '
    if self.chk_xmp_country.isChecked():
        exiftool_params += '-xmp:Country="' + self.xmp_country.text() + '" '
        exiftool_params += '-iptc:Country-PrimaryLocationName="' + self.xmp_country.text() + '" '
    if self.chk_xmp_state.isChecked():
        exiftool_params += '-xmp:State="' + self.xmp_state.text() + '" '
        exiftool_params += '-iptc:Province-State="' + self.xmp_state.text() + '" '
    if self.chk_xmp_city.isChecked():
        exiftool_params += '-xmp:City="' + self.xmp_city.text() + '" '
        exiftool_params += '-iptc:City="' + self.xmp_city.text() + '" '
    # GPS date/time stamps and map datum
    # BUGFIX: these two branches previously wrote -exif:Copyright and
    # -exif:UserComment from the Exif tab fields (a copy/paste error);
    # they now write the GPS time/date stamps these checkboxes refer to.
    if self.chk_gps_timestamp.isChecked():
        exiftool_params += '-exif:GPSTimeStamp="' + self.gps_timestamp.text() + '" '
    if self.chk_gps_datestamp.isChecked():
        exiftool_params += '-exif:GPSDateStamp="' + self.gps_datestamp.text() + '" '
    if self.gps_mapdatum.text() == "":
        exiftool_params += '-exif:GPSMapDatum="WGS-84" '
    else:
        exiftool_params += '-exif:GPSMapDatum="' + self.gps_mapdatum.text() + '" '
    print(exiftool_params)
    # Now write the data to the photo(s)
    if self.chk_gps_backuporiginals.isChecked():
        write_image_info(self, exiftool_params, qApp, True)
    else:
        write_image_info(self, exiftool_params, qApp, False)
#------------------------------------------------------------------------
# Edit -> Exif tab and actions
def clear_exif_fields(self):
    """Blank every input field on the Edit->Exif tab and re-tick all of its
    "write this tag" checkboxes."""
    for field in (self.exif_Make, self.exif_Model, self.exif_ModifyDate,
                  self.exif_DateTimeOriginal, self.exif_CreateDate,
                  self.exif_Artist, self.exif_Copyright,
                  self.exif_UserComment):
        field.setText("")
    self.exif_ImageDescription.clear()
    for checkbox in (self.chk_exif_Make, self.chk_exif_Model,
                     self.chk_exif_ModifyDate,
                     self.chk_exif_DateTimeOriginal,
                     self.chk_exif_CreateDate, self.chk_exif_Artist,
                     self.chk_exif_Copyright, self.chk_exif_UserComment,
                     self.chk_exif_ImageDescription):
        checkbox.setChecked(1)
def copyexiffromselected(self, qApp):
    """Read the exif tags from the selected source image and fill the
    Edit->Exif tab input fields; warns when the image has no exif data."""
    # First clean input fields
    clear_exif_fields(self)
    exiftool_params = ' -e -n -exif:Make -exif:Model -exif:ModifyDate -exif:DateTimeOriginal -exif:CreateDate -exif:Artist -exif:Copyright -exif:UserComment -exif:ImageDescription '
    p = read_image_info(self, exiftool_params)
    if len(p) == 0:
        message = ("<p>You are trying to copy exif info from your source image, but your source image "
                   "doesn't contain the specified exif data or doesn't seem to contain any exif data (or you didn't select an image).</p>")
        QMessageBox.warning(self, "Error copying exif info from source image", message)
    else:
        # remove last character which is the final ending \n (where \ is only the escape character)
        p = p[:-1]
        # Map the human-readable exiftool tag name to the widget setter.
        setters = {
            "Make": self.exif_Make.setText,
            "Camera Model Name": self.exif_Model.setText,
            "Modify Date": self.exif_ModifyDate.setText,
            "Date/Time Original": self.exif_DateTimeOriginal.setText,
            "Create Date": self.exif_CreateDate.setText,
            "Artist": self.exif_Artist.setText,
            "Copyright": self.exif_Copyright.setText,
            "User Comment": self.exif_UserComment.setText,
            "Image Description": self.exif_ImageDescription.insertPlainText,
        }
        for line in re.split('\n', p):
            descriptor, description = re.split(':', line, 1)
            descriptor = descriptor.strip()
            description = description.strip()
            if descriptor in setters:
                setters[descriptor](description)
def saveexifdata(self, qApp):
    """Build the exiftool parameter string from the ticked Edit->Exif tab
    fields and write it to the selected photo(s) via write_image_info."""
    exiftool_params = ""
    # Make keeps its historical leading space so the resulting command
    # string is identical to what earlier versions produced.
    if self.chk_exif_Make.isChecked():
        exiftool_params = ' -exif:Make="' + self.exif_Make.text() + '" '
    remaining = (
        (self.chk_exif_Model, 'Model', self.exif_Model.text),
        (self.chk_exif_ModifyDate, 'ModifyDate', self.exif_ModifyDate.text),
        (self.chk_exif_DateTimeOriginal, 'DateTimeOriginal', self.exif_DateTimeOriginal.text),
        (self.chk_exif_CreateDate, 'CreateDate', self.exif_CreateDate.text),
        (self.chk_exif_Artist, 'Artist', self.exif_Artist.text),
        (self.chk_exif_Copyright, 'Copyright', self.exif_Copyright.text),
        (self.chk_exif_UserComment, 'UserComment', self.exif_UserComment.text),
        (self.chk_exif_ImageDescription, 'ImageDescription', self.exif_ImageDescription.toPlainText),
    )
    for checkbox, tag, get_value in remaining:
        if checkbox.isChecked():
            exiftool_params += '-exif:' + tag + '="' + get_value() + '" '
    # Write the collected tags, optionally keeping backups of the originals.
    write_image_info(self, exiftool_params, qApp, self.chk_exif_backuporiginals.isChecked())
#------------------------------------------------------------------------
# Edit -> xmp tab and actions
def clear_xmp_fields(self):
    """Blank every input field on the Edit->xmp tab, reset the rating to
    one star, and re-tick all of its "write this tag" checkboxes."""
    for field in (self.xmp_creator, self.xmp_rights, self.xmp_label,
                  self.xmp_subject, self.xmp_title, self.xmp_person):
        field.setText("")
    self.xmp_rating1.setChecked(1)
    self.xmp_description.clear()
    for checkbox in (self.chk_xmp_creator, self.chk_xmp_rights,
                     self.chk_xmp_label, self.chk_xmp_subject,
                     self.chk_xmp_title, self.chk_xmp_rating,
                     self.chk_xmp_description, self.chk_xmp_person):
        checkbox.setChecked(1)
def copyxmpfromselected(self, qApp):
    """Read the xmp tags from the selected source image and fill the
    Edit->xmp tab input fields; warns when the image has no xmp data."""
    # First clean input fields
    clear_xmp_fields(self)
    xmptool_params = ' -e -n -xmp:Creator -xmp:Rights -xmp:Label -xmp:Subject -xmp:Title -xmp:Rating -xmp:Description -xmp:Person -xmp:PersonInImage '
    p = read_image_info(self, xmptool_params)
    if len(p) == 0:
        message = ("<p>You are trying to copy xmp info from your source image, but your source image "
                   "doesn't contain the specified xmp data or doesn't seem to contain any xmp data (or you didn't select an image).</p>")
        QMessageBox.warning(self, "Error copying xmp info from source image", message)
    else:
        # remove last character which is the final ending \n (where \ is only the escape character)
        p = p[:-1]
        # Map the human-readable exiftool tag name to the widget setter.
        setters = {
            "Creator": self.xmp_creator.setText,
            "Rights": self.xmp_rights.setText,
            "Label": self.xmp_label.setText,
            "Subject": self.xmp_subject.setText,
            "Title": self.xmp_title.setText,
            "Description": self.xmp_description.insertPlainText,
            "Person": self.xmp_person.setText,
            "Person In Image": self.xmp_person.setText,
        }
        # Rating: check the matching star button (the flag values mirror the
        # original code exactly).
        rating_buttons = {
            "1": (self.xmp_rating1, 1),
            "2": (self.xmp_rating2, 2),
            "3": (self.xmp_rating3, 3),
            "4": (self.xmp_rating4, 4),
            "5": (self.xmp_rating5, 5),
        }
        for line in re.split('\n', p):
            descriptor, description = re.split(':', line, 1)
            descriptor = descriptor.strip()
            description = description.strip()
            if descriptor in setters:
                setters[descriptor](description)
            elif descriptor == "Rating" and description in rating_buttons:
                button, flag = rating_buttons[description]
                button.setChecked(flag)
def savexmpdata(self, qApp):
    """Build the exiftool parameter string from the ticked Edit->xmp tab
    fields and write it to the selected photo(s) via write_image_info."""
    xmptool_params = ""
    # Creator keeps its historical leading space so the resulting command
    # string is identical to what earlier versions produced.
    if self.chk_xmp_creator.isChecked():
        xmptool_params = ' -xmp:Creator="' + self.xmp_creator.text() + '" '
    simple = (
        (self.chk_xmp_rights, 'Rights', self.xmp_rights.text),
        (self.chk_xmp_label, 'Label', self.xmp_label.text),
        (self.chk_xmp_subject, 'Subject', self.xmp_subject.text),
        (self.chk_xmp_title, 'Title', self.xmp_title.text),
    )
    for checkbox, tag, get_value in simple:
        if checkbox.isChecked():
            xmptool_params += '-xmp:' + tag + '="' + get_value() + '" '
    if self.chk_xmp_rating.isChecked():
        # Translate the checked star button to its numeric rating.
        if self.xmp_rating1.isChecked():
            rating = "1"
        elif self.xmp_rating2.isChecked():
            rating = "2"
        elif self.xmp_rating3.isChecked():
            rating = "3"
        elif self.xmp_rating4.isChecked():
            rating = "4"
        else:
            rating = "5"
        xmptool_params += '-xmp:Rating="' + rating + '" '
    if self.chk_xmp_description.isChecked():
        xmptool_params += '-xmp:Description="' + self.xmp_description.toPlainText() + '" '
    if self.chk_xmp_person.isChecked():
        # Person is written to both the Person and PersonInImage tags.
        person = self.xmp_person.text()
        xmptool_params += '-xmp:Person="' + person + '" '
        xmptool_params += '-xmp:PersonInImage="' + person + '" '
    # Write the collected tags, optionally keeping backups of the originals.
    write_image_info(self, xmptool_params, qApp, self.chk_xmp_backuporiginals.isChecked())
#------------------------------------------------------------------------
# Edit -> GPano tab and actions
def clear_gpano_fields(self):
    """Blank every input field on the Edit->GPano tab, reset the projection
    type and panorama-viewer controls to their defaults, and re-tick all of
    its "write this tag" checkboxes."""
    for field in (self.xmp_StitchingSoftware,
                  self.xmp_CroppedAreaImageHeightPixels,
                  self.xmp_CroppedAreaImageWidthPixels,
                  self.xmp_CroppedAreaLeftPixels,
                  self.xmp_CroppedAreaTopPixels,
                  self.xmp_FullPanoHeightPixels,
                  self.xmp_FullPanoWidthPixels,
                  self.xmp_PoseHeadingDegrees,
                  self.xmp_InitialViewHeadingDegrees,
                  self.xmp_InitialViewPitchDegrees,
                  self.xmp_InitialViewRollDegrees,
                  self.xmp_InitialHorizontalFOVDegrees):
        field.setText("")
    self.xmp_ProjectionType.setCurrentIndex(0)
    self.xmp_UsePanoramaViewer.setChecked(1)
    for checkbox in (self.chk_xmp_StitchingSoftware,
                     self.chk_xmp_CroppedAreaImageHeightPixels,
                     self.chk_xmp_CroppedAreaImageWidthPixels,
                     self.chk_xmp_CroppedAreaLeftPixels,
                     self.chk_xmp_CroppedAreaTopPixels,
                     self.chk_xmp_FullPanoHeightPixels,
                     self.chk_xmp_FullPanoWidthPixels,
                     self.chk_xmp_ProjectionType,
                     self.chk_xmp_UsePanoramaViewer,
                     self.chk_xmp_PoseHeadingDegrees,
                     self.chk_xmp_InitialViewHeadingDegrees,
                     self.chk_xmp_InitialViewPitchDegrees,
                     self.chk_xmp_InitialViewRollDegrees,
                     self.chk_xmp_InitialHorizontalFOVDegrees):
        checkbox.setChecked(1)
def copygpanofromselected(self, qApp):
    """Read the GPano (Google Photosphere) xmp tags from the selected source
    image and fill the Edit->GPano tab input fields; warns when the image
    has no GPano data."""
    # First clean input fields
    # BUGFIX: this previously called clear_exif_fields(self), which wiped
    # the Exif tab instead of the GPano tab it is about to fill.
    clear_gpano_fields(self)
    exiftool_params = ' -e -n -xmp:CroppedAreaImageHeightPixels -xmp:CroppedAreaImageWidthPixels -xmp:CroppedAreaLeftPixels -xmp:CroppedAreaTopPixels -xmp:FullPanoHeightPixels -xmp:FullPanoWidthPixels -xmp:ProjectionType -xmp:UsePanoramaViewer -xmp:PoseHeadingDegrees -xmp:InitialViewHeadingDegrees -xmp:InitialViewRollDegrees -xmp:InitialViewPitchDegrees -xmp:StitchingSoftware -xmp:InitialHorizontalFOVDegrees '
    p = read_image_info(self, exiftool_params)
    if len(p) == 0:
        message = ("<p>You are trying to copy GPano (Google Photosphere) info from your source image, but your source image "
                   "doesn't contain the specified GPano data or doesn't seem to contain any GPano data (or you didn't select an image).</p>")
        ret = QMessageBox.warning(self, "Error copying GPano info from source image", message)
    else:
        # remove last character which is the final ending \n (where \ is only the escape character)
        p = p[:-1]
        p_lines = re.split('\n', p)
        for line in p_lines:
            # Robustness: skip lines without the "tag : value" separator.
            if ':' not in line:
                continue
            descriptor, description = re.split(':', line, 1)
            descriptor = descriptor.strip()
            description = description.strip()
            if descriptor == "Cropped Area Image Height Pixels":
                self.xmp_CroppedAreaImageHeightPixels.setText(description)
            if descriptor == "Cropped Area Image Width Pixels":
                self.xmp_CroppedAreaImageWidthPixels.setText(description)
            if descriptor == "Cropped Area Left Pixels":
                self.xmp_CroppedAreaLeftPixels.setText(description)
            if descriptor == "Cropped Area Top Pixels":
                self.xmp_CroppedAreaTopPixels.setText(description)
            if descriptor == "Full Pano Height Pixels":
                self.xmp_FullPanoHeightPixels.setText(description)
            if descriptor == "Full Pano Width Pixels":
                self.xmp_FullPanoWidthPixels.setText(description)
            if descriptor == "Projection Type":
                if description == "equirectangular":
                    self.xmp_ProjectionType.setCurrentIndex(0)
                # NOTE(review): the next branch repeats "equirectangular"
                # and is therefore unreachable; it presumably should match
                # the combobox's second entry — TODO confirm against the UI
                # definition and fix the comparison string.
                elif description == "equirectangular":
                    self.xmp_ProjectionType.setCurrentIndex(1)
                elif description == "rectilinear":
                    self.xmp_ProjectionType.setCurrentIndex(2)
            if descriptor == "Use Panorama Viewer":
                if description == "True":
                    self.xmp_UsePanoramaViewer.setChecked(1)
                else:
                    self.xmp_UsePanoramaViewer.setChecked(0)
            if descriptor == "Pose Heading Degrees":
                self.xmp_PoseHeadingDegrees.setText(description)
            if descriptor == "Initial View Heading Degrees":
                self.xmp_InitialViewHeadingDegrees.setText(description)
            if descriptor == "Initial View Pitch Degrees":
                self.xmp_InitialViewPitchDegrees.setText(description)
            if descriptor == "Initial View Roll Degrees":
                self.xmp_InitialViewRollDegrees.setText(description)
            if descriptor == "Stitching Software":
                self.xmp_StitchingSoftware.setText(description)
            if descriptor == "Initial Horizontal FOV Degrees":
                self.xmp_InitialHorizontalFOVDegrees.setText(description)
def savegpanodata(self, qApp):
    """Build the exiftool parameter string from the ticked GPano fields and
    write the GPano (Google Photosphere) XMP tags into the selected image(s).

    Only fields whose checkbox is ticked are written. Originals are backed
    up when the gpano backup checkbox is ticked.
    """
    exiftool_params = ""
    if self.chk_xmp_CroppedAreaImageHeightPixels.isChecked():
        exiftool_params += ' -xmp:CroppedAreaImageHeightPixels="' + self.xmp_CroppedAreaImageHeightPixels.text() + '" '
    if self.chk_xmp_CroppedAreaImageWidthPixels.isChecked():
        exiftool_params += '-xmp:CroppedAreaImageWidthPixels="' + self.xmp_CroppedAreaImageWidthPixels.text() + '" '
    if self.chk_xmp_CroppedAreaLeftPixels.isChecked():
        exiftool_params += '-xmp:CroppedAreaLeftPixels="' + self.xmp_CroppedAreaLeftPixels.text() + '" '
    if self.chk_xmp_CroppedAreaTopPixels.isChecked():
        exiftool_params += '-xmp:CroppedAreaTopPixels="' + self.xmp_CroppedAreaTopPixels.text() + '" '
    if self.chk_xmp_FullPanoHeightPixels.isChecked():
        exiftool_params += '-xmp:FullPanoHeightPixels="' + self.xmp_FullPanoHeightPixels.text() + '" '
    if self.chk_xmp_FullPanoWidthPixels.isChecked():
        exiftool_params += '-xmp:FullPanoWidthPixels="' + self.xmp_FullPanoWidthPixels.text() + '" '
    if self.chk_xmp_ProjectionType.isChecked():
        # combobox indexes: 0 = equirectangular, 1 = cylindrical, 2 = rectilinear
        if self.xmp_ProjectionType.currentIndex() == 0:
            exiftool_params += '-xmp:ProjectionType="equirectangular" '
        elif self.xmp_ProjectionType.currentIndex() == 1:
            exiftool_params += '-xmp:ProjectionType="cylindrical" '
        elif self.xmp_ProjectionType.currentIndex() == 2:
            # BUGFIX: index 2 used to write the invalid value "rectangular";
            # the value read back in copygpanofromselected (and defined by the
            # GPano spec) is "rectilinear"
            exiftool_params += '-xmp:ProjectionType="rectilinear" '
    if self.chk_xmp_UsePanoramaViewer.isChecked():
        if self.xmp_UsePanoramaViewer.isChecked():
            exiftool_params += '-xmp:UsePanoramaViewer=1 '
        else:
            exiftool_params += '-xmp:UsePanoramaViewer=0 '
    if self.chk_xmp_PoseHeadingDegrees.isChecked():
        exiftool_params += '-xmp:PoseHeadingDegrees="' + self.xmp_PoseHeadingDegrees.text() + '" '
    if self.chk_xmp_InitialViewHeadingDegrees.isChecked():
        exiftool_params += '-xmp:InitialViewHeadingDegrees="' + self.xmp_InitialViewHeadingDegrees.text() + '" '
    if self.chk_xmp_InitialViewPitchDegrees.isChecked():
        exiftool_params += '-xmp:InitialViewPitchDegrees="' + self.xmp_InitialViewPitchDegrees.text() + '" '
    if self.chk_xmp_InitialViewRollDegrees.isChecked():
        exiftool_params += '-xmp:InitialViewRollDegrees="' + self.xmp_InitialViewRollDegrees.text() + '" '
    if self.chk_xmp_StitchingSoftware.isChecked():
        exiftool_params += '-xmp:StitchingSoftware="' + self.xmp_StitchingSoftware.text() + '" '
    if self.chk_xmp_InitialHorizontalFOVDegrees.isChecked():
        exiftool_params += '-xmp:InitialHorizontalFOVDegrees="' + self.xmp_InitialHorizontalFOVDegrees.text() + '" '
    # write the tags, optionally backing up the original files first
    write_image_info(self, exiftool_params, qApp, self.chk_gpano_backuporiginals.isChecked())
#------------------------------------------------------------------------
# Edit -> geotagging tab and actions
def geotag_source_folder(self, qApp):
    """Let the user pick the folder containing the images to geotag and
    store the choice on the instance and in the GUI line edit."""
    self.statusbar.showMessage("")
    folder_dialog = QFileDialog(self)
    folder_dialog.setFileMode(QFileDialog.Directory)
    qApp.processEvents()
    system = platform.system()
    if system in ("Darwin", "Linux"):
        folder_dialog.setDirectory(os.path.expanduser('~/Pictures'))
    elif system == "Windows":
        folder_dialog.setDirectory(os.path.expanduser('~/My Pictures'))
    folder_dialog.setViewMode(QFileDialog.Detail)
    qApp.processEvents()
    if folder_dialog.exec_():
        chosen_folder = folder_dialog.selectedFiles()[0]
        self.geotag_source_folder = chosen_folder
        self.LineEdit_geotag_source_folder.setText(chosen_folder)
        # a folder is known now, so writing geotag info can be enabled
        self.btn_write_geotaginfo.setEnabled(True)
    else:
        # user canceled
        self.statusbar.showMessage("you canceled selecting a folder for geotagging.")
def geotag_gps_file(self, qApp):
    """Let the user pick the GPS track log file used for geotagging and
    store the choice on the instance and in the GUI line edit."""
    self.statusbar.showMessage("")
    file_dialog = QFileDialog(self, "Open gpx track log file")
    file_dialog.setFileMode(QFileDialog.ExistingFiles)
    qApp.processEvents()
    system = platform.system()
    if system in ("Darwin", "Linux"):
        file_dialog.setDirectory(os.path.expanduser('~/Pictures'))
    elif system == "Windows":
        file_dialog.setDirectory(os.path.expanduser('~/My Pictures'))
    qApp.processEvents()
    file_dialog.setViewMode(QFileDialog.Detail)
    #select_file.setNameFilter("gpx track log files (*.gpx *.GPX *.log *.LOG)\nAll files (*.*)")
    if file_dialog.exec_():
        print("select file exec")
        chosen_file = file_dialog.selectedFiles()[0]
        self.geotag_gps_file = chosen_file
        print("file should be selected")
        self.LineEdit_geotag_log_file.setText(chosen_file)
    else:
        # user canceled
        self.statusbar.showMessage("you canceled selecting the GPS track log file.")
#------------------------------------------------------------------------
# Edit -> Lens tab and actions
def check_self_defined_lenses(self):
    """Toggle the lens buttons depending on whether the user works with the
    predefined lenses from the lens database (checkbox ticked) or enters
    lens data manually (checkbox unticked)."""
    use_predefined = self.chk_predefined_lenses.isChecked()
    self.predefined_lenses.setEnabled(use_predefined)
    # saving a new lens only makes sense for manually entered data;
    # update/delete only make sense for an existing predefined lens
    self.btn_save_lens.setEnabled(not use_predefined)
    self.btn_update_lens.setEnabled(use_predefined)
    self.btn_delete_lens.setEnabled(use_predefined)
def clear_lens_fields(self):
    """Reset all lens input fields to empty values, reset the metering mode
    combobox and (re)tick every lens checkbox."""
    for text_field in (self.lens_make, self.lens_model, self.lens_serialnumber,
                       self.lens_focallength, self.lens_focallengthin35mmformat,
                       self.lens_maxaperturevalue, self.lens_fnumber):
        text_field.setText("")
    self.lens_meteringmode.setCurrentIndex(0)
    for checkbox in (self.chk_lens_make, self.chk_lens_model,
                     self.chk_lens_serialnumber, self.chk_lens_focallength,
                     self.chk_lens_focallengthin35mmformat,
                     self.chk_lens_maxaperturevalue, self.chk_lens_fnumber,
                     self.chk_lens_meteringmode):
        checkbox.setChecked(1)
def copylensfromselected(self,qApp):
    """Copy the lens related EXIF tags (make, model, serial number, focal
    length(s), f-number, max aperture and metering mode) from the selected
    source image into the lens input fields of the GUI.

    Shows a warning dialog when no lens data (or no image) was found.
    """
    # First clean input fields
    clear_lens_fields(self)
    # -s prints tag names instead of descriptions; -n prints numeric values
    lenstool_params = ' -s -n -exif:lensmake -exif:lensmodel -exif:lensserialnumber -exif:focallength -exif:focallengthIn35mmformat -exif:fnumber -exif:maxaperturevalue -exif:meteringmode '
    p = read_image_info(self, lenstool_params)
    print (" lensparameters read " + str(p))
    if len(p) == 0:
        # empty output: none of the requested tags are present in the image
        # data = False
        message = ("<p>You are trying to copy lens info from your source image, but your source image "
        "doesn't contain the specified lens data or doesn't seem to contain any lens data (or you didn't select an image).</p>")
        ret = QMessageBox.warning(self, "Error copying lens info from source image", message)
    else:
        # remove last character which is the final ending \n (where \ is only the escape character)
        p = p[:-1]
        p_lines = re.split('\n',p)
        rowcounter = 0
        for line in p_lines:
            #try:
            # each output line is "TagName : value"; split on the first ':' only
            descriptor, description = re.split(':', line,1)
            descriptor = descriptor.strip()
            description = description.strip()
            # gpslat = 0
            # gpslon = 0
            if descriptor == "LensMake":
                self.lens_make.setText(description)
            if descriptor == "LensModel":
                self.lens_model.setText(description)
            if descriptor == "LensSerialNumber":
                self.lens_serialnumber.setText(description)
            if descriptor == "FocalLength":
                self.lens_focallength.setText(description)
            if descriptor == "FocalLengthIn35mmFormat":
                self.lens_focallengthin35mmformat.setText(description)
            if descriptor == "MaxApertureValue":
                self.lens_maxaperturevalue.setText(description)
            if descriptor == "FNumber":
                self.lens_fnumber.setText(description)
            if descriptor == "MeteringMode":
                # -n makes this the numeric exif value; presumably the combobox
                # items map 1:1 to those values — verify against the .ui file
                self.lens_meteringmode.setCurrentIndex(int(description))
            #print "rowcounter " + str(rowcounter) + " descriptor " + descriptor + " ;description " + description
            rowcounter += 1
def savelensdata(self, qApp):
    """Build the exiftool parameter string from the ticked lens fields and
    write the lens tags (both the exif and xmp variants) into the selected
    image(s), optionally backing up the originals first."""
    lenstool_params = ""
    if self.chk_lens_make.isChecked():
        lenstool_params = ' -exif:lensmake="' + self.lens_make.text() + '" -xmp:lensmake="' + self.lens_make.text() + '" '
    if self.chk_lens_model.isChecked():
        lenstool_params += '-exif:lensmodel="' + self.lens_model.text() + '" -xmp:lensmodel="' + self.lens_model.text() + '" '
    if self.chk_lens_serialnumber.isChecked():
        lenstool_params += '-exif:lensserialnumber="' + self.lens_serialnumber.text() + '" -xmp:lensserialnumber="' + self.lens_serialnumber.text() + '" '
    if self.chk_lens_focallength.isChecked():
        lenstool_params += '-exif:focallength="' + self.lens_focallength.text() + '" -xmp:focallength="' + self.lens_focallength.text() + '" '
    if self.chk_lens_focallengthin35mmformat.isChecked():
        lenstool_params += '-exif:focallengthin35mmformat="' + self.lens_focallengthin35mmformat.text() + '" -xmp:focallengthin35mmformat="' + self.lens_focallengthin35mmformat.text() + '" '
    if self.chk_lens_maxaperturevalue.isChecked():
        lenstool_params += '-exif:maxaperturevalue="' + self.lens_maxaperturevalue.text() + '" -xmp:maxaperturevalue="' + self.lens_maxaperturevalue.text() + '" '
    if self.chk_lens_fnumber.isChecked():
        lenstool_params += '-exif:fnumber="' + self.lens_fnumber.text() + '" -xmp:fnumber="' + self.lens_fnumber.text() + '" '
    if self.chk_lens_meteringmode.isChecked():
        # Map the combobox index to the exiftool metering mode name.
        # BUGFIX: meteringmode used to stay undefined (NameError on the
        # concatenation below) when the index matched no branch of the old
        # if/elif chain; it now defaults to "Unknown".
        # NOTE(review): the 255 key mirrors the original "currentIndex() == 255"
        # test; 255 is the EXIF *value* for "Other", which looks like it should
        # be the combobox index of the "Other" entry — verify against the .ui file.
        metering_names = {1: "Average", 2: "Center-weighted average", 3: "Spot",
                          4: "Multi-spot", 5: "Multi-segment", 6: "Partial",
                          255: "Other"}
        meteringmode = metering_names.get(self.lens_meteringmode.currentIndex(), "Unknown")
        # lenstool_params += '-exif:meteringmode=' + str(self.lens_meteringmode.currentIndex()) + ' -xmp:meteringmode=' + str(self.lens_meteringmode.currentIndex()) + ' '
        lenstool_params += '-exif:meteringmode="' + meteringmode + '" -xmp:meteringmode="' + meteringmode + '" '
    print("lenstool_params " + lenstool_params)
    if self.chk_lens_backuporiginals.isChecked():
        write_image_info(self, lenstool_params, qApp, True)
    else:
        write_image_info(self, lenstool_params, qApp, False)
def definedlenschanged(self, qApp):
    """Fill the lens input fields from the lens database entry whose name
    matches the currently selected predefined lens.

    Empty (None) database values are converted to empty strings so the GUI
    fields never show the literal text "None".
    """
    tempstr = lambda val: '' if val is None else val
    clear_lens_fields(self)
    for lens in self.lensdbroot:
        if lens.attrib["name"] == self.predefined_lenses.currentText():
            # BUGFIX: make/model were not run through tempstr, so an empty
            # database element showed up as the string "None" in the GUI
            self.lens_make.setText(str(tempstr(lens.find('make').text)))
            self.lens_model.setText(str(tempstr(lens.find('model').text)))
            self.lens_serialnumber.setText(str(tempstr(lens.find('serialnumber').text)))
            self.lens_focallength.setText(str(tempstr(lens.find('focallength').text)))
            self.lens_focallengthin35mmformat.setText(str(tempstr(lens.find('focallengthin35mmformat').text)))
            self.lens_fnumber.setText(str(tempstr(lens.find('fnumber').text)))
            self.lens_maxaperturevalue.setText(str(tempstr(lens.find('maxaperturevalue').text)))
    #print(str(self.lensdb))
def updatelens(self, qApp):
    """Update the currently selected predefined lens inside the lens
    database (XML) with the values from the GUI fields, then write the
    database to disk and reload the list of defined lenses."""
    print('update lens data for this lens inside the lens database')
    # helper mapping None -> '' (unused below, kept for symmetry with
    # definedlenschanged)
    tempstr = lambda val: '' if val is None else val
    # remember the combobox position so the selection can be restored after
    # the lens list is reloaded
    self.lens_current_index = self.predefined_lenses.currentIndex()
    for lens in self.lensdbroot:
        if lens.attrib["name"] == self.predefined_lenses.currentText():
            # copy every GUI field into the matching XML child element(s)
            for tags in lens.iter('make'):
                tags.text = self.lens_make.text()
            for tags in lens.iter('model'):
                tags.text = self.lens_model.text()
            for tags in lens.iter('serialnumber'):
                tags.text = self.lens_serialnumber.text()
            for tags in lens.iter('focallength'):
                tags.text = self.lens_focallength.text()
            for tags in lens.iter('focallengthin35mmformat'):
                tags.text = self.lens_focallengthin35mmformat.text()
            for tags in lens.iter('maxaperturevalue'):
                tags.text = self.lens_maxaperturevalue.text()
            for tags in lens.iter('fnumber'):
                tags.text = self.lens_fnumber.text()
            # NOTE(review): the triple-quoted block below is a dead string
            # expression (disabled metering-mode handling), not executed code
            '''if self.lens_meteringmode.currentIndex() == 0:
                meteringmode = "Unknown"
            elif self.lens_meteringmode.currentIndex() == 1:
                meteringmode = "Average"
            elif self.lens_meteringmode.currentIndex() == 2:
                meteringmode = "Center-weighted average"
            elif self.lens_meteringmode.currentIndex() == 3:
                meteringmode = "Spot"
            elif self.lens_meteringmode.currentIndex() == 4:
                meteringmode = "Multi-spot"
            elif self.lens_meteringmode.currentIndex() == 5:
                meteringmode = "Multi-segment"
            elif self.lens_meteringmode.currentIndex() == 6:
                meteringmode = "Partial"
            elif self.lens_meteringmode.currentIndex() == 255:
                meteringmode = "Other"
            for tags in lens.iter('meteringmodel'):
                tags.text = meteringmode'''
    # persist the database and refresh the combobox contents
    petgfilehandling.write_lensdb_xml(self, qApp)
    petgfilehandling.read_defined_lenses(self, qApp)
#------------------------------------------------------------------------
# Edit -> Iptc tab and actions
def clear_iptc_fields(self):
    """Empty the IPTC keywords input field and re-tick its checkbox."""
    self.chk_iptc_keywords.setChecked(1)
    self.iptc_keywords.setText("")
def copyiptcfromselected(self, qApp):
    """Copy the IPTC keywords from the selected source image into the IPTC
    input field of the GUI, warning the user when no IPTC data is found."""
    # Start from clean input fields
    clear_iptc_fields(self)
    exiftool_params = ' -e -n -iptc:Keywords '
    output = read_image_info(self, exiftool_params)
    if len(output) == 0:
        message = ("<p>You are trying to copy iptc info from your source image, but your source image "
                   "doesn't contain the specified iptc data or doesn't seem to contain any iptc data (or you didn't select an image).</p>")
        QMessageBox.warning(self, "Error copying iptc info from source image", message)
        return
    # strip the trailing newline, then handle the output line by line;
    # each line looks like "Descriptor : value"
    for line in re.split('\n', output[:-1]):
        descriptor, description = re.split(':', line, 1)
        if descriptor.strip() == "Keywords":
            self.iptc_keywords.setText(description.strip())
def saveiptcdata(self, qApp):
    """Write the IPTC keywords field into the selected image(s), backing up
    the originals when the iptc backup checkbox is ticked."""
    params = ""
    if self.chk_iptc_keywords.isChecked():
        params = ' -iptc:Keywords="{0}" '.format(self.iptc_keywords.text())
    write_image_info(self, params, qApp, self.chk_iptc_backuporiginals.isChecked())
#---
def date_to_datetimeoriginal(self, qApp):
    """After user confirmation, set the file modification date/time of the
    selected image(s) back to the value of their exif:DateTimeOriginal tag."""
    # exiftool copies the tag value into the file system timestamp
    exiftool_params = " -FileModifyDate<DateTimeOriginal "
    # BUGFIX: fixed the typos "your did" and "where as" in the user-facing text
    message = "If you have modified your images in a \"sloppy\" image editor or copied them around or whatever other action(s), the file "
    message += "date/time of your images might have changed to the date you did the action/modification on the image "
    message += "whereas the real file date (= creation date) of your images is most certainly (much) older.\n"
    message += "This function will take the original date/time when the photo was taken from the exif:DateTimeOriginal "
    message += "and use that (again) as file date/time.\n\n"
    message += "Do you want to continue?"
    reply = QMessageBox.question(self, "Set file date/time to DateTimeOriginal?", message, QMessageBox.Yes | QMessageBox.No)
    if reply == QMessageBox.Yes:
        # no backups are made for this action
        write_image_info(self, exiftool_params, qApp, False)
#------------------------------------------------------------------------
# Other dialogs and windows and their related functions
def info_window(self):
    """Show the license text (the COPYING file) in a simple dialog window.

    The COPYING file location depends on the platform and on whether the
    program was started from the python sources or from a frozen
    executable. When it cannot be found, a critical message box is shown
    and the dialog is skipped.
    """
    if self.OSplatform == "Windows":
        if os.path.isfile(os.path.join(self.parent_dir, 'COPYING')):
            # started from python
            license_file = os.path.join(self.parent_dir, 'COPYING')
        elif os.path.isfile(os.path.join(self.realfile_dir, 'COPYING')):
            # Started from the executable
            license_file = os.path.join(self.realfile_dir, 'COPYING')
        else:
            QMessageBox.critical(self, "Can't find the license file", "Please check www.gnu.org/license")
            # BUGFIX: without this return, open(license_file) below raised a
            # NameError because license_file was never assigned
            return
    elif self.OSplatform == "Darwin":
        if os.path.isfile(os.path.join(self.parent_dir, 'COPYING')):
            # started from python
            license_file = os.path.join(self.parent_dir, 'COPYING')
        elif os.path.isfile(os.path.join(self.realfile_dir, "pyexiftoolgui.app", "Contents", "Resources", "COPYING")):
            # Started from the executable
            license_file = os.path.join(self.realfile_dir, "pyexiftoolgui.app", "Contents", "Resources", 'COPYING')
        else:
            QMessageBox.critical(self, "Can't find the license file", "Please check www.gnu.org/license")
            # BUGFIX: bail out instead of falling through to a NameError
            return
    else:
        license_file = os.path.join(self.parent_dir, 'COPYING')
    self.info_window_dialog = QDialog()
    self.info_window_dialog.resize(500, 640)
    self.info_window_text = QTextEdit(self.info_window_dialog)
    self.info_window_text.setGeometry(QRect(3, 11, 491, 591))
    self.info_window_text.setObjectName("info_window_text")
    self.buttonBox = QDialogButtonBox(self.info_window_dialog)
    self.buttonBox.setGeometry(QRect(300, 610, 176, 27))
    self.buttonBox.setStandardButtons(QDialogButtonBox.Close)
    self.buttonBox.setObjectName("buttonBox")
    self.info_window_dialog.setWindowTitle(programinfo.NAME + " " + programinfo.VERSION + " license")
    # BUGFIX: close the file handle instead of leaking it via open(...).read()
    with open(license_file) as license_handle:
        self.info_window_text.setText(license_handle.read())
    QObject.connect(self.buttonBox, SIGNAL("clicked(QAbstractButton*)"), self.info_window_dialog.close)
    QMetaObject.connectSlotsByName(self.info_window_dialog)
    self.info_window_dialog.exec_()
#---
class dialog_synchronizedatetime(QDialog, Ui_SyncDateTimeTagsDialog):
    """Dialog for synchronizing date/time tags.

    Built from the pyside-uic generated Ui_SyncDateTimeTagsDialog class.
    """
    # This loads the py file created by pyside-uic from the ui.
    # the Quiloader segfaults on windows after ending the function
    def __init__(self, parent=None):
        """Build the dialog widgets from the generated UI class."""
        super(dialog_synchronizedatetime, self).__init__(parent)
        self.setupUi(self)
def synchronizedatetime(self, qApp):
    """Create the synchronize date/time dialog.

    NOTE(review): the dialog is only instantiated here; it is never
    exec_()'d or shown in this function — confirm whether that is intended.
    """
    self.synchronizedatetime_dialog = dialog_synchronizedatetime()
#---
def qddt_shift_clicked(self):
    """Disable the absolute date/time input fields of the modify date/time
    dialog while the shift checkbox is ticked, and re-enable them when it
    is unticked."""
    dialog = self.modifydatetime_dialog
    allow_absolute = not dialog.chk_qddt_shift.isChecked()
    dialog.qddt_modifydate.setEnabled(allow_absolute)
    dialog.qddt_datetimeoriginal.setEnabled(allow_absolute)
    dialog.qddt_createdate.setEnabled(allow_absolute)
def qddt_use_reference_image_data(self, qApp):
    """Fill the modify date/time dialog fields either from the reference
    image's EXIF date/time tags (checkbox ticked) or with the current
    date/time (checkbox unticked)."""
    if self.modifydatetime_dialog.chk_qddt_use_referencedata.isChecked():
        exiftool_params = " -exif:ModifyDate -exif:DateTimeOriginal -exif:CreateDate "
        if self.OSplatform == "Windows":
            self.referenceimage = self.referenceimage.replace("/", "\\")
            # NOTE(review): building a shell command by string concatenation
            # with shell=True is fragile and unsafe for untrusted paths;
            # consider passing an argument list with shell=False
            args = '"' + self.exiftoolprog + '" -a ' + exiftool_params + ' ' + self.referenceimage
            p = subprocess.check_output(args, universal_newlines=True, shell=True)
        else:
            command_line = '"' + self.exiftoolprog + '" -a ' + exiftool_params + ' ' + self.referenceimage
            args = shlex.split(command_line)
            p = subprocess.check_output(args, universal_newlines=True)
        # drop the final trailing newline before splitting into lines
        p = p[:-1]
        p_lines = re.split('\n', p)
        for line in p_lines:
            try:
                descriptor, description = re.split(':', line, 1)
                descriptor = descriptor.strip()
                description = description.strip()
                if descriptor == "Modify Date":
                    self.modifydatetime_dialog.qddt_modifydate.setText(description)
                if descriptor == "Date/Time Original":
                    self.modifydatetime_dialog.qddt_datetimeoriginal.setText(description)
                if descriptor == "Create Date":
                    self.modifydatetime_dialog.qddt_createdate.setText(description)
            except ValueError:
                # BUGFIX: was a bare except; only the unpack failure on a line
                # without ':' (ValueError) is expected here
                print("always the last line that doesn't work")
    else:
        # no reference image data requested: default all fields to "now"
        now = datetime.datetime.now()
        strnow = now.strftime("%Y:%m:%d %H:%M:%S")
        self.modifydatetime_dialog.qddt_modifydate.setText(strnow)
        self.modifydatetime_dialog.qddt_datetimeoriginal.setText(strnow)
        self.modifydatetime_dialog.qddt_createdate.setText(strnow)
class dialog_modifydatetime(QDialog, Ui_DateTimeDialog):
    """Dialog for modifying or shifting the date/time tags of images.

    Built from the pyside-uic generated Ui_DateTimeDialog class.
    """
    # This loads the py file created by pyside-uic from the ui.
    # the Quiloader segfaults on windows after ending the function
    def __init__(self, parent=None):
        """Build the dialog widgets from the generated UI class."""
        super(dialog_modifydatetime, self).__init__(parent)
        self.setupUi(self)
def modifydatetime(self, qApp):
    """Open the modify date/time dialog and, when accepted, either shift the
    selected date/time tags by the entered offset or set them to the
    absolute values entered in the dialog."""
    self.modifydatetime_dialog = dialog_modifydatetime()
    # default all absolute fields to "now" and the shift field to zero
    now = datetime.datetime.now()
    strnow = now.strftime("%Y:%m:%d %H:%M:%S")
    self.modifydatetime_dialog.qddt_modifydate.setText(strnow)
    self.modifydatetime_dialog.qddt_datetimeoriginal.setText(strnow)
    self.modifydatetime_dialog.qddt_createdate.setText(strnow)
    self.modifydatetime_dialog.qddt_shiftdatetime.setText("0000:00:00 00:00:00")
    # Set proper event
    # NOTE(review): these connect to moddialog_* handlers; the
    # qddt_shift_clicked / qddt_use_reference_image_data functions look like
    # the matching handlers — verify the handler names
    self.modifydatetime_dialog.chk_qddt_shift.clicked.connect(self.moddialog_shift_clicked)
    self.modifydatetime_dialog.chk_qddt_use_referencedata.clicked.connect(self.moddialog_use_reference_image_data)
    if self.modifydatetime_dialog.exec_() == QDialog.Accepted:
        print("You selected Save")
        if self.modifydatetime_dialog.chk_qddt_shift.isChecked():
            # we will do a date/time shift
            if self.modifydatetime_dialog.qddt_shiftdatetime.text() == "0000:00:00 00:00:00":
                QMessageBox.information(self,"No shift value set", "You selected the shift function but you left the value at \"0000:00:00 00:00:00\".\nI can't do anything. ")
                # exit function
                return
            else:
                print(self.modifydatetime_dialog.qddt_shiftdatetime.text())
                # We will first build the parameter string and then check for forward or backward timeshift and simply use
                # a string replace on the already created exiftool_parameters string
                exiftool_params = ""
                if self.modifydatetime_dialog.chk_qddt_datetimeoriginal.isChecked():
                    exiftool_params += " \"-exif:DateTimeOriginal-=" + self.modifydatetime_dialog.qddt_shiftdatetime.text() + "\" "
                    if self.modifydatetime_dialog.chk_qddt_updatexmp.isChecked():
                        exiftool_params += " \"-xmp:DateTimeOriginal-=" + self.modifydatetime_dialog.qddt_shiftdatetime.text() + "\" "
                if self.modifydatetime_dialog.chk_qddt_modifydate.isChecked():
                    exiftool_params += " \"-exif:ModifyDate-=" + self.modifydatetime_dialog.qddt_shiftdatetime.text() + "\" "
                    if self.modifydatetime_dialog.chk_qddt_updatexmp.isChecked():
                        exiftool_params += " \"-xmp:ModifyDate-=" + self.modifydatetime_dialog.qddt_shiftdatetime.text() + "\" "
                if self.modifydatetime_dialog.chk_qddt_createdate.isChecked():
                    exiftool_params += " \"-exif:CreateDate-=" + self.modifydatetime_dialog.qddt_shiftdatetime.text() + "\" "
                    if self.modifydatetime_dialog.chk_qddt_updatexmp.isChecked():
                        exiftool_params += " \"-xmp:DateTimeDigitized-=" + self.modifydatetime_dialog.qddt_shiftdatetime.text() + "\" "
                if self.modifydatetime_dialog.chk_qddt_forward.isChecked():
                    print("we are going to shift date and time forward")
                    # shifting forward means adding: flip every "-=" into "+="
                    exiftool_params = exiftool_params.replace("-=", "+=")
                write_image_info(self, exiftool_params, qApp, False)
        else:
            # Update the selected date time fields, so no date/time shift
            # BUGFIX: initialize exiftool_params up front; it used to be
            # assigned only when the ModifyDate checkbox was ticked, so any
            # other checkbox combination raised UnboundLocalError on the +=
            exiftool_params = ""
            if self.modifydatetime_dialog.chk_qddt_modifydate.isChecked():
                print("-exif:ModifyDate " + self.modifydatetime_dialog.qddt_modifydate.text())
                exiftool_params += '-exif:ModifyDate="' + self.modifydatetime_dialog.qddt_modifydate.text() + '" '
                if self.modifydatetime_dialog.chk_qddt_updatexmp.isChecked():
                    exiftool_params += '-xmp:ModifyDate="' + self.modifydatetime_dialog.qddt_modifydate.text() + '" '
            if self.modifydatetime_dialog.chk_qddt_datetimeoriginal.isChecked():
                print(self.modifydatetime_dialog.qddt_datetimeoriginal.text())
                exiftool_params += '-exif:DateTimeOriginal="' + self.modifydatetime_dialog.qddt_datetimeoriginal.text() + '" '
                if self.modifydatetime_dialog.chk_qddt_updatexmp.isChecked():
                    exiftool_params += '-xmp:DateTimeOriginal="' + self.modifydatetime_dialog.qddt_datetimeoriginal.text() + '" '
            if self.modifydatetime_dialog.chk_qddt_createdate.isChecked():
                print(self.modifydatetime_dialog.qddt_createdate.text())
                exiftool_params += '-exif:CreateDate="' + self.modifydatetime_dialog.qddt_createdate.text() + '" '
                if self.modifydatetime_dialog.chk_qddt_updatexmp.isChecked():
                    exiftool_params += '-xmp:DateTimeDigitized="' + self.modifydatetime_dialog.qddt_createdate.text() + '" '
            print(exiftool_params)
            write_image_info(self, exiftool_params, qApp, False)
    else:
        print("you cancelled")
        self.statusbar.showMessage("you canceled the \"Modification of date/time\" action")
#---
def check_create_args_boxes(self):
    """Tick or untick every individual metadata checkbox of the create-args
    dialog following the state of the "all metadata" checkbox."""
    dlg = self.create_args_dialog
    state = 1 if dlg.qdca_chk_args_all_metadata.isChecked() else 0
    for box in (dlg.qdca_chk_args_exif_data,
                dlg.qdca_chk_args_xmp_data,
                dlg.qdca_chk_args_gps_data,
                dlg.qdca_chk_args_iptc_data,
                dlg.qdca_chk_args_iccprofile_data):
        box.setChecked(state)
class dialog_create_args(QDialog, Ui_Dialog_create_args):
    """Dialog for choosing which metadata groups to export to args file(s).

    Built from the pyside-uic generated Ui_Dialog_create_args class.
    """
    # This loads the py file created by pyside-uic from the ui.
    # the Quiloader segfaults on windows after ending the function
    def __init__(self, parent=None):
        """Build the dialog widgets from the generated UI class."""
        super(dialog_create_args, self).__init__(parent)
        self.setupUi(self)
        print("create arguments file(s) from selected image(s)")
def create_args(self, qApp):
    """Open the create-args dialog and, when confirmed, export the selected
    metadata group(s) of the selected image(s) to exiftool args file(s)."""
    self.create_args_dialog = dialog_create_args()
    # Set proper event
    self.create_args_dialog.qdca_chk_args_all_metadata.clicked.connect(self.check_create_args_boxes)
    if self.create_args_dialog.exec_() == QDialog.Accepted:
        message = "You selected:\n\n"
        empty_selection = 0
        if self.create_args_dialog.qdca_chk_args_all_metadata.isChecked():
            print("Add all metadata to args file(s)")
            message += "- Add all metadata\n"
            et_param = " -a -all "
        else:
            # provisionally empty; any individual checkbox below clears this
            empty_selection = 1
            et_param = ""
        if self.create_args_dialog.qdca_chk_args_exif_data.isChecked():
            print("Add exif data to args file(s)")
            message += "- Add exif data\n"
            et_param += " -a -exif:all "
            empty_selection = 0
        if self.create_args_dialog.qdca_chk_args_xmp_data.isChecked():
            print("Add xmp data to args file(s)")
            message += "- Add xmp data\n"
            et_param += " -a -xmp:all "
            empty_selection = 0
        if self.create_args_dialog.qdca_chk_args_gps_data.isChecked():
            print("Add gps data to args file(s)")
            message += "- Add gps data\n"
            et_param += " -a -gps:all "
            empty_selection = 0
        if self.create_args_dialog.qdca_chk_args_iptc_data.isChecked():
            print("Add iptc data to args file(s)")
            message += "- Add iptc data\n"
            et_param += " -a -iptc:all "
            empty_selection = 0
        if self.create_args_dialog.qdca_chk_args_iccprofile_data.isChecked():
            print("Add icc profile data to args file(s)")
            message += "- Add icc profile data\n"
            et_param += " -a -icc_profile:all "
            empty_selection = 0
        if empty_selection == 1:
            # BUGFIX: fixed the typo "Nothing will we done." in the user-facing text
            QMessageBox.information(self,"Nothing selected", "You selected nothing. Cancel would have been the correct option.\nNothing will be done.")
        else:
            message += "\nAre you sure you want to add the above metadata from the selected image(s) to your args file(s)?"
            ret = QMessageBox.question(self, "Add metadata from image(s) to args file(s)", message, buttons=QMessageBox.Ok|QMessageBox.Cancel)
            if ret == QMessageBox.Ok:
                print("User wants to continue")
                # -args emits tag=value lines; -w args writes one .args file per image
                et_param += " -args --filename --directory -w args "
                print(et_param)
                write_image_info(self, et_param, qApp, False)
            else:
                self.statusbar.showMessage("you canceled the \"Export metadata to args file(s)\" action")
    else:
        print("you cancelled")
        self.statusbar.showMessage("you canceled the \"Export metadata to args file(s)\" action")
#---
def check_export_metadata_boxes(self):
    """Tick or untick every individual metadata checkbox of the export
    dialog following the state of the "export all" checkbox."""
    dlg = self.export_metadata_dialog
    state = 1 if dlg.qdem_chk_export_all_metadata.isChecked() else 0
    for box in (dlg.qdem_chk_export_exif_data,
                dlg.qdem_chk_export_xmp_data,
                dlg.qdem_chk_export_gps_data,
                dlg.qdem_chk_export_iptc_data,
                dlg.qdem_chk_export_iccprofile_data):
        box.setChecked(state)
def check_xmpexport_metadata_boxes(self):
    """Adapt the metadata checkboxes to the chosen export format: an xmp
    sidecar export supports only xmp data, so force-select xmp and lock the
    other checkboxes; any other format unlocks all checkboxes again."""
    dlg = self.export_metadata_dialog
    if dlg.qdem_xmp_radiobutton.isChecked():
        # force the selection to xmp-only
        dlg.qdem_chk_export_all_metadata.setChecked(0)
        dlg.qdem_chk_export_exif_data.setChecked(0)
        dlg.qdem_chk_export_xmp_data.setChecked(1)
        dlg.qdem_chk_export_gps_data.setChecked(0)
        dlg.qdem_chk_export_iptc_data.setChecked(0)
        dlg.qdem_chk_export_iccprofile_data.setChecked(0)
        # lock every checkbox except the xmp one
        for box in (dlg.qdem_chk_export_all_metadata,
                    dlg.qdem_chk_export_exif_data,
                    dlg.qdem_chk_export_gps_data,
                    dlg.qdem_chk_export_iptc_data,
                    dlg.qdem_chk_export_iccprofile_data):
            box.setEnabled(False)
    else:
        for box in (dlg.qdem_chk_export_all_metadata,
                    dlg.qdem_chk_export_exif_data,
                    dlg.qdem_chk_export_xmp_data,
                    dlg.qdem_chk_export_gps_data,
                    dlg.qdem_chk_export_iptc_data,
                    dlg.qdem_chk_export_iccprofile_data):
            box.setEnabled(True)
class dialog_export_metadata(QDialog, Ui_Dialog_export_metadata):
    """Dialog for choosing which metadata groups to export and in which
    output format.

    Built from the pyside-uic generated Ui_Dialog_export_metadata class.
    """
    # This loads the py file created by pyside-uic from the ui.
    # the Quiloader segfaults on windows after ending the function
    def __init__(self, parent=None):
        """Build the dialog widgets from the generated UI class."""
        super(dialog_export_metadata, self).__init__(parent)
        self.setupUi(self)
        # BUGFIX: the debug message was copy-pasted from dialog_create_args
        # ("create arguments file(s) from selected image(s)")
        print("export metadata from selected image(s)")
def export_metadata(self, qApp):
    """Show the export-metadata dialog and export the chosen metadata groups.

    Builds an exiftool parameter string from the checked metadata groups
    and the selected output format, asks the user for confirmation, then
    delegates the actual exiftool run to write_image_info().
    """
    self.export_metadata_dialog = dialog_export_metadata()
    # Set proper events
    self.export_metadata_dialog.qdem_chk_export_all_metadata.clicked.connect(self.check_export_metadata_boxes)
    # Every output-format radio button runs the same handler, which
    # enforces the special "xmp only" behaviour of the xmp format.
    self.export_metadata_dialog.qdem_txt_radiobutton.clicked.connect(self.check_xmpexport_metadata_boxes)
    self.export_metadata_dialog.qdem_tab_radiobutton.clicked.connect(self.check_xmpexport_metadata_boxes)
    self.export_metadata_dialog.qdem_xml_radiobutton.clicked.connect(self.check_xmpexport_metadata_boxes)
    self.export_metadata_dialog.qdem_html_radiobutton.clicked.connect(self.check_xmpexport_metadata_boxes)
    self.export_metadata_dialog.qdem_xmp_radiobutton.clicked.connect(self.check_xmpexport_metadata_boxes)
    if self.export_metadata_dialog.exec_() == QDialog.Accepted:
        message = "You selected:\n\n"
        # empty_selection stays 1 only when no group at all is checked
        empty_selection = 0
        if self.export_metadata_dialog.qdem_chk_export_all_metadata.isChecked():
            print("export all metadata")
            message += "- export all metadata\n"
            et_param = " -a -all "
        else:
            empty_selection = 1
            et_param = ""
            # Accumulate one exiftool group flag per checked box.
            if self.export_metadata_dialog.qdem_chk_export_exif_data.isChecked():
                print("export exif data")
                message += "- export exif data\n"
                et_param += " -a -exif:all "
                empty_selection = 0
            if self.export_metadata_dialog.qdem_chk_export_xmp_data.isChecked():
                print("export xmp data")
                message += "- export xmp data\n"
                et_param += " -a -xmp:all "
                empty_selection = 0
            if self.export_metadata_dialog.qdem_chk_export_gps_data.isChecked():
                print("export gps data")
                message += "- export gps data\n"
                et_param += " -a -gps:all "
                empty_selection = 0
            if self.export_metadata_dialog.qdem_chk_export_iptc_data.isChecked():
                print("export iptc data")
                message += "- export iptc data\n"
                et_param += " -a -iptc:all "
                empty_selection = 0
            if self.export_metadata_dialog.qdem_chk_export_iccprofile_data.isChecked():
                print("export icc profile data")
                message += "- export icc profile data\n"
                et_param += " -a -icc_profile:all "
                empty_selection = 0
        if empty_selection == 1:
            QMessageBox.information(self,"Nothing selected", "You selected nothing. Cancel would have been the correct option.\nNothing will we done.")
        else:
            message += "\nAre you sure you want to export the above metadata from the selected image(s)?"
            ret = QMessageBox.question(self, "export metadata from image(s)", message, buttons=QMessageBox.Ok|QMessageBox.Cancel)
            if ret == QMessageBox.Ok:
                print("User wants to continue")
                print(et_param)
                # Append (or, for xmpexport, replace with) the output-format
                # marker that write_image_info() dispatches on.
                if self.export_metadata_dialog.qdem_txt_radiobutton.isChecked():
                    et_param += " -w! txt "
                elif self.export_metadata_dialog.qdem_tab_radiobutton.isChecked():
                    et_param += " -t -w! txt "
                elif self.export_metadata_dialog.qdem_xml_radiobutton.isChecked():
                    et_param += " -X -w! xml "
                elif self.export_metadata_dialog.qdem_html_radiobutton.isChecked():
                    et_param += " -h -w! html "
                elif self.export_metadata_dialog.qdem_xmp_radiobutton.isChecked():
                    et_param = " xmpexport "
                elif self.export_metadata_dialog.qdem_csv_radiobutton.isChecked():
                    et_param += " -csv "
                write_image_info(self, et_param, qApp, False)
            else:
                self.statusbar.showMessage("you canceled the \"Export of metadata\" action")
    else:
        print("you cancelled")
        self.statusbar.showMessage("you canceled the \"Export of metadata\" action")
#---
def check_remove_metadata_boxes(self):
    """Mirror the "remove all metadata" master checkbox onto the group boxes."""
    dlg = self.rem_metadata_dialog
    # All group checkboxes follow the state of the master checkbox.
    state = 1 if dlg.chk_rem_all_metadata.isChecked() else 0
    for checkbox in (dlg.chk_rem_exif_data,
                     dlg.chk_rem_xmp_data,
                     dlg.chk_rem_gps_data,
                     dlg.chk_rem_iptc_data,
                     dlg.chk_rem_iccprofile_data):
        checkbox.setChecked(state)
class dialog_remove_metadata(QDialog, Ui_Dialog_remove_metadata):
    """Remove-metadata dialog built from the pyside-uic generated UI class."""
    # This loads the py file created by pyside-uic from the ui.
    # the Quiloader segfaults on windows after ending the function
    def __init__(self, parent=None):
        super(dialog_remove_metadata, self).__init__(parent)
        self.setupUi(self)
def remove_metadata(self, qApp):
    """Show the remove-metadata dialog and strip the chosen metadata groups.

    Builds an exiftool "-<group>:all=" parameter string from the checked
    groups, asks the user for confirmation, then delegates the exiftool
    run to write_image_info() (optionally keeping backups of the
    original files).
    """
    self.rem_metadata_dialog = dialog_remove_metadata()
    # Set proper event
    self.rem_metadata_dialog.chk_rem_all_metadata.clicked.connect(self.check_remove_metadata_boxes)
    if self.rem_metadata_dialog.exec_() == QDialog.Accepted:
        message = "You selected:\n\n"
        # empty_selection stays 1 only when no group at all is checked
        empty_selection = 0
        if self.rem_metadata_dialog.chk_rem_all_metadata.isChecked():
            print("Remove all metadata")
            message += "- Remove all metadata\n"
            et_param = " -all= "
        else:
            empty_selection = 1
            et_param = ""
            # Accumulate one "-group:all=" flag per checked box.
            if self.rem_metadata_dialog.chk_rem_exif_data.isChecked():
                print("Remove exif data")
                message += "- Remove exif data\n"
                et_param += " -exif:all= "
                empty_selection = 0
            if self.rem_metadata_dialog.chk_rem_xmp_data.isChecked():
                print("Remove xmp data")
                message += "- Remove xmp data\n"
                et_param += " -xmp:all= "
                empty_selection = 0
            if self.rem_metadata_dialog.chk_rem_gps_data.isChecked():
                print("Remove gps data")
                message += "- Remove gps data\n"
                et_param += " -gps:all= "
                empty_selection = 0
            if self.rem_metadata_dialog.chk_rem_iptc_data.isChecked():
                print("Remove iptc data")
                message += "- Remove iptc data\n"
                et_param += " -iptc:all= "
                empty_selection = 0
            if self.rem_metadata_dialog.chk_rem_iccprofile_data.isChecked():
                print("Remove icc profile data")
                message += "- Remove icc profile data\n"
                et_param += " -icc_profile:all= "
                empty_selection = 0
        if empty_selection == 1:
            QMessageBox.information(self,"Nothing selected", "You selected nothing. Cancel would have been the correct option.\nNothing will we done.")
        else:
            message += "\nAre you sure you want to remove the above metadata from the selected image(s)?"
            ret = QMessageBox.question(self, "Remove metadata from image(s)", message, buttons=QMessageBox.Ok|QMessageBox.Cancel)
            if ret == QMessageBox.Ok:
                print("User wants to continue")
                print(et_param)
                # backup_originals=True makes exiftool keep "_original" copies
                if self.rem_metadata_dialog.chk_rem_backuporiginals.isChecked():
                    print("make backup of originals")
                    write_image_info(self, et_param, qApp, True)
                else:
                    write_image_info(self, et_param, qApp, False)
            else:
                self.statusbar.showMessage("you canceled the \"Removal of metadata\" action")
    else:
        print("you cancelled")
        self.statusbar.showMessage("you canceled the \"Removal of metadata\" action")
#------------------------------------------------------------------------
# This is the part where the geotag functions will be executed
def write_geotag_info(self, qApp):
    """Validate geotagging prerequisites and kick off the geotag run.

    Requires images to work on (a source folder or a main-screen
    selection) plus a GPS track log file; aborts with a message box when
    either is missing.
    """
    # First make sure there is something to work on at all.
    work_on = check_geotag_folder_before_run_geotag_photos(self)
    if work_on == "nothing_to_work_with":
        # An error message was already shown; nothing more to do here.
        return
    # A GPS track log file is mandatory for geotagging.
    if self.LineEdit_geotag_log_file.text() == "":
        QMessageBox.information(self,"No GPS track log file", "You did not select a GPS track log file\n. Cancelling this action")
        return "nothing_to_work_with"
    # Images and a track log are both available: run the geotagger.
    run_geotag_photos(self, work_on, qApp)
#---
def check_geotag_folder_before_run_geotag_photos(self):
    """Determine what the geotag run should operate on.

    Returns one of three marker strings:
      "geotag_source_folder"  -- the user filled in a source folder
      "main_screen_selection" -- photos are selected in the main table
      "nothing_to_work_with"  -- neither; an error dialog was shown

    Fix over the previous revision: the bare ``except:`` (which also
    swallowed SystemExit/KeyboardInterrupt) is narrowed, and the guard
    only wraps the single attribute access that can actually fail when
    no photos were loaded yet.
    """
    print("self.LineEdit_geotag_source_folder #" + self.LineEdit_geotag_source_folder.text() + "#")
    if self.LineEdit_geotag_source_folder.text() != "":
        # A source folder was specified; work on that whole folder.
        print("geotag_source_folder")
        return "geotag_source_folder"
    # No source folder: fall back to the selection in the main screen.
    # The table/selection may not exist yet when no photos were loaded.
    try:
        selected_rows = self.MaintableWidget.selectedIndexes()
    except Exception:
        selected_rows = []
    if len(selected_rows) == 0:
        QMessageBox.information(self,"Nothing to work with","You did not specify a source folder and neither did you load/select any photos in the main screen.")
        return "nothing_to_work_with"
    print("main_screen_selection")
    return "main_screen_selection"
#---
def run_geotag_photos(self, work_on, qApp):
    """Geotag images from a GPS track log via exiftool.

    ``work_on`` selects the target set: "main_screen_selection" geotags
    the photos selected in the main table one by one, while
    "geotag_source_folder" lets exiftool process the whole folder in a
    single run. Each image/folder is processed twice: once for the exif
    gps tags and once for the xmp geotag equivalents.
    """
    # Now do the real work
    # Check whether user specified a geosync time
    if self.LineEdit_geotagging_geosynctime.text() == "":
        exiftoolparams = " -P -overwrite_original_in_place -geotag '" + self.LineEdit_geotag_log_file.text() + "'"
        xmpparams = " -P -overwrite_original_in_place -xmp:geotag='" + self.LineEdit_geotag_log_file.text() + "'"
    else:
        # A geosync time has been specified. just make sure to remove extra quotes or double quotes
        gstime = self.LineEdit_geotagging_geosynctime.text()
        gstime = gstime.replace("'", "")
        gstime = gstime.replace('"', '')
        exiftoolparams = " -P -overwrite_original_in_place -geotag '" + self.LineEdit_geotag_log_file.text() + "' -geosync=" + gstime + " "
        xmpparams = " -P -overwrite_original_in_place -xmp:geotag='" + self.LineEdit_geotag_log_file.text() + "' -geosync=" + gstime + " "
    # final check
    if work_on == "nothing_to_work_with":
        # This should already been dealt with earlier, but in case I did something stupid we simply exit this function
        return
    elif work_on == "main_screen_selection":
        # we use the images that were selected from the main screen
        print("we use the images that were selected from the main screen")
        selected_rows = self.MaintableWidget.selectedIndexes()
        #exiftoolparams = "'-FileName<" + self.prefix + "_" + self.suffix + ".%le' " + self.prefixformat + " " + self.suffixformat + "-." + self.combobox_digits.currenttext() + "nc" + self.sourcefolder + "/*"
        rowcounter = 0
        total_rows = len(selected_rows)
        self.progressbar.setRange(0, total_rows)
        self.progressbar.setValue(0)
        self.progressbar.show()
        rows = []
        qApp.processEvents()
        for selected_row in selected_rows:
            # Parse "row,column" out of the QModelIndex string form, e.g.
            # "<PySide.QtCore.QModelIndex(row,column,0x0...)>"
            selected_row = str(selected_row)
            selected_row = selected_row.replace("<PySide.QtCore.QModelIndex(",'')
            selected_row, tail = re.split(',0x0',selected_row)
            #print str(selected_row)
            row, column = re.split(',',selected_row)
            # selectedIndexes() yields one index per cell; geotag each row once
            if row not in rows:
                rows.append(row)
                selected_image = "\"" + self.fileNames[int(row)] + "\""
                print('exiftool ' + exiftoolparams + ' ' + selected_image)
                rowcounter += 1
                self.progressbar.setValue(rowcounter)
                parameters = ' ' + exiftoolparams + ' ' + selected_image
                xmpparameters = ' ' + xmpparams + ' ' + selected_image
                self.statusbar.showMessage("Trying to geotag " + os.path.basename(selected_image))
                qApp.processEvents()
                if self.OSplatform in ("Windows", "win32"):
                    # Windows: back-slashed paths, double quotes, run via shell
                    parameters = parameters.replace("/", "\\")
                    parameters = parameters.replace("'", "\"")
                    xmpparameters = xmpparameters.replace("/", "\\")
                    xmpparameters = xmpparameters.replace("'", "\"")
                    args = '"' + self.exiftoolprog + '" ' + parameters
                    xmpargs = '"' + self.exiftoolprog + '" ' + xmpparameters
                    print(args)
                    print(xmpargs)
                    p = subprocess.call(args, shell=True)
                    p = subprocess.call(xmpargs, shell=True)
                else:
                    #parameters = parameters.replace("'", "\"")
                    command_line = '"' + self.exiftoolprog + '" ' + exiftoolparams + ' ' + selected_image
                    xmp_command_line = '"' + self.exiftoolprog + '" ' + xmpparams + ' ' + selected_image
                    args = shlex.split(command_line)
                    xmpargs = shlex.split(xmp_command_line)
                    print("command_line " + command_line)
                    print("xmp command_line " + xmp_command_line)
                    #p = subprocess.call(command_line)
                    p = subprocess.call(args)
                    p = subprocess.call(xmpargs)
        self.statusbar.showMessage("Finished geotagging images where timestamps fit.")
        qApp.processEvents()
        self.progressbar.hide()
        self.statusbar.showMessage("")
    elif work_on == "geotag_source_folder":
        # work on all images in the source folder and do it in this function self
        #print "work on all images in the source folder"
        #print self.rename_photos_dialog.LineEdit_rename_source_folder.text()
        self.statusbar.showMessage("Trying to geotag all images in: " + self.LineEdit_geotag_source_folder.text())
        print("Trying to geotag all images in: " + self.LineEdit_geotag_source_folder.text())
        parameters = exiftoolparams + ' "' + self.LineEdit_geotag_source_folder.text() + '"'
        xmpparameters = xmpparams + ' "' + self.LineEdit_geotag_source_folder.text() + '"'
        if self.OSplatform in ("Windows", "win32"):
            parameters = parameters.replace("/", "\\")
            parameters = parameters.replace("'", "\"")
            xmpparameters = xmpparameters.replace("/", "\\")
            xmpparameters = xmpparameters.replace("'", "\"")
            args = '"' + self.exiftoolprog + '" ' + parameters
            xmpargs = '"' + self.exiftoolprog + '" ' + xmpparameters
            print("args " + args)
            print("xmpargs " + xmpargs)
            p = subprocess.call(args, shell=True)
            p = subprocess.call(xmpargs, shell=True)
        else:
            # escape spaces so the shell-run command line stays intact
            pathofimages = self.LineEdit_geotag_source_folder.text().replace(" ", "\\ ")
            command_line = '"' + self.exiftoolprog + '" ' + exiftoolparams + ' "' + pathofimages + '"'
            xmpcommand_line = '"' + self.exiftoolprog + '" ' + xmpparams + ' "' + pathofimages + '"'
            print("command_line " + command_line)
            print("xmpcommandline " + xmpcommand_line)
            p = subprocess.call(command_line, shell=True)
            p = subprocess.call(xmpcommand_line, shell=True)
        self.statusbar.showMessage("Finished geotagging all images in: " + self.LineEdit_geotag_source_folder.text() + " where timestamps fit.")
#------------------------------------------------------------------------
# This is the part where your own exiftool parameters will be executed
def yourcommands_go(self, qApp):
    """Run the user-supplied exiftool parameters on the selected images.

    Reads the raw parameter string from the "your commands" input field,
    runs exiftool once per selected image, and appends each run's output
    to the "your commands" output pane.

    Fixes over the previous revision: the two bare ``except:`` clauses
    are narrowed to the errors check_output can raise, the "is/ware"
    typo in the second error message is corrected to match the first,
    and the unused ``output_text``/``mysoftware`` locals plus a dead
    triple-quoted code block were removed.
    """
    exiftoolparams = " " + self.yourcommands_input.text() + " "
    selected_rows = self.MaintableWidget.selectedIndexes()
    if len(selected_rows) == 0:
        self.the_no_photos_messagebox()
    else:
        print('number of rows ' + str(len(selected_rows)))
        rowcounter = 0
        total_rows = len(selected_rows)
        self.progressbar.setRange(0, total_rows)
        self.progressbar.setValue(0)
        self.progressbar.show()
        rows = []
        for selected_row in selected_rows:
            # Parse "row,column" out of the QModelIndex string form.
            selected_row = str(selected_row)
            selected_row = selected_row.replace("<PySide.QtCore.QModelIndex(",'')
            selected_row, tail = re.split(',0x0',selected_row)
            row, column = re.split(',',selected_row)
            # selectedIndexes() yields one index per cell; run once per row
            if row not in rows:
                rows.append(row)
                selected_image = "\"" + self.fileNames[int(row)] + "\""
                print('exiftool ' + exiftoolparams + ' ' + selected_image)
                rowcounter += 1
                self.progressbar.setValue(rowcounter)
                if self.OSplatform in ("Windows", "win32"):
                    # Windows: back-slashed path, run via the shell
                    selected_image = selected_image.replace("/", "\\")
                    args = '"' + self.exiftoolprog + '" ' + exiftoolparams + selected_image
                    try:
                        p = subprocess.check_output(args, universal_newlines=True, shell=True)
                    except (subprocess.CalledProcessError, OSError):
                        p = "Your parameter(s) is/are wrong and could not be executed at all by exiftool.\nTherefore you don't get output."
                else:
                    command_line = '"' + self.exiftoolprog + '" ' + exiftoolparams + selected_image
                    print(command_line)
                    args = shlex.split(command_line)
                    try:
                        p = subprocess.check_output(args, universal_newlines=True)
                    except (subprocess.CalledProcessError, OSError):
                        p = "Your parameter(s) is/are wrong and could not be executed at all by exiftool.\nTherefore you don't get output."
                if p == "":
                    p = "Your parameters did not return output.\nEither there is no output or you did something wrong."
                # strip the trailing newline before displaying
                p = p[:-1]
                self.statusbar.showMessage("Executing your parameter(s) on: " + selected_image)
                self.yourcommands_output.insertPlainText("==== " + selected_image + " ====\n")
                self.yourcommands_output.insertPlainText(str(p))
                self.yourcommands_output.insertPlainText("\n\n\n")
        self.progressbar.hide()
        self.statusbar.showMessage("Finished executing your parameter(s)")
#------------------------------------------------------------------------
# Real exiftool read/write functions
def read_image_info(self, exiftool_params):
    """Run exiftool with the given parameters on the currently selected image.

    Returns exiftool's stdout as a string.
    """
    self.statusbar.showMessage("")
    # Work out which file is selected, depending on the active view mode.
    if self.images_view.currentText() == "by cells":
        cell_index = (self.MaintableWidget.columnCount() * self.MaintableWidget.currentRow()) + self.MaintableWidget.currentColumn()
        selected_image = "\"" + self.fileNames[int(cell_index)] + "\""
    else:
        selected_image = "\"" + self.fileNames[self.MaintableWidget.currentRow()] + "\""
    if self.OSplatform in ("Windows", "win32"):
        # Windows: back-slashed path, run via the shell.
        selected_image = selected_image.replace("/", "\\")
        args = '"' + self.exiftoolprog + '" ' + exiftool_params + selected_image
        output = subprocess.check_output(args, universal_newlines=True, shell=True)
    else:
        command_line = '"' + self.exiftoolprog + '" ' + exiftool_params + selected_image
        output = subprocess.check_output(shlex.split(command_line), universal_newlines=True)
    return output
def write_image_info(self, exiftoolparams, qApp, backup_originals):
    """Run exiftool with the given parameters on all selected image(s).

    Marker substrings embedded in ``exiftoolparams`` select the mode:
    " -w! " exports metadata to files, " -csv " collects all images into
    one csv export, " -args " creates args files, " xmpexport " writes
    xmp sidecar files, " -FileModifyDate<DateTimeOriginal " only touches
    the file date; anything else writes metadata into the photos
    (keeping backups of the originals when ``backup_originals`` is True).
    """
    mysoftware = programinfo.NAME + " " + programinfo.VERSION
    xmpexportparam = ""
    # silly if/elif/else statement. improve later
    if exiftoolparams =="":
        # nothing to do
        self.statusbar.showMessage("no changes")
    else:
        if " -w! " in exiftoolparams:
            # exporting metadata
            print("exporting metadata")
            #exiftoolparams += " -overwrite_original_in_place "
        elif " -csv " in exiftoolparams:
            # Create args file(s) from selected images(s)
            print("Exporting metadata from selected images(s)to csv file")
            images_to_csv = exiftoolparams + ' '
        elif " -args " in exiftoolparams:
            # Create args file(s) from selected images(s)
            print("Create args file(s) from selected images(s)")
        elif " xmpexport " in exiftoolparams:
            # Create xmp file(s) from selected images(s) only for xmp data
            print("Create xmp file(s) from selected images(s) only for xmp data")
            # create extra variable otherwise exiftoolparams ovewrites original xmpexport string, bit clumsy but it works
            xmpexportparam = exiftoolparams
        elif " -FileModifyDate<DateTimeOriginal " in exiftoolparams:
            print("Only change file date/time to DateTimeOriginal")
        else:
            # writing metadata info to photos
            if backup_originals == True:
                # no -overwrite_original_in_place: exiftool keeps backups
                if self.OSplatform in ("Windows", "win32"):
                    exiftoolparams = " -P -ProcessingSoftware=\"" + mysoftware + "\" " + exiftoolparams
                else:
                    exiftoolparams = " -P -ProcessingSoftware='" + mysoftware + "' " + exiftoolparams
            else:
                if self.OSplatform in ("Windows", "win32"):
                    exiftoolparams = " -P -overwrite_original_in_place -ProcessingSoftware=\"" + mysoftware + "\" " + exiftoolparams
                else:
                    exiftoolparams = " -P -overwrite_original_in_place -ProcessingSoftware='" + mysoftware + "' " + exiftoolparams
        selected_rows = self.MaintableWidget.selectedIndexes()
        print('number of rows ' + str(len(selected_rows)))
        rowcounter = 0
        total_rows = len(selected_rows)
        self.progressbar.setRange(0, total_rows)
        self.progressbar.setValue(0)
        self.progressbar.show()
        rows = []
        for selected_row in selected_rows:
            #selected_row = str(selected_row)
            #selected_row = selected_row.replace("<PySide.QtCore.QModelIndex(",'')
            #selected_row, tail = re.split(',0x0',selected_row)
            #print str(selected_row)
            #row, column = re.split(',',selected_row)
            row, column = selected_row.row(), selected_row.column()
            # selectedIndexes() yields one index per cell; handle each
            # (row, column) pair only once
            if str(str(row)+","+str(column)) not in rows:
                rows.append(str(row)+","+str(column))
                if self.images_view.currentText() == "by cells":
                    selected_image = "\"" + self.fileNames[int((self.MaintableWidget.columnCount()*row)+column)] + "\""
                else:
                    selected_image = "\"" + self.fileNames[int(row)] + "\""
                print('exiftool ' + exiftoolparams + ' ' + selected_image)
                #print 'exiftool "-FileModifyDate<DateTimeOriginal" ' + selected_image
                rowcounter += 1
                self.progressbar.setValue(rowcounter)
                if " -csv " in exiftoolparams:
                    # First collect images. Do not write yet
                    # if self.OSplatform in ("Windows", "win32"):
                    # images_to_csv += " " + selected_image + " "
                    # else:
                    images_to_csv += ' ' + selected_image + ' '
                    #print images_to_csv
                else:
                    # All other actions are performed per image.
                    if " -w " in exiftoolparams:
                        self.statusbar.showMessage("Exporting information from: " + os.path.basename(selected_image) + " to chosen export format")
                    elif " -args " in exiftoolparams:
                        self.statusbar.showMessage("Create args file from: " + os.path.basename(selected_image))
                    elif "copymetadatatoxmp" in exiftoolparams:
                        self.statusbar.showMessage("Create all metadata internally inside " + os.path.basename(selected_image) + " to xmp format")
                        if self.OSplatform in ("Windows", "win32"):
                            exiftoolparams = " -TagsFromFile " + selected_image.replace("/", "\\") + " \"-all>xmp:all\" "
                        else:
                            exiftoolparams = " -TagsFromFile " + selected_image + " '-all>xmp:all' "
                    else:
                        #check whether we do an xmp to xmp file export
                        if xmpexportparam == "":
                            # no it's not an xmp to xmp file export, this means all other actions
                            self.statusbar.showMessage("Writing information to: " + os.path.basename(selected_image))
                        else:
                            # less frequent so put the xmp export to xmp here
                            self.statusbar.showMessage("Create xmp file from: " + os.path.basename(selected_image))
                            base = os.path.basename(selected_image)
                            basexmp = os.path.splitext(base)[0] + ".xmp"
                            #print "basexmp " + basexmp
                            if os.path.isfile(os.path.join(self.image_folder, basexmp)):
                                # remove xmp file first as exiftool doesn't overwrite
                                fls = os.remove(os.path.join(self.image_folder, basexmp))
                            exiftoolparams = " -o \"" + os.path.join(self.image_folder, basexmp) + "\" -xmp "
                    qApp.processEvents()
                    if self.OSplatform in ("Windows", "win32"):
                        # First write the info
                        selected_image = selected_image.replace("/", "\\")
                        args = '"' + self.exiftoolprog + '" ' + exiftoolparams + selected_image
                        p = subprocess.call(args, shell=True)
                    else:
                        # First write the info
                        command_line = '"' + self.exiftoolprog + '" ' + exiftoolparams + selected_image
                        print(command_line)
                        args = shlex.split(command_line)
                        p = subprocess.call(args)
        self.progressbar.hide()
        # csv option: After having collected the images
        if " -csv " in exiftoolparams:
            # Use self.image_folder from loading the images
            if self.OSplatform in ("Windows", "win32"):
                parameters = " " + images_to_csv + " > \"" + os.path.join(self.image_folder, "output.csv") + "\""
                #parameters = " " + images_to_csv + " > output.csv"
                parameters = parameters.replace("/", "\\")
                args = '"' + self.exiftoolprog + '" ' + parameters
                print(args)
                p = subprocess.call(args, shell=True)
            else:
                command_line = '"' + self.exiftoolprog + '" ' + images_to_csv + ' > \'' + os.path.join(self.image_folder, 'output.csv') + '\''
                #args = shlex.split(command_line)
                print(command_line)
                #p = subprocess.call(args,shell=True)
                p = subprocess.call(command_line,shell=True)
        # end of csv option
        if " -w " in exiftoolparams:
            self.statusbar.showMessage("Done exporting the metadata for the selected image(s)")
        elif " -args " in exiftoolparams:
            self.statusbar.showMessage("Done creating the args file(s) for the selected image(s)")
        elif " -csv " in exiftoolparams:
            self.statusbar.showMessage("Done creating the csv file for the selected image(s)")
        else:
            self.statusbar.showMessage("Done writing the info to the selected image(s)")
| hvdwolf/pyExifToolGUI | scripts/petgfunctions.py | Python | gpl-3.0 | 115,680 |
"""
My purpose in life is to sync the mesosite stations table to other
databases. This will hopefully remove some hackery
"""
import sys
import numpy as np
from pyiem.util import get_dbconn, get_dbconnstr, logger, utc
from pandas import read_sql
LOG = logger()
def sync(df, dbname):
    """
    Actually do the syncing, please

    Args:
        df: DataFrame of the upstream mesosite ``stations`` table,
            indexed by iemid and also carrying an ``iemid`` column.
        dbname: name of the subscribed database to bring up to date.
    """
    # connect to synced database
    dbconn = get_dbconn(dbname)
    dbcursor = dbconn.cursor()
    # Figure out our latest revision
    dbcursor.execute("SELECT max(modified), max(iemid) from stations")
    row = dbcursor.fetchone()
    # Fallbacks cover a completely empty local stations table
    maxts = row[0] or utc(1980, 1, 1)
    maxid = row[1] or -1
    # Check for stations that were removed from mesosite
    localdf = read_sql(
        "SELECT iemid, modified from stations ORDER by iemid ASC",
        get_dbconnstr(dbname),
        index_col="iemid",
    )
    localdf["iemid"] = localdf.index.values
    todelete = localdf.index.difference(df.index)
    if not todelete.empty:
        for iemid in todelete.values:
            dbcursor.execute(
                "DELETE from stations where iemid = %s",
                (iemid,),
            )
        # Commit the deletions before applying inserts/updates
        dbcursor.close()
        dbconn.commit()
        dbcursor = dbconn.cursor()
    # Rows that are new (higher iemid) or changed (newer modified stamp)
    changes = df[(df["iemid"] > maxid) | (df["modified"] > maxts)]
    for iemid, row in changes.iterrows():
        # NaN is not valid for the database driver; swap for NULL
        prow = row.replace({np.nan: None}).to_dict()
        if prow["iemid"] not in localdf.index:
            dbcursor.execute(
                "INSERT into stations(iemid, network, id) VALUES(%s, %s, %s)",
                (prow["iemid"], prow["network"], prow["id"]),
            )
        # insert queried stations
        dbcursor.execute(
            """
            UPDATE stations SET name = %(name)s,
            state = %(state)s, elevation = %(elevation)s, online = %(online)s,
            geom = %(geom)s, params = %(params)s, county = %(county)s,
            plot_name = %(plot_name)s, climate_site = %(climate_site)s,
            wfo = %(wfo)s, archive_begin = %(archive_begin)s,
            archive_end = %(archive_end)s, remote_id = %(remote_id)s,
            tzname = %(tzname)s, country = %(country)s,
            modified = %(modified)s, network = %(network)s,
            metasite = %(metasite)s,
            sigstage_low = %(sigstage_low)s,
            sigstage_action = %(sigstage_action)s,
            sigstage_bankfull = %(sigstage_bankfull)s,
            sigstage_flood = %(sigstage_flood)s,
            sigstage_moderate = %(sigstage_moderate)s,
            sigstage_major = %(sigstage_major)s,
            sigstage_record = %(sigstage_record)s, ugc_county = %(ugc_county)s,
            ugc_zone = %(ugc_zone)s, id = %(id)s, ncdc81 = %(ncdc81)s,
            ncei91 = %(ncei91)s,
            temp24_hour = %(temp24_hour)s, precip24_hour = %(precip24_hour)s
            WHERE iemid = %(iemid)s
            """,
            prow,
        )
    LOG.info(
        "DB: %-7s Del %3s Mod %4s rows TS: %s IEMID: %s",
        dbname,
        len(todelete),
        len(changes.index),
        maxts.strftime("%Y/%m/%d %H:%M"),
        maxid,
    )
    dbcursor.close()
    dbconn.commit()
    # close connection
    dbconn.close()
def main(argv):
    """Go Main Go

    Pulls the full mesosite stations table once, then pushes it to each
    subscribed database. Passing two extra CLI arguments switches to the
    laptop workflow: sync from the upstream host and include a local
    mesosite copy as the first subscriber.
    """
    mesosite = get_dbconnstr("mesosite")
    subscribers = (
        "iem isuag coop hads hml asos asos1min postgis raob"
    ).split()
    if len(argv) == 3:
        LOG.info(
            "Running laptop syncing from upstream, assume iemdb is localhost!"
        )
        # HACK
        mesosite = get_dbconnstr(
            "mesosite", host="172.16.172.1", user="nobody"
        )
        subscribers.insert(0, "mesosite")
    df = read_sql(
        "SELECT * from stations ORDER by iemid ASC",
        mesosite,
        index_col="iemid",
    )
    # keep iemid available as a regular column too (used by sync())
    df["iemid"] = df.index.values
    for sub in subscribers:
        sync(df, sub)


if __name__ == "__main__":
    main(sys.argv)
| akrherz/iem | scripts/dbutil/sync_stations.py | Python | mit | 3,895 |
#!/usr/bin/python2.4
# -*- coding: iso-8859-1 -*-
# Copyright (C) 2005-2009 Bastian Kleineidam
"""
Print content of an URL.
"""
import httplib
import urlparse
import sys
def _main():
    """
    USAGE: scripts/run.sh test/get.py <url>
    """
    if len(sys.argv) != 2:
        print _main.__doc__.strip()
        sys.exit(1)
    url = sys.argv[1]
    # urlsplit -> (scheme, netloc, path, query, fragment)
    parts = urlparse.urlsplit(url)
    host = parts[1]
    # rebuild just the path?query#fragment part for the request line
    path = urlparse.urlunsplit(('', '', parts[2], parts[3], parts[4]))
    h = httplib.HTTPConnection(host)
    # debuglevel=1 echoes the raw HTTP exchange to stdout
    h.set_debuglevel(1)
    h.connect()
    # skip_host=1: we send the Host header ourselves below
    h.putrequest("GET", path, skip_host=1)
    h.putheader("Host", host)
    h.endheaders()
    req = h.getresponse()
    print req.read()

if __name__ == '__main__':
    _main()
| HomeRad/TorCleaner | test/get.py | Python | gpl-2.0 | 733 |
# -*- coding: utf-8 -*-
"""A modulestore wrapper
It will 'unwrap' ccx keys on the way in and re-wrap them on the way out
In practical terms this means that when an object is retrieved from modulestore
using a CCXLocator or CCXBlockUsageLocator as the key, the equivalent
CourseLocator or BlockUsageLocator will actually be used. And all objects
returned from the modulestore will have their keys updated to be the CCX
version that was passed in.
"""
from contextlib import contextmanager
from functools import partial
from ccx_keys.locator import CCXBlockUsageLocator, CCXLocator
from opaque_keys.edx.locator import BlockUsageLocator, CourseLocator
from xmodule.modulestore import XMODULE_FIELDS_WITH_USAGE_KEYS
def strip_ccx(val):
    """remove any reference to a CCX from the incoming value

    return a tuple of the stripped value and the id of the ccx
    """
    ccx_id = None
    stripped = val
    if isinstance(val, CCXLocator):
        ccx_id = val.ccx
        stripped = val.to_course_locator()
    elif isinstance(val, CCXBlockUsageLocator):
        ccx_id = val.course_key.ccx
        stripped = val.to_block_locator()
    else:
        # Not a locator itself: strip any locator-valued xmodule fields
        # in place, keeping the ccx id of the last field found.
        for name in XMODULE_FIELDS_WITH_USAGE_KEYS:
            if hasattr(stripped, name):
                new_value, ccx_id = strip_ccx(getattr(stripped, name))
                setattr(stripped, name, new_value)
    return stripped, ccx_id
def restore_ccx(val, ccx_id):
    """restore references to a CCX to the incoming value

    returns the value converted to a CCX-aware state, using the provided
    ccx_id
    """
    if isinstance(val, CourseLocator):
        # Course keys convert directly; nothing further to recurse into.
        return CCXLocator.from_course_locator(val, ccx_id)
    if isinstance(val, BlockUsageLocator):
        ccx_course_key = restore_ccx(val.course_key, ccx_id)
        val = CCXBlockUsageLocator(ccx_course_key, val.block_type, val.block_id)
    # Re-wrap any locator-valued xmodule fields in place.
    for name in XMODULE_FIELDS_WITH_USAGE_KEYS:
        if hasattr(val, name):
            setattr(val, name, restore_ccx(getattr(val, name), ccx_id))
    if hasattr(val, 'children'):
        val.children = restore_ccx_collection(val.children, ccx_id)
    return val
def restore_ccx_collection(field_value, ccx_id=None):
    """restore references to a CCX to collections of incoming values

    returns the original collection with all values converted to a
    ccx-aware state, using the provided ccx_id
    """
    if ccx_id is None:
        # Nothing was stripped, so there is nothing to restore.
        return field_value
    if isinstance(field_value, list):
        return [restore_ccx(item, ccx_id) for item in field_value]
    if isinstance(field_value, dict):
        # Restore values in place; keys are untouched.
        for key in field_value:
            field_value[key] = restore_ccx(field_value[key], ccx_id)
        return field_value
    return restore_ccx(field_value, ccx_id)
@contextmanager
def remove_ccx(to_strip):
    """A context manager for wrapping modulestore api methods.

    yields a stripped value and a function suitable for restoring it
    """
    stripped, ccx = strip_ccx(to_strip)
    # The restore function re-applies the stripped ccx id (a no-op when
    # no ccx id was found).
    yield stripped, partial(restore_ccx_collection, ccx_id=ccx)
class CCXModulestoreWrapper(object):
"""This class wraps a modulestore
The purpose is to remove ccx-specific identifiers during lookup and restore
it after retrieval so that data can be stored local to a course, but
referenced in app context as ccx-specific
"""
    def __init__(self, modulestore):
        """wrap the provided modulestore"""
        # Assign via __dict__ directly: plain attribute assignment would
        # go through our overridden __setattr__ and hit the wrapped
        # store instead of this wrapper.
        self.__dict__['_modulestore'] = modulestore
    def __getattr__(self, name):
        """look up missing attributes on the wrapped modulestore"""
        # Only called when normal lookup fails, so wrapper methods win.
        return getattr(self._modulestore, name)
    def __setattr__(self, name, value):
        """set attributes only on the wrapped modulestore"""
        setattr(self._modulestore, name, value)
    def __delattr__(self, name):
        """delete attributes only on the wrapped modulestore"""
        delattr(self._modulestore, name)
    def _clean_locator_for_mapping(self, locator):
        """See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
        # Strip the CCX id, delegate, then re-apply the id to the result.
        with remove_ccx(locator) as (locator, restore):
            # pylint: disable=protected-access
            return restore(
                self._modulestore._clean_locator_for_mapping(locator)
            )
def _get_modulestore_for_courselike(self, locator=None):
"""See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
if locator is not None:
locator, _ = strip_ccx(locator)
# pylint: disable=protected-access
return self._modulestore._get_modulestore_for_courselike(locator)
def fill_in_run(self, course_key):
"""See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
with remove_ccx(course_key) as (course_key, restore):
return restore(self._modulestore.fill_in_run(course_key))
def has_item(self, usage_key, **kwargs):
"""See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
usage_key, _ = strip_ccx(usage_key)
return self._modulestore.has_item(usage_key, **kwargs)
def get_item(self, usage_key, depth=0, **kwargs):
"""See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
with remove_ccx(usage_key) as (usage_key, restore):
return restore(
self._modulestore.get_item(usage_key, depth, **kwargs)
)
def get_items(self, course_key, **kwargs):
"""See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
with remove_ccx(course_key) as (course_key, restore):
return restore(self._modulestore.get_items(course_key, **kwargs))
    def get_course(self, course_key, depth=0, **kwargs):
        """See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
        # Load the underlying course by its plain key, then re-tag the
        # result as CCX-specific.
        with remove_ccx(course_key) as (course_key, restore):
            return restore(self._modulestore.get_course(
                course_key, depth=depth, **kwargs
            ))
    def has_course(self, course_id, ignore_case=False, **kwargs):
        """See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
        # Delegate with the CCX-free id and restore CCX info on the result
        # the wrapped store hands back.
        with remove_ccx(course_id) as (course_id, restore):
            return restore(self._modulestore.has_course(
                course_id, ignore_case=ignore_case, **kwargs
            ))
    def delete_course(self, course_key, user_id):
        """
        See xmodule.modulestore.__init__.ModuleStoreWrite.delete_course
        """
        # Deletion needs no CCX info on the result, so strip without restore.
        course_key, _ = strip_ccx(course_key)
        return self._modulestore.delete_course(course_key, user_id)
    def get_parent_location(self, location, **kwargs):
        """See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
        # The parent location is re-tagged with the same CCX info that was
        # stripped from the child's location.
        with remove_ccx(location) as (location, restore):
            return restore(
                self._modulestore.get_parent_location(location, **kwargs)
            )
    def get_block_original_usage(self, usage_key):
        """See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
        # Only the returned key is restored; the version component of the
        # (key, version) pair is passed through untouched.
        with remove_ccx(usage_key) as (usage_key, restore):
            orig_key, version = self._modulestore.get_block_original_usage(usage_key)
            return restore(orig_key), version
    def get_modulestore_type(self, course_id):
        """See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
        # Strip the CCX id for the lookup and run the result through
        # restore() like the other delegating wrappers.
        with remove_ccx(course_id) as (course_id, restore):
            return restore(self._modulestore.get_modulestore_type(course_id))
    def get_orphans(self, course_key, **kwargs):
        """See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
        # Orphan lookup happens against the plain course; results are
        # re-tagged as CCX-specific on the way out.
        with remove_ccx(course_key) as (course_key, restore):
            return restore(self._modulestore.get_orphans(course_key, **kwargs))
    def clone_course(self, source_course_id, dest_course_id, user_id, fields=None, **kwargs):
        """See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
        # Both keys are stripped of CCX info for the clone, but only the
        # destination's restore callable is applied to the result (the
        # source's restore is deliberately discarded).
        with remove_ccx(source_course_id) as (source_course_id, _):
            with remove_ccx(dest_course_id) as (dest_course_id, dest_restore):
                return dest_restore(self._modulestore.clone_course(
                    source_course_id, dest_course_id, user_id, fields=fields, **kwargs
                ))
    def create_item(self, user_id, course_key, block_type, block_id=None, fields=None, **kwargs):
        """See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
        # Create in the plain course, then re-tag the new item as
        # CCX-specific for the caller.
        with remove_ccx(course_key) as (course_key, restore):
            return restore(self._modulestore.create_item(
                user_id, course_key, block_type, block_id=block_id, fields=fields, **kwargs
            ))
    def create_child(self, user_id, parent_usage_key, block_type, block_id=None, fields=None, **kwargs):
        """See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
        # Same strip/delegate/restore dance as create_item, keyed off the
        # parent's usage key.
        with remove_ccx(parent_usage_key) as (parent_usage_key, restore):
            return restore(self._modulestore.create_child(
                user_id, parent_usage_key, block_type, block_id=block_id, fields=fields, **kwargs
            ))
    def import_xblock(self, user_id, course_key, block_type, block_id, fields=None, runtime=None, **kwargs):
        """See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
        # Import into the plain course and restore CCX info on the imported
        # block before returning it.
        with remove_ccx(course_key) as (course_key, restore):
            return restore(self._modulestore.import_xblock(
                user_id, course_key, block_type, block_id, fields=fields, runtime=runtime, **kwargs
            ))
    def copy_from_template(self, source_keys, dest_key, user_id, **kwargs):
        """See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
        # Only the destination key carries CCX info here; source_keys are
        # passed through to the wrapped store unchanged.
        with remove_ccx(dest_key) as (dest_key, restore):
            return restore(self._modulestore.copy_from_template(
                source_keys, dest_key, user_id, **kwargs
            ))
    def update_item(self, xblock, user_id, allow_not_found=False, **kwargs):
        """See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
        # NOTE(review): remove_ccx is applied to a whole xblock here, not a
        # key — presumably it strips the CCX id from the block's location;
        # confirm against the remove_ccx implementation.
        with remove_ccx(xblock) as (xblock, restore):
            return restore(self._modulestore.update_item(
                xblock, user_id, allow_not_found=allow_not_found, **kwargs
            ))
    def delete_item(self, location, user_id, **kwargs):
        """See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
        # Delete by the CCX-free location; restore() is applied to whatever
        # the wrapped store returns, keeping the wrapper uniform.
        with remove_ccx(location) as (location, restore):
            return restore(
                self._modulestore.delete_item(location, user_id, **kwargs)
            )
    def revert_to_published(self, location, user_id):
        """See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
        # Standard strip/delegate/restore wrapper around the publish-state
        # rollback of the wrapped store.
        with remove_ccx(location) as (location, restore):
            return restore(
                self._modulestore.revert_to_published(location, user_id)
            )
    def create_xblock(self, runtime, course_key, block_type, block_id=None, fields=None, **kwargs):
        """See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
        # Build the xblock against the plain course key and re-tag the new
        # block as CCX-specific.
        with remove_ccx(course_key) as (course_key, restore):
            return restore(self._modulestore.create_xblock(
                runtime, course_key, block_type, block_id=block_id, fields=fields, **kwargs
            ))
    def has_published_version(self, xblock):
        """See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
        # Like update_item, remove_ccx is applied to the xblock itself before
        # delegating to the wrapped store.
        with remove_ccx(xblock) as (xblock, restore):
            return restore(self._modulestore.has_published_version(xblock))
    def publish(self, location, user_id, **kwargs):
        """See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
        # Publish the underlying (non-CCX) location; restore the CCX info on
        # the returned value.
        with remove_ccx(location) as (location, restore):
            return restore(
                self._modulestore.publish(location, user_id, **kwargs)
            )
    def unpublish(self, location, user_id, **kwargs):
        """See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
        # Mirror image of publish(): same strip/delegate/restore pattern.
        with remove_ccx(location) as (location, restore):
            return restore(
                self._modulestore.unpublish(location, user_id, **kwargs)
            )
    def convert_to_draft(self, location, user_id):
        """See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
        # Draft conversion happens on the plain location; CCX info is
        # reinstated on the converted block.
        with remove_ccx(location) as (location, restore):
            return restore(
                self._modulestore.convert_to_draft(location, user_id)
            )
    def has_changes(self, xblock):
        """See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
        # Another xblock-level wrapper, same pattern as has_published_version.
        with remove_ccx(xblock) as (xblock, restore):
            return restore(self._modulestore.has_changes(xblock))
    def check_supports(self, course_key, method):
        """See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
        # Capability checks return plain data, so no restore is needed.
        course_key, _ = strip_ccx(course_key)
        return self._modulestore.check_supports(course_key, method)
    @contextmanager
    def branch_setting(self, branch_setting, course_id=None):
        """See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
        # Re-expose the wrapped store's context manager with the CCX info
        # stripped from the course id; nothing is yielded to the caller.
        course_id, _ = strip_ccx(course_id)
        with self._modulestore.branch_setting(branch_setting, course_id):
            yield
    @contextmanager
    def bulk_operations(self, course_id, emit_signals=True, ignore_case=False):
        """See the docs for xmodule.modulestore.mixed.MixedModuleStore"""
        # Same shape as branch_setting: strip the CCX id and wrap the
        # underlying store's bulk-operation context.
        course_id, _ = strip_ccx(course_id)
        with self._modulestore.bulk_operations(course_id, emit_signals=emit_signals, ignore_case=ignore_case):
            yield
| ahmedaljazzar/edx-platform | lms/djangoapps/ccx/modulestore.py | Python | agpl-3.0 | 13,437 |
# (c) 2012-2018, Ansible by Red Hat
#
# This file is part of Ansible Galaxy
#
# Ansible Galaxy is free software: you can redistribute it and/or modify
# it under the terms of the Apache License as published by
# the Apache Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# Ansible Galaxy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Apache License for more details.
#
# You should have received a copy of the Apache License
# along with Galaxy. If not, see <http://www.apache.org/licenses/>.
import django
from django.db import models
from distutils.version import LooseVersion
if django.VERSION < (1, 7):
    # South handled schema migrations before Django 1.7; custom fields must
    # be registered with its introspector so it can freeze them.
    from south.modelsinspector import add_introspection_rules
else:
    # Django >= 1.7 ships native migrations, so registration is a no-op
    # that keeps the module-level calls below harmless.
    def add_introspection_rules(*args, **kwargs):
        pass
# Public API of this module.
__all__ = ['LooseVersionField', 'TruncatingCharField']
class LooseVersionField(models.Field):
    """Store version strings in a varchar(64) column and surface them as
    ``distutils.version.LooseVersion`` objects.

    Values are serialized with ``str()`` on the way into the database and
    re-parsed into ``LooseVersion`` on the way out; input that cannot be
    parsed is returned unchanged.
    """

    def db_type(self, connection):
        # Fixed column type regardless of backend.
        return 'varchar(64)'

    def get_internal_type(self):
        return 'CharField'

    def value_to_string(self, obj):
        """Serialize this field's value from *obj* for fixtures/dumps."""
        value = self._get_val_from_obj(obj)
        return self.get_prep_value(value)

    def to_python(self, value):
        """Coerce a raw database/user value into a ``LooseVersion``."""
        try:
            return LooseVersion(value)
        # Was a bare ``except:``, which also swallowed SystemExit and
        # KeyboardInterrupt; unparseable input is still returned verbatim.
        except Exception:
            return value

    def get_prep_value(self, value):
        """Convert to the string form stored in the database."""
        # Preserve NULL: ``str(None)`` would store the literal text 'None'.
        if value is None:
            return value
        return str(value)
# Raw string: "\." in a non-raw literal is an invalid escape sequence
# (DeprecationWarning in Python 3.6+, SyntaxWarning later).
add_introspection_rules([], [r"^galaxy\.main\.fields\.LooseVersionField"])
# From:
# http://stackoverflow.com/questions/3459843/auto-truncating-fields-at-max-length-in-django-charfields
class TruncatingCharField(models.CharField):
    """CharField that silently truncates overlong values.

    Anything longer than ``max_length`` is cut down and suffixed with
    '...' so the stored string always fits the column.
    """

    def get_prep_value(self, value):
        value = super(TruncatingCharField, self).get_prep_value(value)
        # Empty/None values and values that already fit pass through as-is.
        if not value or len(value) <= self.max_length:
            return value
        # Reserve three characters for the ellipsis marker.
        return value[:self.max_length - 3] + '...'
# Raw string: "\." in a non-raw literal is an invalid escape sequence
# (DeprecationWarning in Python 3.6+, SyntaxWarning later).
add_introspection_rules([], [r"^galaxy\.main\.fields\.TruncatingCharField"])
| chouseknecht/galaxy | galaxy/main/fields.py | Python | apache-2.0 | 2,063 |
#!/usr/bin/env python
"""
Returns namedtuples of disk partitions and their usages.
from a comment here by Giampaolo Rodola:
http://stackoverflow.com/questions/4260116/find-size-and-free-space-of-the-filesystem-containing-a-given-file
"""
import os
from collections import namedtuple
# One mounted filesystem: block device, mount point, filesystem type.
disk_ntuple = namedtuple('partition', 'device mountpoint fstype')
# Usage figures in bytes, plus percentage used.
usage_ntuple = namedtuple('usage', 'total used free percent')
def disk_partitions(all=False):
    """Return all mounted partitions as a list of ``disk_ntuple``.

    If ``all`` is False return physical partitions only, i.e. those whose
    filesystem type appears without the "nodev" marker in
    /proc/filesystems.  Linux-specific (reads /proc and /etc/mtab).
    """
    # Filesystem types backed by a real block device.
    phydevs = []
    # Fix: the original left both file handles open; use context managers.
    with open("/proc/filesystems", "r") as f:
        for line in f:
            if not line.startswith("nodev"):
                phydevs.append(line.strip())
    retlist = []
    with open('/etc/mtab', "r") as f:
        for line in f:
            # 'none' pseudo-mounts are skipped unless all=True.
            if not all and line.startswith('none'):
                continue
            fields = line.split()
            device = fields[0]
            mountpoint = fields[1]
            fstype = fields[2]
            if not all and fstype not in phydevs:
                continue
            if device == 'none':
                device = ''
            retlist.append(disk_ntuple(device, mountpoint, fstype))
    return retlist
def disk_usage(path):
    """Return disk usage for the filesystem containing ``path``.

    Returns a ``usage_ntuple`` of (total, used, free, percent) where the
    byte counts come from ``os.statvfs`` and percent is used/total * 100
    rounded to one decimal place (0 for an empty filesystem).
    """
    st = os.statvfs(path)
    free = (st.f_bavail * st.f_frsize)
    total = (st.f_blocks * st.f_frsize)
    used = (st.f_blocks - st.f_bfree) * st.f_frsize
    try:
        # Fix: dropped the dead ``ret`` alias from ``percent = ret = ...``.
        percent = (float(used) / total) * 100
    except ZeroDivisionError:
        percent = 0
    # NB: the percentage is ~5% lower than what df shows due to
    # reserved blocks that we are currently not considering:
    # http://goo.gl/sWGbH
    return usage_ntuple(total, used, free, round(percent, 1))
if __name__ == '__main__':
    # Smoke test: print every detected partition and its usage.
    # (Python 2 print statements; this file predates Python 3.)
    for part in disk_partitions():
        print part
        print " %s\n" % str(disk_usage(part.mountpoint))
| gboling/Raspberry-Bushcam | diskusage.py | Python | gpl-3.0 | 1,899 |
import pymongo
import bson
import unittest
import mock
import cPickle
from datetime import datetime
from mongoengine import *
from mongoengine.base import _document_registry
from mongoengine.connection import _get_db, connect
import mongoengine.connection
from mock import MagicMock, Mock, call
import pymongo
# All collections created by these tests live in the "test" database.
mongoengine.connection.set_default_db("test")
# has to be top level for pickling
class Citizen(Document):
    """Minimal document kept at module level so cPickle can locate it."""
    age = mongoengine.fields.IntField()
class DocumentTest(unittest.TestCase):
    def setUp(self):
        # Fresh connection and a Person model rebuilt for every test so
        # registry state never leaks between tests.
        connect()
        self.db = _get_db()
        class Person(Document):
            name = StringField()
            age = IntField()
            uid = ObjectIdField()
            friends = ListField(StringField())
        self.Person = Person
    def tearDown(self):
        # Drop the collections used by the tests and wipe the document
        # registry so per-test class definitions do not accumulate.
        self.Person.drop_collection()
        _document_registry.clear()
        Citizen.drop_collection()
    def test_bool(self):
        # Documents are truthy even when they contain no field data.
        class EmptyDoc(EmbeddedDocument):
            pass
        empty_doc = EmptyDoc()
        self.assertTrue(bool(empty_doc))
        nonempty_doc = self.Person(name='Adam')
        self.assertTrue(bool(nonempty_doc))
    def test_drop_collection(self):
        """Ensure that the collection may be dropped from the database.
        """
        # Saving creates the collection; dropping must remove it.
        self.Person(name='Test').save()
        collection = self.Person._get_collection_name()
        self.assertTrue(collection in self.db.collection_names())
        self.Person.drop_collection()
        self.assertFalse(collection in self.db.collection_names())
    def test_definition(self):
        """Ensure that document may be defined using fields.
        """
        name_field = StringField()
        age_field = IntField()
        _document_registry.clear()
        class Person(Document):
            name = name_field
            age = age_field
            non_field = True
        # Field instances are collected into _fields; plain class attributes
        # (non_field) are not, and an implicit 'id' field is always added.
        self.assertEqual(Person._fields['name'], name_field)
        self.assertEqual(Person._fields['age'], age_field)
        self.assertFalse('non_field' in Person._fields)
        self.assertTrue('id' in Person._fields)
        # Test iteration over fields
        fields = list(Person())
        self.assertTrue('name' in fields and 'age' in fields)
        # Ensure Document isn't treated like an actual document
        self.assertFalse(hasattr(Document, '_fields'))
    def test_get_superclasses(self):
        """Ensure that the correct list of superclasses is assembled.
        """
        class Animal(Document): pass
        class Fish(Animal): pass
        class Mammal(Animal): pass
        class Human(Mammal): pass
        class Dog(Mammal): pass
        # _superclasses maps dotted ancestry paths to the classes themselves.
        mammal_superclasses = {'Animal': Animal}
        self.assertEqual(Mammal._superclasses, mammal_superclasses)
        dog_superclasses = {
            'Animal': Animal,
            'Animal.Mammal': Mammal,
        }
        self.assertEqual(Dog._superclasses, dog_superclasses)
    def test_get_subclasses(self):
        """Ensure that the correct list of subclasses is retrieved by the
        _get_subclasses method.
        """
        class Animal(Document): pass
        class Fish(Animal): pass
        class Mammal(Animal): pass
        class Human(Mammal): pass
        class Dog(Mammal): pass
        # _get_subclasses returns the full transitive set, keyed by path.
        mammal_subclasses = {
            'Animal.Mammal.Dog': Dog,
            'Animal.Mammal.Human': Human
        }
        self.assertEqual(Mammal._get_subclasses(), mammal_subclasses)
        animal_subclasses = {
            'Animal.Fish': Fish,
            'Animal.Mammal': Mammal,
            'Animal.Mammal.Dog': Dog,
            'Animal.Mammal.Human': Human
        }
        self.assertEqual(Animal._get_subclasses(), animal_subclasses)
    def test_external_super_and_sub_classes(self):
        """Ensure that the correct list of sub and super classes is assembled.
        when importing part of the model
        """
        class Base(Document): pass
        class Animal(Base): pass
        class Fish(Animal): pass
        class Mammal(Animal): pass
        class Human(Mammal): pass
        class Dog(Mammal): pass
        mammal_superclasses = {'Base': Base, 'Base.Animal': Animal}
        self.assertEqual(Mammal._superclasses, mammal_superclasses)
        dog_superclasses = {
            'Base': Base,
            'Base.Animal': Animal,
            'Base.Animal.Mammal': Mammal,
        }
        self.assertEqual(Dog._superclasses, dog_superclasses)
        animal_subclasses = {
            'Base.Animal.Fish': Fish,
            'Base.Animal.Mammal': Mammal,
            'Base.Animal.Mammal.Dog': Dog,
            'Base.Animal.Mammal.Human': Human
        }
        self.assertEqual(Animal._get_subclasses(), animal_subclasses)
        mammal_subclasses = {
            'Base.Animal.Mammal.Dog': Dog,
            'Base.Animal.Mammal.Human': Human
        }
        self.assertEqual(Mammal._get_subclasses(), mammal_subclasses)
        Base.drop_collection()
        # A saved leaf document must be visible through every ancestor's
        # count() since the hierarchy shares one collection.
        h = Human()
        h.save()
        self.assertEquals(Human.count({}), 1)
        self.assertEquals(Mammal.count({}), 1)
        self.assertEquals(Animal.count({}), 1)
        self.assertEquals(Base.count({}), 1)
        Base.drop_collection()
    def test_polymorphic_queries(self):
        """Ensure that the correct subclasses are returned from a query"""
        class Animal(Document): pass
        class Fish(Animal): pass
        class Mammal(Animal): pass
        class Human(Mammal): pass
        class Dog(Mammal): pass
        Animal.drop_collection()
        Animal().save()
        Fish().save()
        Mammal().save()
        Human().save()
        Dog().save()
        # Querying a class yields that class and all of its subclasses,
        # each rehydrated as the concrete type it was saved as.
        classes = [obj.__class__ for obj in Animal.objects]
        self.assertEqual(classes, [Animal, Fish, Mammal, Human, Dog])
        classes = [obj.__class__ for obj in Mammal.objects]
        self.assertEqual(classes, [Mammal, Human, Dog])
        classes = [obj.__class__ for obj in Human.objects]
        self.assertEqual(classes, [Human])
        Animal.drop_collection()
    def test_reference_inheritance(self):
        # A ListField of references to a non-inheriting document must
        # round-trip through save/find_one intact.
        class Stats(Document):
            created = DateTimeField(default=datetime.now)
            meta = {'allow_inheritance': False}
        class CompareStats(Document):
            generated = DateTimeField(default=datetime.now)
            stats = ListField(ReferenceField(Stats))
        Stats.drop_collection()
        CompareStats.drop_collection()
        list_stats = []
        for i in xrange(10):
            s = Stats()
            s.save()
            list_stats.append(s)
        cmp_stats = CompareStats(stats=list_stats)
        cmp_stats.save()
        self.assertEqual(list_stats, CompareStats.find_one({}).stats)
    def test_inheritance(self):
        """Ensure that document may inherit fields from a superclass document.
        """
        class Employee(self.Person):
            salary = IntField()
        # Subclass sees inherited and own fields, and shares the parent's
        # collection.
        self.assertTrue('name' in Employee._fields)
        self.assertTrue('salary' in Employee._fields)
        self.assertEqual(Employee._get_collection_name(),
                         self.Person._get_collection_name())
        # Ensure that MRO error is not raised
        class A(Document): pass
        class B(A): pass
        class C(B): pass
    def test_allow_inheritance(self):
        """Ensure that inheritance may be disabled on simple classes and that
        _cls and _types will not be used.
        """
        class Animal(Document):
            name = StringField()
            meta = {'allow_inheritance': False}
        Animal.drop_collection()
        # Subclassing a document that forbids inheritance must raise.
        def create_dog_class():
            class Dog(Animal):
                pass
        self.assertRaises(ValueError, create_dog_class)
        # Check that _cls etc aren't present on simple documents
        dog = Animal(name='dog')
        dog.save()
        collection = self.db[Animal._get_collection_name()]
        obj = collection.find_one()
        self.assertFalse('_cls' in obj)
        self.assertFalse('_types' in obj)
        Animal.drop_collection()
        # Nor may a subclass itself try to switch inheritance off.
        def create_employee_class():
            class Employee(self.Person):
                meta = {'allow_inheritance': False}
        self.assertRaises(ValueError, create_employee_class)
        # Test the same for embedded documents
        class Comment(EmbeddedDocument):
            content = StringField()
            meta = {'allow_inheritance': False}
        def create_special_comment():
            class SpecialComment(Comment):
                pass
        self.assertRaises(ValueError, create_special_comment)
        comment = Comment(content='test')
        self.assertFalse('_cls' in comment.to_mongo())
        self.assertFalse('_types' in comment.to_mongo())
    def test_collection_name(self):
        """Ensure that a collection with a specified name may be used.
        """
        collection = 'personCollTest'
        if collection in self.db.collection_names():
            self.db.drop_collection(collection)
        _document_registry.clear()
        # meta['collection'] overrides the default derived collection name.
        class Person(Document):
            name = StringField()
            meta = {'collection': collection}
        user = Person(name="Test User")
        user.save()
        self.assertTrue(collection in self.db.collection_names())
        user_obj = self.db[collection].find_one()
        self.assertEqual(user_obj['name'], "Test User")
        user_obj = Person.objects[0]
        self.assertEqual(user_obj.name, "Test User")
        Person.drop_collection()
        self.assertFalse(collection in self.db.collection_names())
    def test_collection_name_and_primary(self):
        """Ensure that a collection with a specified name may be used.
        """
        _document_registry.clear()
        # Custom collection name combined with a custom primary key field.
        class Person(Document):
            name = StringField(primary_key=True)
            meta = {'collection': 'app'}
        user = Person(name="Test User")
        user.save()
        user_obj = Person.objects[0]
        self.assertEqual(user_obj.name, "Test User")
        Person.drop_collection()
    def test_creation(self):
        """Ensure that document may be created using keyword arguments.
        """
        person = self.Person(name="Test User", age=30)
        self.assertEqual(person.name, "Test User")
        self.assertEqual(person.age, 30)
    def test_reload(self):
        """Ensure that attributes may be reloaded.
        """
        person = self.Person(name="Test User", age=20)
        person.save()
        # Mutate the same record through a second in-memory object.
        person_obj = self.Person.find_one({})
        person_obj.name = "Mr Test User"
        person_obj.age = 21
        person_obj.save()
        # The first object is stale until reload() refetches from the DB.
        self.assertEqual(person.name, "Test User")
        self.assertEqual(person.age, 20)
        person.reload()
        self.assertEqual(person.name, "Mr Test User")
        self.assertEqual(person.age, 21)
    def test_dictionary_access(self):
        """Ensure that dictionary-style field access works properly.
        """
        person = self.Person(name='Test User', age=30)
        self.assertEquals(person['name'], 'Test User')
        # Unknown field names raise KeyError on both read and write.
        self.assertRaises(KeyError, person.__getitem__, 'salary')
        self.assertRaises(KeyError, person.__setitem__, 'salary', 50)
        person['name'] = 'Another User'
        self.assertEquals(person['name'], 'Another User')
        # Length = length(assigned fields + id)
        self.assertEquals(len(person), 5)
        # 'in' reflects whether the field currently holds a value.
        self.assertTrue('age' in person)
        person.age = None
        self.assertFalse('age' in person)
        self.assertFalse('nationality' in person)
    def test_embedded_document(self):
        """Ensure that embedded documents are set up correctly.
        """
        class Comment(EmbeddedDocument):
            content = StringField()
        # Embedded documents get no implicit id and no collection of their own.
        self.assertTrue('content' in Comment._fields)
        self.assertFalse('id' in Comment._fields)
        self.assertFalse('collection' in Comment._meta)
    def test_embedded_document_validation(self):
        """Ensure that embedded documents may be validated.
        """
        class Comment(EmbeddedDocument):
            date = DateTimeField()
            content = StringField(required=True)
        # Missing required field fails validation.
        comment = Comment()
        self.assertRaises(ValidationError, comment.validate)
        comment.content = 'test'
        comment.validate()
        # Wrong type for a typed field fails validation.
        comment.date = 4
        self.assertRaises(ValidationError, comment.validate)
        comment.date = datetime.now()
        comment.validate()
    def test_save(self):
        """Ensure that a document may be saved in the database.
        """
        # Create person object and save it to the database
        person = self.Person(name='Test User', age=30)
        person.save()
        # Ensure that the object is in the database
        collection = self.db[self.Person._get_collection_name()]
        person_obj = collection.find_one({'name': 'Test User'})
        self.assertEqual(person_obj['name'], 'Test User')
        self.assertEqual(person_obj['age'], 30)
        self.assertEqual(person_obj['_id'], person.id)
        # Test skipping validation on save
        class Recipient(Document):
            email = EmailField(required=True)
        # 'root@localhost' fails EmailField validation, but validate=False
        # must allow the save anyway.
        recipient = Recipient(email='root@localhost')
        self.assertRaises(ValidationError, recipient.save)
        try:
            recipient.save(validate=False)
        except ValidationError:
            self.fail()
    def test_save_to_a_value_that_equates_to_false(self):
        # Regression: a falsy value (0) on an embedded field must still be
        # persisted, not dropped as "unset".
        class Thing(EmbeddedDocument):
            count = IntField()
        class User(Document):
            thing = EmbeddedDocumentField(Thing)
        User.drop_collection()
        user = User(thing=Thing(count=1))
        user.save()
        user.reload()
        user.thing.count = 0
        user.save()
        user.reload()
        self.assertEquals(user.thing.count, 0)
    def test_save_max_recursion_not_hit(self):
        _document_registry.clear()
        # Self-referencing documents form a cycle (p1 <-> p2); saving must
        # reset changed fields without infinite recursion.
        class Person(Document):
            name = StringField()
            parent = ReferenceField('self')
            friend = ReferenceField('self')
        Person.drop_collection()
        p1 = Person(name="Wilson Snr")
        p1.parent = None
        p1.save()
        p2 = Person(name="Wilson Jr")
        p2.parent = p1
        p2.save()
        p1.friend = p2
        p1.save()
        # Confirm can save and it resets the changed fields without hitting
        # max recursion error
        p0 = Person.find_one({})
        p0.name = 'wpjunior'
        p0.save()
    def test_update(self):
        """Ensure that an existing document is updated instead of be overwritten.
        """
        # Create person object and save it to the database
        person = self.Person(name='Test User', age=30)
        person.save()
        # Create same person object, with same id, without age
        same_person = self.Person(name='Test')
        same_person.id = person.id
        same_person.save()
        # Confirm only one object
        self.assertEquals(self.Person.count({}), 1)
        # reload
        person.reload()
        same_person.reload()
        # Confirm the same
        self.assertEqual(person, same_person)
        self.assertEqual(person.name, same_person.name)
        self.assertEqual(person.age, same_person.age)
        # Confirm the saved values: the second save replaced the record, so
        # the unset 'age' comes back as None.
        self.assertEqual(person.name, 'Test')
        self.assertIsNone(person.age)
    def test_document_update(self):
        # set() on a document that was never saved matches nothing (n == 0).
        person = self.Person(name='dcrosta',
                             id=bson.ObjectId(), uid=bson.ObjectId())
        resp = person.set(name='Dan Crosta')
        self.assertEquals(resp['n'], 0)
        author = self.Person(name='dcrosta')
        author.save()
        author.set(name='Dan Crosta')
        author.reload()
        p1 = self.Person.find_one({})
        self.assertEquals(p1.name, author.name)
        # Setting a field to None unsets it.
        p1.set(uid=None)
        p1.reload()
        self.assertEquals(p1.uid, None)
        # Unsetting the primary key and calling set() with no values must
        # both be rejected by the server.
        def unset_primary_key():
            person = self.Person.find_one({})
            person.set(id=None)
        def update_no_value_raises():
            person = self.Person.find_one({})
            person.set()
        self.assertRaises(pymongo.errors.OperationFailure, unset_primary_key)
        self.assertRaises(pymongo.errors.OperationFailure, update_no_value_raises)
    def test_addtoset_on_null_list(self):
        # $addToSet with $each must work even when the list field was saved
        # as null rather than an empty array.
        person = self.Person(
            name = 'Bruce Banner',
            id = bson.ObjectId(),
            uid = bson.ObjectId(),
            friends = None
        )
        person.save()
        person.update_one({'$addToSet' : {'friends' : {'$each' : ['Bob','Fan']}}})
        person.reload()
        self.assertTrue(len(person.friends) == 2)
        self.assertTrue('Bob' in person.friends)
        self.assertTrue('Fan' in person.friends)
    def test_non_existant_inc(self):
        # inc() on a field that is absent treats it as 0, then accumulates.
        person = self.Person(name='dcrosta',
                             id=bson.ObjectId(), uid=bson.ObjectId())
        person.save()
        person.inc(age=5)
        person.reload()
        self.assertEquals(person.age, 5)
        person.inc(age=5)
        person.reload()
        self.assertEquals(person.age, 10)
    def test_embedded_update(self):
        """
        Test update on `EmbeddedDocumentField` fields
        """
        class Page(EmbeddedDocument):
            log_message = StringField(required=True)
        class Site(Document):
            page = EmbeddedDocumentField(Page)
        Site.drop_collection()
        site = Site(page=Page(log_message="Warning: Dummy message"))
        site.save()
        # Update: mutate the embedded field on a fetched copy and re-save.
        site = Site.find_one({})
        site.page.log_message = "Error: Dummy message"
        site.save()
        site = Site.find_one({})
        self.assertEqual(site.page.log_message, "Error: Dummy message")
    def test_embedded_update_db_field(self):
        """
        Test update on `EmbeddedDocumentField` fields when db_field is other
        than default.
        """
        # Same scenario as test_embedded_update but the embedded field is
        # stored under a custom db_field name.
        class Page(EmbeddedDocument):
            log_message = StringField(db_field="page_log_message",
                                      required=True)
        class Site(Document):
            page = EmbeddedDocumentField(Page)
        Site.drop_collection()
        site = Site(page=Page(log_message="Warning: Dummy message"))
        site.save()
        # Update
        site = Site.find_one({})
        site.page.log_message = "Error: Dummy message"
        site.save()
        site = Site.find_one({})
        self.assertEqual(site.page.log_message, "Error: Dummy message")
    def test_delete(self):
        """Ensure that document may be deleted using the delete method.
        """
        person = self.Person(name="Test User", age=30)
        person.save()
        self.assertEqual(len(self.Person.objects), 1)
        person.delete()
        self.assertEqual(len(self.Person.objects), 0)
    def test_save_custom_id(self):
        """Ensure that a document may be saved with a custom _id.
        """
        # Create person object and save it to the database
        person = self.Person(name='Test User', age=30,
                             id='497ce96f395f2f052a494fd4')
        person.save()
        # Ensure that the object is in the database with the correct _id
        collection = self.db[self.Person._get_collection_name()]
        person_obj = collection.find_one({'name': 'Test User'})
        self.assertEqual(str(person_obj['_id']), '497ce96f395f2f052a494fd4')
    def test_save_custom_pk(self):
        """Ensure that a document may be saved with a custom _id using pk alias.
        """
        # Create person object and save it to the database
        # ('pk' is an alias for the 'id' field).
        person = self.Person(name='Test User', age=30,
                             pk='497ce96f395f2f052a494fd4')
        person.save()
        # Ensure that the object is in the database with the correct _id
        collection = self.db[self.Person._get_collection_name()]
        person_obj = collection.find_one({'name': 'Test User'})
        self.assertEqual(str(person_obj['_id']), '497ce96f395f2f052a494fd4')
    def test_save_list(self):
        """Ensure that a list field may be properly saved.
        """
        class Comment(EmbeddedDocument):
            content = StringField()
        class BlogPost(Document):
            content = StringField()
            comments = ListField(EmbeddedDocumentField(Comment))
            tags = ListField(StringField())
        BlogPost.drop_collection()
        post = BlogPost(content='Went for a walk today...')
        post.tags = tags = ['fun', 'leisure']
        comments = [Comment(content='Good for you'), Comment(content='Yay.')]
        post.comments = comments
        post.save()
        # Verify the raw stored form: tags as a plain list, comments as a
        # list of subdocuments.
        collection = self.db[BlogPost._get_collection_name()]
        post_obj = collection.find_one()
        self.assertEqual(post_obj['tags'], tags)
        for comment_obj, comment in zip(post_obj['comments'], comments):
            self.assertEqual(comment_obj['content'], comment['content'])
        BlogPost.drop_collection()
    def test_list_search_by_embedded(self):
        # Querying on a reference inside an embedded document held in a
        # list field (comments__user) must match any element of the list.
        class User(Document):
            username = StringField(required=True)
            meta = {'allow_inheritance': False}
        class Comment(EmbeddedDocument):
            comment = StringField()
            user = ReferenceField(User,
                                  required=True)
            meta = {'allow_inheritance': False}
        class Page(Document):
            comments = ListField(EmbeddedDocumentField(Comment))
            meta = {'allow_inheritance': False}
        User.drop_collection()
        Page.drop_collection()
        u1 = User(username="wilson")
        u1.save()
        u2 = User(username="rozza")
        u2.save()
        u3 = User(username="hmarr")
        u3.save()
        p1 = Page(comments = [Comment(user=u1, comment="Its very good"),
                              Comment(user=u2, comment="Hello world"),
                              Comment(user=u3, comment="Ping Pong"),
                              Comment(user=u1, comment="I like a beer")])
        p1.save()
        p2 = Page(comments = [Comment(user=u1, comment="Its very good"),
                              Comment(user=u2, comment="Hello world")])
        p2.save()
        p3 = Page(comments = [Comment(user=u3, comment="Its very good")])
        p3.save()
        p4 = Page(comments = [Comment(user=u2, comment="Heavy Metal song")])
        p4.save()
        self.assertEqual([p1, p2], list(Page.objects.filter(comments__user=u1)))
        self.assertEqual([p1, p2, p4], list(Page.objects.filter(comments__user=u2)))
        self.assertEqual([p1, p3], list(Page.objects.filter(comments__user=u3)))
    def test_save_embedded_document(self):
        """Ensure that a document with an embedded document field may be
        saved in the database.
        """
        class EmployeeDetails(EmbeddedDocument):
            position = StringField()
        class Employee(self.Person):
            salary = IntField()
            details = EmbeddedDocumentField(EmployeeDetails)
        # Create employee object and save it to the database
        employee = Employee(name='Test Employee', age=50, salary=20000)
        employee.details = EmployeeDetails(position='Developer')
        employee.save()
        # Ensure that the object is in the database; the subclass shares the
        # Person collection.
        collection = self.db[self.Person._get_collection_name()]
        employee_obj = collection.find_one({'name': 'Test Employee'})
        self.assertEqual(employee_obj['name'], 'Test Employee')
        self.assertEqual(employee_obj['age'], 50)
        # Ensure that the 'details' embedded object saved correctly
        self.assertEqual(employee_obj['details']['position'], 'Developer')
    def test_updating_an_embedded_document(self):
        """Ensure that a document with an embedded document field may be
        saved in the database.
        """
        class EmployeeDetails(EmbeddedDocument):
            position = StringField()
        class Employee(self.Person):
            salary = IntField()
            details = EmbeddedDocumentField(EmployeeDetails)
        # Create employee object and save it to the database
        employee = Employee(name='Test Employee', age=50, salary=20000)
        employee.details = EmployeeDetails(position='Developer')
        employee.save()
        # Test updating an embedded document
        promoted_employee = Employee.objects.get(name='Test Employee')
        promoted_employee.details.position = 'Senior Developer'
        promoted_employee.save()
        promoted_employee.reload()
        self.assertEqual(promoted_employee.name, 'Test Employee')
        self.assertEqual(promoted_employee.age, 50)
        # Ensure that the 'details' embedded object saved correctly
        self.assertEqual(promoted_employee.details.position, 'Senior Developer')
        # Test removal: assigning None clears the embedded document.
        promoted_employee.details = None
        promoted_employee.save()
        promoted_employee.reload()
        self.assertEqual(promoted_employee.details, None)
    def test_save_reference(self):
        """Ensure that a document reference field may be saved in the database.
        """
        class BlogPost(Document):
            meta = {'collection': 'blogpost_1'}
            content = StringField()
            author = ReferenceField(self.Person)
        BlogPost.drop_collection()
        author = self.Person(name='Test User')
        author.save()
        post = BlogPost(content='Watched some TV today... how exciting.')
        # Should only reference author when saving
        post.author = author
        post.save()
        post_obj = BlogPost.find_one({})
        # Test laziness: the reference stays a raw DBRef until first access,
        # then dereferences into a Person instance.
        self.assertTrue(isinstance(post_obj._lazy_data['author'],
                                   bson.dbref.DBRef))
        self.assertTrue(isinstance(post_obj.author, self.Person))
        self.assertEqual(post_obj.author.name, 'Test User')
        # Ensure that the dereferenced object may be changed and saved
        post_obj.author.age = 25
        post_obj.author.save()
        author = list(self.Person.objects(name='Test User'))[-1]
        self.assertEqual(author.age, 25)
        BlogPost.drop_collection()
    def subclasses_and_unique_keys_works(self):
        # NOTE(review): missing the 'test_' prefix, so the test runner never
        # executes this method — confirm whether it was disabled on purpose
        # before renaming it.
        # A unique index declared only on a subclass must not prevent saving
        # multiple parent-class documents in the shared collection.
        class A(Document):
            pass
        class B(A):
            foo = BooleanField(unique=True)
        A.drop_collection()
        B.drop_collection()
        A().save()
        A().save()
        B(foo=True).save()
        self.assertEquals(A.count({}), 2)
        self.assertEquals(B.count({}), 1)
        A.drop_collection()
        B.drop_collection()
    def test_document_hash(self):
        """Test document in list, dict, set
        """
        class User(Document):
            pass
        class BlogPost(Document):
            pass
        # Clear old datas
        User.drop_collection()
        BlogPost.drop_collection()
        u1 = User.objects.create()
        u2 = User.objects.create()
        u3 = User.objects.create()
        u4 = User() # New object
        b1 = BlogPost.objects.create()
        b2 = BlogPost.objects.create()
        # in List: equality/hash must be based on class + saved id, so an
        # unsaved object and documents of another class never match.
        all_user_list = list(User.objects.all())
        self.assertTrue(u1 in all_user_list)
        self.assertTrue(u2 in all_user_list)
        self.assertTrue(u3 in all_user_list)
        self.assertFalse(u4 in all_user_list) # New object
        self.assertFalse(b1 in all_user_list) # Other object
        self.assertFalse(b2 in all_user_list) # Other object
        # in Dict
        all_user_dic = {}
        for u in User.objects.all():
            all_user_dic[u] = "OK"
        self.assertEqual(all_user_dic.get(u1, False), "OK" )
        self.assertEqual(all_user_dic.get(u2, False), "OK" )
        self.assertEqual(all_user_dic.get(u3, False), "OK" )
        self.assertEqual(all_user_dic.get(u4, False), False ) # New object
        self.assertEqual(all_user_dic.get(b1, False), False ) # Other object
        self.assertEqual(all_user_dic.get(b2, False), False ) # Other object
        # in Set
        all_user_set = set(User.find({}))
        self.assertTrue(u1 in all_user_set)
    def throw_invalid_document_error(self):
        # NOTE(review): this method has no ``test_`` prefix, so unittest
        # discovery never runs it -- confirm whether that is intentional.
        # Also note the inner helper deliberately(?) shadows the method name.
        # test handles people trying to upsert
        def throw_invalid_document_error():
            # 'validate' collides with the Document.validate() method; the
            # class body is expected to raise InvalidDocumentError below.
            class Blog(Document):
                validate = DictField()
        self.assertRaises(InvalidDocumentError, throw_invalid_document_error)
def test_write_concern(self):
class ImportantThing(Document):
meta = {'write_concern': 2}
name = StringField()
class MajorityThing(Document):
meta = {'write_concern': 'majority',
'force_insert': True}
name = StringField()
class NormalThing(Document):
name = StringField()
# test save() of ImportantThing gets w=2
with mock.patch.object(ImportantThing._pymongo(), "save") as save_mock:
it = ImportantThing(id=bson.ObjectId())
save_mock.return_value = it.id
it.save()
save_mock.assert_called_with(it.to_mongo(), w=2)
# test insert() of MajorityThing gets w=majority
# note: uses insert() because force_insert is set
with mock.patch.object(MajorityThing._pymongo(), "insert") as insert_mock:
mt = MajorityThing(id=bson.ObjectId())
insert_mock.return_value = mt.id
mt.save()
insert_mock.assert_called_with(mt.to_mongo(), w='majority')
# test NormalThing gets default w=1
with mock.patch.object(NormalThing._pymongo(), "save") as save_mock:
nt = NormalThing(id=bson.ObjectId())
save_mock.return_value = nt.id
nt.save()
save_mock.assert_called_with(nt.to_mongo(), w=1)
# test ImportantThing update gets w=2
with mock.patch.object(ImportantThing._pymongo(), "update") as update_mock:
it.set(name="Adam")
self.assertEquals(update_mock.call_count, 1)
self.assertEquals(update_mock.call_args[1]['w'], 2)
# test MajorityThing update gets w=majority
with mock.patch.object(MajorityThing._pymongo(), "update") as update_mock:
mt.set(name="Adam")
self.assertEquals(update_mock.call_count, 1)
self.assertEquals(update_mock.call_args[1]['w'], "majority")
# test NormalThing update gets w=1
with mock.patch.object(NormalThing._pymongo(), "update") as update_mock:
nt.set(name="Adam")
self.assertEquals(update_mock.call_count, 1)
self.assertEquals(update_mock.call_args[1]['w'], 1)
def test_by_id_key(self):
class UnshardedCollection(Document):
pass
class IdShardedCollection(Document):
meta = {'hash_field': 'id'}
class NonIdShardedCollection(Document):
meta = {'hash_field': 'name'}
name = mongoengine.fields.StringField()
doc_id = bson.ObjectId()
# unsharded and non-ID sharded collections don't have anything injected
self.assertEquals(UnshardedCollection._by_id_key(doc_id),
{'_id': doc_id})
self.assertEquals(NonIdShardedCollection._by_id_key(doc_id),
{'_id': doc_id})
# ID-sharded collections get the hash injected
self.assertEquals(IdShardedCollection._by_id_key(doc_id),
{'_id': doc_id,
'shard_hash': IdShardedCollection._hash(doc_id)})
def test_by_ids_key(self):
class UnshardedCollection(Document):
pass
class IdShardedCollection(Document):
meta = {'hash_field': 'id'}
class NonIdShardedCollection(Document):
meta = {'hash_field': 'name'}
name = mongoengine.fields.StringField()
doc_ids = [bson.ObjectId() for i in xrange(5)]
# unsharded and non-ID sharded collections don't have anything injected
self.assertEquals(UnshardedCollection._by_ids_key(doc_ids),
{'_id': {'$in': doc_ids}})
self.assertEquals(NonIdShardedCollection._by_ids_key(doc_ids),
{'_id': {'$in': doc_ids}})
# ID-sharded collections get the hash injected
doc_hashes = [IdShardedCollection._hash(doc_id) for doc_id in doc_ids]
self.assertEquals(IdShardedCollection._by_ids_key(doc_ids),
{'_id': {'$in': doc_ids},
'shard_hash': {'$in': doc_hashes}})
# unsharded and non-ID sharded collections don't have anything injected
self.assertEquals(UnshardedCollection._by_ids_key([]),
{'_id': {'$in': []}})
self.assertEquals(NonIdShardedCollection._by_ids_key([]),
{'_id': {'$in': []}})
# ID-sharded collections get the hash injected
self.assertEquals(IdShardedCollection._by_ids_key([]),
{'_id': {'$in': []},
'shard_hash': {'$in': []}})
def test_can_pickle(self):
person = Citizen(age=20)
person.save()
pickled = cPickle.dumps(person)
restored = cPickle.loads(pickled)
self.assertEqual(person, restored)
self.assertEqual(person.age, restored.age)
def test_find_raw_max_time_ms(self):
cur, _ = Citizen.find_raw({}, max_time_ms=None, limit=1)
self.assertEquals(cur._Cursor__max_time_ms, Citizen.MAX_TIME_MS)
cur, _ = Citizen.find_raw({}, max_time_ms=0, limit=1)
self.assertIsNone(cur._Cursor__max_time_ms)
cur, _ = Citizen.find_raw({}, max_time_ms=-1, limit=1)
self.assertIsNone(cur._Cursor__max_time_ms)
cur, _ = Citizen.find_raw({}, max_time_ms=1000, limit=1)
self.assertEquals(cur._Cursor__max_time_ms, 1000)
def test_max_time_ms_find(self):
col_mock = Mock()
col_mock.name = 'asdf'
doc_mock = MagicMock()
doc_mock.__iter__.return_value = ['a','b']
cur_mock = Mock()
cur_mock.collection = col_mock
cur_mock.next = MagicMock(side_effect=[doc_mock])
find_raw = MagicMock(return_value=(cur_mock,Mock()))
Citizen.find_raw = find_raw
Citizen.find({}, max_time_ms=None)
Citizen.find({}, max_time_ms=0)
Citizen.find({}, max_time_ms=-1)
Citizen.find({}, max_time_ms=1000)
a,b,c,d = find_raw.call_args_list
self.assertEquals(a[1]['max_time_ms'],None)
self.assertEquals(b[1]['max_time_ms'],0)
self.assertEquals(c[1]['max_time_ms'],-1)
self.assertEquals(d[1]['max_time_ms'],1000)
def test_max_time_ms_find_iter(self):
cur_mock = MagicMock()
cur_mock._iterate_cursor = MagicMock(side_effect=['a'])
find_raw = MagicMock(return_value=(cur_mock,Mock()))
Citizen.find_raw = find_raw
Citizen._from_augmented_son = MagicMock(return_value=None)
Citizen.find_iter({}, max_time_ms=None).next()
Citizen.find_iter({}, max_time_ms=0).next()
Citizen.find_iter({}, max_time_ms=-1).next()
Citizen.find_iter({}, max_time_ms=1000).next()
a,b,c,d = find_raw.call_args_list
self.assertEquals(a[1]['max_time_ms'],None)
self.assertEquals(b[1]['max_time_ms'],0)
self.assertEquals(c[1]['max_time_ms'],-1)
self.assertEquals(d[1]['max_time_ms'],1000)
def test_max_time_ms_find_one(self):
find_raw = MagicMock(return_value=(None, None))
Citizen.find_raw = find_raw
Citizen.find_one({}, max_time_ms=None)
Citizen.find_one({}, max_time_ms=0)
Citizen.find_one({}, max_time_ms=-1)
Citizen.find_one({}, max_time_ms=1000)
a,b,c,d = find_raw.call_args_list
self.assertEquals(a[1]['max_time_ms'],None)
self.assertEquals(b[1]['max_time_ms'],0)
self.assertEquals(c[1]['max_time_ms'],-1)
self.assertEquals(d[1]['max_time_ms'],1000)
def test_max_time_ms_count(self):
cur_mock = Mock()
cur_mock.count = MagicMock(return_value=1)
find_raw = Mock(return_value=(cur_mock,Mock()))
Citizen.find_raw = find_raw
Citizen.count({}, max_time_ms=None)
Citizen.count({}, max_time_ms=0)
Citizen.count({}, max_time_ms=-1)
Citizen.count({}, max_time_ms=1000)
a,b,c,d = find_raw.call_args_list
self.assertEquals(a[1]['max_time_ms'],None)
self.assertEquals(b[1]['max_time_ms'],0)
self.assertEquals(c[1]['max_time_ms'],-1)
self.assertEquals(d[1]['max_time_ms'],1000)
def test_max_time_ms_distinct(self):
cur_mock = Mock()
cur_mock.distinct = MagicMock(return_value=1)
find_raw = Mock(return_value=(cur_mock,Mock()))
Citizen.find_raw = find_raw
Citizen.distinct({}, '_id', max_time_ms=None)
Citizen.distinct({}, '_id', max_time_ms=0)
Citizen.distinct({}, '_id', max_time_ms=-1)
Citizen.distinct({}, '_id', max_time_ms=1000)
a,b,c,d = find_raw.call_args_list
self.assertEquals(a[1]['max_time_ms'],None)
self.assertEquals(b[1]['max_time_ms'],0)
self.assertEquals(c[1]['max_time_ms'],-1)
self.assertEquals(d[1]['max_time_ms'],1000)
def test_timeout_value_find(self):
col_mock = Mock()
col_mock.name = 'asdf'
doc_mock = MagicMock()
doc_mock.__iter__.return_value = ['a','b']
cur_mock = Mock()
cur_mock.collection = col_mock
cur_mock.next = MagicMock(
side_effect=pymongo.errors.ExecutionTimeout('asdf'))
find_raw = MagicMock(return_value=(cur_mock,Mock()))
Citizen.find_raw = find_raw
self.assertEquals([],Citizen.find({}, timeout_value=[]))
self.assertEquals({},Citizen.find({}, timeout_value={}))
self.assertEquals(1,Citizen.find({}, timeout_value=1))
self.assertEquals('asdf',Citizen.find({}, timeout_value='asdf'))
with self.assertRaises(pymongo.errors.ExecutionTimeout):
Citizen.find({})
def test_timeout_value_find_one(self):
find_raw = MagicMock(return_value=(MagicMock(),Mock()))
from_augmented_son = MagicMock(
side_effect=pymongo.errors.ExecutionTimeout('asdf'))
Citizen._from_augmented_son = from_augmented_son
Citizen.find_raw = find_raw
self.assertEquals([],Citizen.find_one({}, timeout_value=[]))
self.assertEquals({},Citizen.find_one({}, timeout_value={}))
self.assertEquals(1,Citizen.find_one({}, timeout_value=1))
self.assertEquals('asdf',Citizen.find_one({}, timeout_value='asdf'))
with self.assertRaises(pymongo.errors.ExecutionTimeout):
Citizen.find_one({})
def test_timeout_value_count(self):
cur_mock = Mock()
cur_mock.count = MagicMock(
side_effect=pymongo.errors.ExecutionTimeout('asdf'))
find_raw = Mock(return_value=(cur_mock,Mock()))
Citizen.find_raw = find_raw
self.assertEquals([],Citizen.count({}, timeout_value=[]))
self.assertEquals({},Citizen.count({}, timeout_value={}))
self.assertEquals(1,Citizen.count({}, timeout_value=1))
self.assertEquals('asdf',Citizen.count({}, timeout_value='asdf'))
with self.assertRaises(pymongo.errors.ExecutionTimeout):
Citizen.count({})
def test_timeout_value_distinct(self):
cur_mock = Mock()
cur_mock.distinct = MagicMock(
side_effect=pymongo.errors.ExecutionTimeout('asdf'))
find_raw = Mock(return_value=(cur_mock,Mock()))
Citizen.find_raw = find_raw
self.assertEquals([],Citizen.distinct({}, '_id', timeout_value=[]))
self.assertEquals({},Citizen.distinct({}, '_id', timeout_value={}))
self.assertEquals(1,Citizen.distinct({}, '_id', timeout_value=1))
self.assertEquals('asdf',Citizen.distinct({}, '_id', timeout_value='asdf'))
with self.assertRaises(pymongo.errors.ExecutionTimeout):
Citizen.distinct({}, '_id')
def test_timeout_retry_find(self):
col_mock = Mock()
col_mock.name = 'asdf'
doc_mock = MagicMock()
doc_mock.__iter__.return_value = ['a','b']
cur_mock = Mock()
cur_mock.collection = col_mock
cur_mock.next = MagicMock(
side_effect=pymongo.errors.ExecutionTimeout('asdf'))
find_raw = MagicMock(return_value=(cur_mock,Mock()))
Citizen.find_raw = find_raw
with self.assertRaises(pymongo.errors.ExecutionTimeout):
Citizen.find({}, max_time_ms=None)
with self.assertRaises(pymongo.errors.ExecutionTimeout):
Citizen.find({}, max_time_ms=Citizen.MAX_TIME_MS - 1)
with self.assertRaises(pymongo.errors.ExecutionTimeout):
Citizen.find({}, max_time_ms=Citizen.MAX_TIME_MS)
with self.assertRaises(pymongo.errors.ExecutionTimeout):
Citizen.find({}, max_time_ms=Citizen.MAX_TIME_MS + 1)
# should retry on the first two, should not retry on the last two
self.assertEquals(len(find_raw.call_args_list), 6)
_, a, _, b, c, d = find_raw.call_args_list
self.assertEquals(a[1]['max_time_ms'],Citizen.RETRY_MAX_TIME_MS)
self.assertEquals(b[1]['max_time_ms'],Citizen.RETRY_MAX_TIME_MS)
def test_timeout_retry_find_one(self):
find_raw = MagicMock(return_value=(MagicMock(),Mock()))
from_augmented_son = MagicMock(
side_effect=pymongo.errors.ExecutionTimeout('asdf'))
Citizen._from_augmented_son = from_augmented_son
Citizen.find_raw = find_raw
with self.assertRaises(pymongo.errors.ExecutionTimeout):
Citizen.find_one({}, max_time_ms=None)
with self.assertRaises(pymongo.errors.ExecutionTimeout):
Citizen.find_one({}, max_time_ms=Citizen.MAX_TIME_MS - 1)
with self.assertRaises(pymongo.errors.ExecutionTimeout):
Citizen.find_one({}, max_time_ms=Citizen.MAX_TIME_MS)
with self.assertRaises(pymongo.errors.ExecutionTimeout):
Citizen.find_one({}, max_time_ms=Citizen.MAX_TIME_MS + 1)
# should retry on the first two, should not retry on the last two
self.assertEquals(len(find_raw.call_args_list), 6)
_, a, _, b, c, d = find_raw.call_args_list
self.assertEquals(a[1]['max_time_ms'],Citizen.RETRY_MAX_TIME_MS)
self.assertEquals(b[1]['max_time_ms'],Citizen.RETRY_MAX_TIME_MS)
def test_timeout_retry_count(self):
cur_mock = Mock()
cur_mock.count = MagicMock(
side_effect=pymongo.errors.ExecutionTimeout('asdf'))
find_raw = Mock(return_value=(cur_mock,Mock()))
Citizen.find_raw = find_raw
with self.assertRaises(pymongo.errors.ExecutionTimeout):
Citizen.count({}, max_time_ms=None)
with self.assertRaises(pymongo.errors.ExecutionTimeout):
Citizen.count({}, max_time_ms=Citizen.MAX_TIME_MS - 1)
with self.assertRaises(pymongo.errors.ExecutionTimeout):
Citizen.count({}, max_time_ms=Citizen.MAX_TIME_MS)
with self.assertRaises(pymongo.errors.ExecutionTimeout):
Citizen.count({}, max_time_ms=Citizen.MAX_TIME_MS + 1)
# should retry on the first two, should not retry on the last two
self.assertEquals(len(find_raw.call_args_list), 6)
_, a, _, b, c, d = find_raw.call_args_list
self.assertEquals(a[1]['max_time_ms'],Citizen.RETRY_MAX_TIME_MS)
self.assertEquals(b[1]['max_time_ms'],Citizen.RETRY_MAX_TIME_MS)
def test_timeout_retry_distinct(self):
cur_mock = Mock()
cur_mock.distinct = MagicMock(
side_effect=pymongo.errors.ExecutionTimeout('asdf'))
find_raw = Mock(return_value=(cur_mock,Mock()))
Citizen.find_raw = find_raw
with self.assertRaises(pymongo.errors.ExecutionTimeout):
Citizen.distinct({}, '_id', max_time_ms=None)
with self.assertRaises(pymongo.errors.ExecutionTimeout):
Citizen.distinct({}, '_id', max_time_ms=Citizen.MAX_TIME_MS - 1)
with self.assertRaises(pymongo.errors.ExecutionTimeout):
Citizen.distinct({}, '_id', max_time_ms=Citizen.MAX_TIME_MS)
with self.assertRaises(pymongo.errors.ExecutionTimeout):
Citizen.distinct({},'_id', max_time_ms=Citizen.MAX_TIME_MS + 1)
# should retry on the first two, should not retry on the last two
self.assertEquals(len(find_raw.call_args_list), 6)
_, a, _, b, c, d = find_raw.call_args_list
self.assertEquals(a[1]['max_time_ms'],Citizen.RETRY_MAX_TIME_MS)
self.assertEquals(b[1]['max_time_ms'],Citizen.RETRY_MAX_TIME_MS)
# Allow running this test module directly (python document.py).
if __name__ == '__main__':
    unittest.main()
| ContextLogic/mongoengine | tests/document.py | Python | mit | 44,899 |
# (c) 2016, James Tanner
# (c) 2016, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import subprocess
from ansible import constants as C
from ansible.module_utils._text import to_bytes
from ansible.module_utils.compat.paramiko import paramiko
_HAS_CONTROLPERSIST = {} # type: dict[str, bool]
def check_for_controlpersist(ssh_executable):
    """Return True if the given ssh binary understands ControlPersist.

    The probe runs ``ssh -o ControlPersist`` once per executable and caches
    the verdict in the module-level _HAS_CONTROLPERSIST dict.
    """
    # Serve a cached verdict if we already probed this executable.
    if ssh_executable in _HAS_CONTROLPERSIST:
        return _HAS_CONTROLPERSIST[ssh_executable]

    b_ssh_exec = to_bytes(ssh_executable, errors='surrogate_or_strict')
    supported = True
    try:
        proc = subprocess.Popen([b_ssh_exec, '-o', 'ControlPersist'],
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        err = proc.communicate()[1]
        # An ssh too old for ControlPersist complains about the option (or
        # just prints its usage text).
        if b"Bad configuration option" in err or b"Usage:" in err:
            supported = False
    except OSError:
        # Executable missing or not runnable.
        supported = False

    _HAS_CONTROLPERSIST[ssh_executable] = supported
    return supported
# TODO: move to 'smart' connection plugin that subclasses to ssh/paramiko as needed.
def set_default_transport():
    """Resolve the 'smart' default transport to a concrete plugin name.

    Mutates C.DEFAULT_TRANSPORT in place: paramiko when ssh lacks
    ControlPersist support and paramiko is importable, ssh otherwise.
    """
    if C.DEFAULT_TRANSPORT != 'smart':
        # Already a concrete transport; nothing to resolve.
        return
    # TODO: check if we can deprecate this as ssh w/o control persist should
    # not be as common anymore.
    # see if SSH can support ControlPersist if not use paramiko
    if not check_for_controlpersist('ssh') and paramiko is not None:
        C.DEFAULT_TRANSPORT = "paramiko"
    else:
        C.DEFAULT_TRANSPORT = "ssh"
| ansible/ansible | lib/ansible/utils/ssh_functions.py | Python | gpl-3.0 | 2,282 |
from __future__ import print_function
import sys
import time
import random
import lasagne
import numpy as np
from objectives import *
from tabulate import tabulate
np.set_printoptions(precision=4, linewidth=150)
def get_functions(net, obj, input_x, target_y, batch_size, train_size, test_size, optimizer='nesterov',
                  train_clip=False, thresh=3, **params):
    """Compile theano train/test functions for `net` under objective `obj`.

    Returns (train_func, test_func, predict_test, update_optimizer,
    update_regweight).  Both returned *_func callables map
    (inputs, targets) -> [loss, accuracy, nll, reg]; the two updater
    closures mutate the shared learning-rate/momentum and reg-weight
    variables in place.  `optimizer` must be 'nesterov' or 'adam'.
    """
    # Stochastic (non-deterministic) forward pass for training.
    predict_train = lasagne.layers.get_output(net, deterministic=False, train_clip=train_clip, thresh=thresh)
    accuracy_train = lasagne.objectives.categorical_accuracy(predict_train, target_y).mean()
    # `obj` also returns the shared regularization-weight variable `rw`,
    # which is reused for the test-time loss below.
    loss_train, rw = obj(
        predict_train, target_y, net, batch_size=batch_size, num_samples=train_size,
        train_clip=train_clip, thresh=thresh, **params)
    nll_train = ell(predict_train, target_y)
    reg_train = rw*reg(net, train_clip=train_clip, thresh=thresh)
    # Deterministic forward pass for evaluation.
    predict_test = lasagne.layers.get_output(net, thresh=thresh, deterministic=True)
    accuracy_test = lasagne.objectives.categorical_accuracy(predict_test, target_y).mean()
    loss_test, _ = obj(predict_test, target_y, net, batch_size=batch_size, num_samples=test_size, rw=rw, thresh=thresh, **params)
    nll_test = ell(predict_test, target_y)
    reg_test = rw*reg(net, train_clip=train_clip, thresh=thresh)
    weights = lasagne.layers.get_all_params(net, trainable=True)
    # Shared scalars so the schedule can be changed without recompiling;
    # `beta` is momentum for nesterov and beta1 for adam.
    lr, beta = theano.shared(np.cast[theano.config.floatX](0)), theano.shared(np.cast[theano.config.floatX](0))
    if optimizer == 'nesterov':
        updates = lasagne.updates.nesterov_momentum(loss_train, weights, learning_rate=lr, momentum=beta)
    elif optimizer == 'adam':
        updates = lasagne.updates.adam(loss_train, weights, learning_rate=lr, beta1=beta)
    else:
        raise Exception('opt wtf')
    train_func = theano.function(
        [input_x, target_y], [loss_train, accuracy_train, nll_train, reg_train],
        allow_input_downcast=True, updates=updates)
    test_func = theano.function(
        [input_x, target_y], [loss_test, accuracy_test, nll_test, reg_test],
        allow_input_downcast=True)
    def update_optimizer(new_lr, new_beta):
        # Mutates the shared optimizer hyper-parameters in place.
        lr.set_value(np.cast[theano.config.floatX](new_lr))
        beta.set_value(np.cast[theano.config.floatX](new_beta))
    def update_regweight(new_rw):
        # Mutates the shared regularization weight in place.
        rw.set_value(np.cast[theano.config.floatX](new_rw))
    return train_func, test_func, predict_test, update_optimizer, update_regweight
def net_configuration(net, short=False):
    """Render a human-readable summary of the network architecture.

    With short=True only a one-line summary of the layer feeding `net` is
    returned; otherwise the whole stack is walked from the output layer
    down to the input layer and formatted as a table (rows reversed so the
    input layer comes first).
    """
    if short:
        # .func_name is Python-2 only; layers without a nonlinearity read 'linear'.
        nl = net.input_layer.nonlinearity.func_name if hasattr(net.input_layer, 'nonlinearity') else 'linear'
        return "%s, %s, %s:" % (net.input_layer.name, net.input_layer.input_layer.output_shape, nl)
    table = []
    header = ['Layer', 'output_shape', 'parameters', 'nonlinearity']
    # Walk output -> input; each layer points at its predecessor via .input_layer.
    while hasattr(net, 'input_layer'):
        if hasattr(net, 'nonlinearity') and hasattr(net.nonlinearity, 'func_name'):
            nl = net.nonlinearity.func_name
        else:
            nl = 'linear'
        if net.name is not None:
            table.append((net.name, net.output_shape, net.params.keys(), nl))
        else:
            # Unnamed layer: fall back to the class name, e.g. 'DenseLayer'.
            table.append((str(net.__class__).split('.')[-1][:-2],
                          net.output_shape, net.params.keys(), nl))
        net = net.input_layer
    # The input layer itself has no .input_layer, so append it explicitly.
    if hasattr(net, 'nonlinearity') and hasattr(net.nonlinearity, 'func_name'):
        nl = net.nonlinearity.func_name
    else:
        nl = 'linear'
    table.append((net.name, net.output_shape, net.params.keys(), nl))
    return ">> Net Architecture\n" + tabulate(reversed(table), header, floatfmt=u'.3f') + '\n'
def iter_info(verbose, epoch, start_time, num_epochs, updates, train_info, test_info, printf, net,
              optpolicy_lr, optpolicy_rw, thresh=3, **params):
    """Print a progress row every `verbose` epochs; return the refreshed start_time.

    Invoked as iter_info(**locals()) from train(), so these parameter names
    must match train()'s local variable names; surplus locals are absorbed
    by **params.  Returns the (possibly reset) epoch start time.
    """
    if verbose and epoch % verbose == 0:
        train_loss, train_acc, train_nll, train_reg = train_info[-1]
        test_loss, test_acc, test_nll, test_reg = test_info[-1]
        # Measure elapsed epoch time, then restart the timer.
        epoch_time, start_time = int(time.time() - start_time), time.time()
        # Per-layer ARD summaries for layers that carry a 'reg' attribute
        # (py2 map/filter returning lists).
        ard_layers = map(lambda l: l.get_ard(thresh=thresh) if 'reg' in l.__dict__ else None,
                         lasagne.layers.get_all_layers(net))
        he = ['epo', 'upd', 'lr', 'beta', 'tr_loss', 'tr_nll', 'tr_acc',
              'te_loss', 'te_nll', 'te_acc', 'reg', 'rw', 'ard', 'sec']
        info = ('%s/%s' % (str(epoch).zfill(3) , num_epochs),
                updates,
                optpolicy_lr(epoch)[0], '\'%.2f' % optpolicy_lr(epoch)[1],
                '\'%.3f' % train_loss, train_nll, '\'%.3f' % train_acc,
                '\'%.3f' % test_loss, test_nll, '\'%.3f' % test_acc,
                train_reg, '\'%.2f' % optpolicy_rw(epoch),
                str(filter(None, ard_layers)).replace('\'', ''), epoch_time)
        if epoch == 0:
            # First row: print the full table, including the header.
            printf(">> Start Learning")
            printf(tabulate([info], he, floatfmt='1.1e'))
        else:
            # Subsequent rows: print only the data line of the rendered table.
            printf(tabulate([info], he, tablefmt="plain", floatfmt='1.1e').split('\n')[1])
    return start_time
def iterate_minibatches(inputs, targets, batchsize, shuffle=False):
    """Yield successive (inputs, targets) minibatches of exactly `batchsize`.

    A trailing partial batch (fewer than `batchsize` samples) is dropped,
    matching the original behaviour.  With shuffle=True the pairing between
    inputs and targets is preserved while the sample order is randomised.

    :param inputs: indexable array-like of samples (e.g. a numpy array)
    :param targets: indexable array-like of labels, same length as inputs
    :param batchsize: number of samples per yielded batch
    :param shuffle: randomise sample order via np.random (non-deterministic)
    """
    assert len(inputs) == len(targets)
    if shuffle:
        indices = np.arange(len(inputs))
        np.random.shuffle(indices)
    # range() replaces the Python-2-only xrange and behaves identically here.
    for start_idx in range(0, len(inputs) - batchsize + 1, batchsize):
        if shuffle:
            excerpt = indices[start_idx:start_idx + batchsize]
        else:
            # A slice keeps the unshuffled path copy-free for numpy arrays.
            excerpt = slice(start_idx, start_idx + batchsize)
        yield inputs[excerpt], targets[excerpt]
def batch_iterator_train_crop_flip(data, y, batchsize, shuffle=False):
PIXELS = 28
PAD_CROP = 4
n_samples = data.shape[0]
# Shuffles indicies of training data, so we can draw batches from random indicies instead of shuffling whole data
indx = np.random.permutation(xrange(n_samples))
for i in range((n_samples + batchsize - 1) // batchsize):
sl = slice(i * batchsize, (i + 1) * batchsize)
X_batch = data[indx[sl]]
y_batch = y[indx[sl]]
# pad and crop settings
trans_1 = random.randint(0, (PAD_CROP*2))
trans_2 = random.randint(0, (PAD_CROP*2))
crop_x1 = trans_1
crop_x2 = (PIXELS + trans_1)
crop_y1 = trans_2
crop_y2 = (PIXELS + trans_2)
# flip left-right choice
flip_lr = random.randint(0,1)
# set empty copy to hold augmented images so that we don't overwrite
X_batch_aug = np.copy(X_batch)
# for each image in the batch do the augmentation
for j in range(X_batch.shape[0]):
# for each image channel
for k in range(X_batch.shape[1]):
# pad and crop images
img_pad = np.pad(
X_batch_aug[j, k], pad_width=((PAD_CROP, PAD_CROP), (PAD_CROP, PAD_CROP)), mode='constant')
X_batch_aug[j, k] = img_pad[crop_x1:crop_x2, crop_y1:crop_y2]
# flip left-right if chosen
if flip_lr == 1:
X_batch_aug[j, k] = np.fliplr(X_batch_aug[j,k])
# fit model on each batch
yield X_batch_aug, y_batch
def train(net, train_fun, test_fun, up_opt, optpolicy_lr, up_rw, optpolicy_rw, data, num_epochs, batch_size,
          verbose=1, printf=print, thresh=3, da=False):
    """Run the train/eval loop for num_epochs+1 epochs; Ctrl-C stops early.

    Returns (net, train_info, test_info) where the info lists hold one
    averaged [loss, accuracy, nll, reg] row per epoch.

    NOTE: iter_info(**locals()) consumes this function's locals by name, so
    the local variable names here must stay in sync with iter_info()'s
    signature; extra locals are absorbed by its **params.  The dead timing
    variables (end/prev/begin) of the original were removed -- they were
    never read, here or in iter_info().
    """
    sys.stdout.flush()
    train_info, test_info = [], []
    start_time, updates = time.time(), 0
    X_train, y_train, X_test, y_test = data
    try:
        # range() replaces the Python-2-only xrange.
        for epoch in range(num_epochs+1):
            # Apply the learning-rate/momentum and reg-weight schedules.
            up_opt(*optpolicy_lr(epoch))
            up_rw(optpolicy_rw(epoch))
            batches, info = 0, np.zeros(4)
            # Optionally use the crop/flip data-augmenting iterator.
            itera = batch_iterator_train_crop_flip if da else iterate_minibatches
            for inputs, targets in itera(X_train, y_train, batch_size, shuffle=True):
                info += train_fun(inputs, targets)
                batches += 1
                updates += 1
            train_info.append(info/batches)
            batches, info = 0, np.zeros(4)
            for inputs, targets in itera(X_test, y_test, batch_size, shuffle=False):
                info += test_fun(inputs, targets)
                batches += 1
            test_info.append(info/batches)
            start_time = iter_info(**locals())
    except KeyboardInterrupt:
        print('stop train')
    return net, train_info, test_info
def test_net(net, test_fun, data, ard=False):
    """Evaluate `net` on the test split; with ard=True also report sparsity.

    Returns a string 'acc <accuracy> [compression stats]'.
    NOTE(review): relies on a module-level alias `ll` (presumably
    lasagne.layers) that is not imported in this excerpt -- confirm it is
    defined elsewhere in the module.
    """
    params = ll.get_all_params(net)
    paramsv = ll.get_all_param_values(net)
    save_w, atallw, batches, info = 0, 0, 0, np.zeros(4)
    if ard:
        for i in range(len(params)):
            if params[i].name == 'W':
                # Pick the log-variance parameter that accompanies W; its
                # offset depends on the parameter layout (4-D vs not).
                ls2 = paramsv[i + 1] if len(paramsv[i + 1].shape) == 4 else paramsv[i + 2]
                log_alpha = ls2 - np.log(paramsv[i] ** 2)
                # Zero out weights whose dropout log-alpha exceeds 3.
                paramsv[i][log_alpha > 3] = 0
                save_w += np.sum(paramsv[i] == 0)
                atallw += paramsv[i].size
        # Remaining (non-W) parameters, for the 'full' compression ratio.
        totalp = np.sum([p.flatten().shape[0] for p in paramsv]) - atallw
        compression = 'compr_w = %s, compr_full = %s' % (save_w*1.0/atallw, save_w*1.0/totalp)
    else:
        compression = ''
    X_train, y_train, X_test, y_test = data
    # Accumulate [loss, acc, nll, reg] over fixed-size test batches.
    for inputs, targets in iterate_minibatches(X_test, y_test, 100, shuffle=False):
        info += test_fun(inputs, targets)
        batches += 1
    # NOTE(review): info is a raw sum, not an average; acc is meaningful only
    # relative to `batches` -- confirm callers expect the summed value.
    return 'acc %s ' % info[1] + compression
| ars-ashuha/variational-dropout-sparsifies-dnn | nets/utils.py | Python | gpl-3.0 | 9,372 |
# Copyright (C) 2015-2021 Regents of the University of California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import random
import shutil
import subprocess
import unittest
from contextlib import contextmanager
from uuid import uuid4
from toil import resolveEntryPoint
from toil.batchSystems.mesos.test import MesosTestSupport
from toil.test.batchSystems.parasolTestSupport import ParasolTestSupport
from toil.common import Toil
from toil.job import Job
from toil.jobStores.abstractJobStore import (JobStoreExistsException,
NoSuchJobStoreException)
from toil.leader import FailedJobsException
from toil.lib.bioio import root_logger
from toil.test import (ToilTest,
needs_aws_ec2,
needs_google,
needs_gridengine,
needs_mesos,
needs_parasol,
needs_torque,
slow)
from toil.test.sort.sort import (copySubRangeOfFile,
getMidPoint,
main,
makeFileToSort,
merge,
sort)
logger = logging.getLogger(__name__)
defaultLineLen = int(os.environ.get('TOIL_TEST_SORT_LINE_LEN', 10))
defaultLines = int(os.environ.get('TOIL_TEST_SORT_LINES', 10))
defaultN = int(os.environ.get('TOIL_TEST_SORT_N', defaultLineLen * defaultLines / 5))
@contextmanager
def runMain(options):
    """
    Run main(options), then delete the output file when the with-block ends.

    Fix: the cleanup is now inside try/finally.  In the original, an
    exception raised inside the with-body propagated out of the bare
    ``yield`` and skipped the os.remove(), contradicting the stated intent
    that the output file is deleted every time main is run.
    """
    main(options)
    try:
        yield
    finally:
        if os.path.exists(options.outputFile):
            os.remove(options.outputFile)
@slow
class SortTest(ToilTest, MesosTestSupport, ParasolTestSupport):
"""
Tests Toil by sorting a file in parallel on various combinations of job stores and batch
systems.
"""
def setUp(self):
super(SortTest, self).setUp()
self.tempDir = self._createTempDir(purpose='tempDir')
self.outputFile = os.path.join(self.tempDir, 'sortedFile.txt')
self.inputFile = os.path.join(self.tempDir, "fileToSort.txt")
def tearDown(self):
if os.path.exists(self.tempDir):
shutil.rmtree(self.tempDir)
ToilTest.tearDown(self)
def _toilSort(self, jobStoreLocator, batchSystem,
lines=defaultLines, N=defaultN, testNo=1, lineLen=defaultLineLen,
retryCount=2, badWorker=0.5, downCheckpoints=False, disableCaching=False):
"""
Generate a file consisting of the given number of random lines, each line of the given
length. Sort the file with Toil by splitting the file recursively until each part is less
than the given number of bytes, sorting each part and merging them back together. Then
verify the result.
:param jobStoreLocator: The location of the job store.
:param batchSystem: the name of the batch system
:param lines: the number of random lines to generate
:param N: the size in bytes of each split
:param testNo: the number of repeats of this test
:param lineLen: the length of each random line in the file
"""
for test in range(testNo):
try:
# Specify options
options = Job.Runner.getDefaultOptions(jobStoreLocator)
options.logLevel = logging.getLevelName(root_logger.getEffectiveLevel())
options.retryCount = retryCount
options.batchSystem = batchSystem
options.clean = "never"
options.badWorker = badWorker
options.badWorkerFailInterval = 0.05
options.disableCaching = disableCaching
# This is required because mesosMasterAddress now defaults to the IP of the machine
# that is starting the workflow while the mesos *tests* run locally.
if batchSystem == 'mesos':
options.mesosMasterAddress = 'localhost:5050'
options.downCheckpoints = downCheckpoints
options.N = N
options.outputFile = self.outputFile
options.fileToSort = self.inputFile
options.overwriteOutput = True
options.realTimeLogging = True
# Make the file to sort
makeFileToSort(options.fileToSort, lines=lines, lineLen=lineLen)
# First make our own sorted version
with open(options.fileToSort, 'r') as fileHandle:
l = fileHandle.readlines()
l.sort()
# Check we get an exception if we try to restart a workflow that doesn't exist
options.restart = True
with self.assertRaises(NoSuchJobStoreException):
with runMain(options):
# Now check the file is properly sorted..
with open(options.outputFile, 'r') as fileHandle:
l2 = fileHandle.readlines()
self.assertEqual(l, l2)
options.restart = False
# Now actually run the workflow
try:
with runMain(options):
pass
i = 0
except FailedJobsException as e:
i = e.numberOfFailedJobs
# Check we get an exception if we try to run without restart on an existing store
with self.assertRaises(JobStoreExistsException):
with runMain(options):
pass
options.restart = True
# This loop tests the restart behavior
totalTrys = 1
while i != 0:
options.useExistingOptions = random.random() > 0.5
try:
with runMain(options):
pass
i = 0
except FailedJobsException as e:
i = e.numberOfFailedJobs
if totalTrys > 32: # p(fail after this many restarts) = 0.5**32
self.fail('Exceeded a reasonable number of restarts')
totalTrys += 1
finally:
subprocess.check_call([resolveEntryPoint('toil'), 'clean', jobStoreLocator])
# final test to make sure the jobStore was actually deleted
self.assertRaises(NoSuchJobStoreException, Toil.resumeJobStore, jobStoreLocator)
@needs_aws_ec2
def testAwsSingle(self):
self._toilSort(jobStoreLocator=self._awsJobStore(), batchSystem='single_machine')
@needs_aws_ec2
@needs_mesos
def testAwsMesos(self):
self._startMesos()
try:
self._toilSort(jobStoreLocator=self._awsJobStore(), batchSystem="mesos")
finally:
self._stopMesos()
@needs_mesos
def testFileMesos(self):
self._startMesos()
try:
self._toilSort(jobStoreLocator=self._getTestJobStorePath(), batchSystem="mesos")
finally:
self._stopMesos()
@needs_google
def testGoogleSingle(self):
self._toilSort(jobStoreLocator=self._googleJobStore(), batchSystem="single_machine")
@needs_google
@needs_mesos
def testGoogleMesos(self):
self._startMesos()
try:
self._toilSort(jobStoreLocator=self._googleJobStore(), batchSystem="mesos")
finally:
self._stopMesos()
def testFileSingle(self):
self._toilSort(jobStoreLocator=self._getTestJobStorePath(), batchSystem='single_machine')
def testFileSingleNonCaching(self):
self._toilSort(jobStoreLocator=self._getTestJobStorePath(), batchSystem='single_machine',
disableCaching=True)
def testFileSingleCheckpoints(self):
self._toilSort(jobStoreLocator=self._getTestJobStorePath(), batchSystem='single_machine',
retryCount=2, downCheckpoints=True)
def testFileSingle10000(self):
self._toilSort(jobStoreLocator=self._getTestJobStorePath(), batchSystem='single_machine',
lines=10000, N=10000)
@needs_gridengine
@unittest.skip('GridEngine does not support shared caching')
def testFileGridEngine(self):
self._toilSort(jobStoreLocator=self._getTestJobStorePath(), batchSystem='gridengine')
@needs_torque
@unittest.skip('PBS/Torque does not support shared caching')
def testFileTorqueEngine(self):
self._toilSort(jobStoreLocator=self._getTestJobStorePath(), batchSystem='torque')
@needs_parasol
@unittest.skip("skipping until parasol support is less flaky (see github issue #449")
def testFileParasol(self):
self._startParasol()
try:
self._toilSort(jobStoreLocator=self._getTestJobStorePath(), batchSystem='parasol')
finally:
self._stopParasol()
testNo = 5
def testSort(self):
for test in range(self.testNo):
tempFile1 = os.path.join(self.tempDir, "fileToSort.txt")
makeFileToSort(tempFile1)
lines1 = self._loadFile(tempFile1)
lines1.sort()
sort(tempFile1)
with open(tempFile1, 'r') as f:
lines2 = f.readlines()
self.assertEqual(lines1, lines2)
def testMerge(self):
for test in range(self.testNo):
tempFile1 = os.path.join(self.tempDir, "fileToSort1.txt")
tempFile2 = os.path.join(self.tempDir, "fileToSort2.txt")
tempFile3 = os.path.join(self.tempDir, "mergedFile.txt")
makeFileToSort(tempFile1)
makeFileToSort(tempFile2)
sort(tempFile1)
sort(tempFile2)
with open(tempFile3, 'w') as fileHandle:
with open(tempFile1) as tempFileHandle1:
with open(tempFile2) as tempFileHandle2:
merge(tempFileHandle1, tempFileHandle2, fileHandle)
lines1 = self._loadFile(tempFile1) + self._loadFile(tempFile2)
lines1.sort()
with open(tempFile3, 'r') as f:
lines2 = f.readlines()
self.assertEqual(lines1, lines2)
    def testCopySubRangeOfFile(self):
        """copySubRangeOfFile must return exactly the bytes in [fileStart, fileEnd).

        Picks a random sub-range of a generated file, writes the copied range
        to a second file and compares it against slicing the original content.
        """
        for test in range(self.testNo):
            tempFile = os.path.join(self.tempDir, "fileToSort1.txt")
            outputFile = os.path.join(self.tempDir, "outputFileToSort1.txt")
            makeFileToSort(tempFile, lines=10, lineLen=defaultLineLen)
            fileSize = os.path.getsize(tempFile)
            assert fileSize > 0
            # fileStart <= fileSize - 1, so range(fileStart, fileSize) is never empty.
            fileStart = random.choice(range(0, fileSize))
            fileEnd = random.choice(range(fileStart, fileSize))
            with open(outputFile, 'w') as f:
                f.write(copySubRangeOfFile(tempFile, fileStart, fileEnd))
            with open(outputFile, 'r') as f:
                l = f.read()
            with open(tempFile, 'r') as f:
                l2 = f.read()[fileStart:fileEnd]
            self.assertEqual(l, l2)
    def testGetMidPoint(self):
        """getMidPoint must return an in-range offset that lands on a newline."""
        for test in range(self.testNo):
            makeFileToSort(self.inputFile)
            # NOTE(review): despite the name, this is the raw (unsorted) file
            # content — only the byte at midPoint is inspected.
            with open(self.inputFile, 'r') as f:
                sorted_contents = f.read()
            fileSize = os.path.getsize(self.inputFile)
            midPoint = getMidPoint(self.inputFile, 0, fileSize)
            print(f"The mid point is {midPoint} of a file of {fileSize} bytes.")
            assert midPoint < fileSize
            assert sorted_contents[midPoint] == '\n'
            assert midPoint >= 0
def _awsJobStore(self):
return f'aws:{self.awsRegion()}:sort-test-{uuid4()}'
def _googleJobStore(self):
return f'google:{os.getenv("TOIL_GOOGLE_PROJECTID")}:sort-test-{uuid4()}'
def _loadFile(self, path):
with open(path, 'r') as f:
return f.readlines()
| BD2KGenomics/slugflow | src/toil/test/sort/sortTest.py | Python | apache-2.0 | 12,590 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Shrink/redeclare ``Answer.answer`` as a 30-char field ("Antwort")."""
    dependencies = [
        ('oracle', '0005_auto_20140921_1050'),
    ]
    operations = [
        migrations.AlterField(
            model_name='answer',
            name='answer',
            # NOTE(review): bytes verbose_name (b'Antwort') alongside
            # unicode_literals is a Python 2 artefact — confirm intended.
            field=models.CharField(max_length=30, verbose_name=b'Antwort'),
        ),
    ]
| rolandgeider/pk15-orakel | oracle/migrations/0006_auto_20140921_1059.py | Python | agpl-3.0 | 426 |
import StringIO
import getpass
import json
import os
import requests
import shutil
import zipfile
from pymotion import __version__
API_RELEASES_URL = 'https://api.github.com/repos/LeResKP/pymotion/releases'
def main():
    """Create (or reuse) the GitHub release for the current package version
    and upload the Angular and Python distribution archives as assets.

    Prompts interactively for the GitHub password; requires github3.py and
    pre-built artefacts under ./angular/build/ and ./dist/.
    """
    pwd = getpass.getpass('Github password for lereskp: ')
    from github3 import login
    gh = login('lereskp', password=pwd)
    repo = gh.repository('lereskp', 'pymotion')
    # Reuse an existing release whose tag matches the package version.
    release = None
    for rel in repo.iter_releases():
        if rel.tag_name == __version__:
            release = rel
            break
    if not release:
        release = repo.create_release(
            tag_name=__version__,
            target_commitish='develop',
            name=__version__,
            body='Description',
            draft=False,
            prerelease=True
        )
    # Upload the pre-built Angular bundle as a zip asset.
    asset_ng = release.upload_asset(
        content_type='application/zip',
        name='pymotion-ng.zip',
        asset=open('./angular/build/pymotion-ng.zip', 'rb').read()
    )
    # ./dist/ is expected to contain exactly one distribution tarball.
    filename = None
    for root, dirs, files in os.walk('./dist/'):
        assert(len(files) == 1)
        filename = files[0]
    assert(filename)
    asset_py = release.upload_asset(
        content_type='application/x-gzip',
        name='pymotion-py.tar.gz',
        asset=open('./dist/%s' % filename, 'rb').read()
    )
# Script entry point.
if __name__ == '__main__':
    main()
| LeResKP/pymotion | pymotion/scripts/release.py | Python | mit | 1,354 |
"""fusilier URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.conf.urls import url, include
# Route the Jet admin theme under /jet/ and the Django admin under /admin/.
urlpatterns = [
    url(r'^jet/', include('jet.urls', 'jet')),
    url(r'^admin/', admin.site.urls),
]
| joedborg/fusilier | fusilier/urls.py | Python | agpl-3.0 | 822 |
#Virtual Topology– Chapter 3: Process Based Parallelism
from mpi4py import MPI
import numpy as np
# Indices into neighbour_processes for each cardinal direction of the grid.
UP = 0
DOWN = 1
LEFT = 2
RIGHT = 3
# Ranks of the four neighbouring processes; filled in by the main block below.
neighbour_processes = [0,0,0,0]
if __name__ == "__main__":
    comm = MPI.COMM_WORLD
    rank = comm.rank
    size = comm.size
    # Choose a near-square process grid: rows = floor(sqrt(size)).
    grid_rows = int(np.floor(np.sqrt(comm.size)))
    grid_column = comm.size // grid_rows
    # NOTE(review): with rows = floor(sqrt(size)) and integer division,
    # rows*columns <= size always holds, so these corrections look unreachable.
    if grid_rows*grid_column > size:
        grid_column -= 1
    if grid_rows*grid_column > size:
        grid_rows -= 1
    if (rank == 0) :
        print("Building a %d x %d grid topology:"\
              % (grid_rows, grid_column) )
    #Bidimensional MxN Mesh
##    cartesian_communicator = comm.Create_cart( (grid_rows, grid_column), periods=(False, False), reorder=True)
##    my_mpi_row, my_mpi_col = cartesian_communicator.Get_coords( cartesian_communicator.rank )
    # print ("rank = %s grid row = %s grid column =%s" %(rank, my_mpi_row, my_mpi_col))
    #Thorus MxN: periods=(True, True) wraps both grid dimensions around.
    cartesian_communicator = \
        comm.Create_cart( \
            (grid_rows, grid_column), \
            periods=(True, True), reorder=True)
    my_mpi_row, my_mpi_col = \
        cartesian_communicator.Get_coords\
        ( cartesian_communicator.rank )
##    print ("rank = %s grid row = %s grid column =%s" %(rank, my_mpi_row, my_mpi_col))
##
    # Shift along each dimension to find this rank's four neighbours.
    neighbour_processes[UP], neighbour_processes[DOWN]\
        = cartesian_communicator.Shift(0, 1)
    neighbour_processes[LEFT], \
        neighbour_processes[RIGHT] = \
        cartesian_communicator.Shift(1, 1)
    print ("Process = %s \
    row = %s \
    column = %s ----> neighbour_processes[UP] = %s \
    neighbour_processes[DOWN] = %s \
    neighbour_processes[LEFT] =%s neighbour_processes[RIGHT]=%s" \
           %(rank, my_mpi_row, \
             my_mpi_col,neighbour_processes[UP], \
             neighbour_processes[DOWN], \
             neighbour_processes[LEFT] , \
             neighbour_processes[RIGHT]))
| IdiosyncraticDragon/Reading-Notes | Python Parallel Programming Cookbook_Code/Chapter 3/virtualTopology.py | Python | apache-2.0 | 2,072 |
"""
This sample shows how to update tan item
"""
import arcrest
from arcresthelper import securityhandlerhelper
def trace():
    """
    Return ``(line, filename, synerror)`` describing the exception currently
    being handled: the "line N" fragment of the traceback, the path of this
    source file, and the final line of the formatted exception.

    Must be called from inside an ``except`` block.
    """
    # Fix: ``sys`` was never imported, so sys.exc_info() raised NameError.
    import traceback, inspect, sys
    tb = sys.exc_info()[2]
    tbinfo = traceback.format_tb(tb)[0]
    filename = inspect.getfile(inspect.currentframe())
    # script name + line number
    line = tbinfo.split(", ")[1]
    # Get Python syntax error
    #
    synerror = traceback.format_exc().splitlines()[-1]
    return line, filename, synerror
def main():
    """Update an existing portal item's file content via the ArcREST API.

    Builds the security configuration for arcgis.com, then uploads
    ``upload_file`` over the item identified by ``itemId``.
    Note: this sample uses Python 2 print statements.
    """
    proxy_port = None
    proxy_url = None
    # Credential / connection configuration consumed by securityhandlerhelper.
    securityinfo = {}
    securityinfo['security_type'] = 'Portal'#LDAP, NTLM, OAuth, Portal, PKI
    securityinfo['username'] = "<User Name>"
    securityinfo['password'] = "<Password>"
    securityinfo['org_url'] = "http://www.arcgis.com"
    securityinfo['proxy_url'] = proxy_url
    securityinfo['proxy_port'] = proxy_port
    securityinfo['referer_url'] = None
    securityinfo['token_url'] = None
    securityinfo['certificatefile'] = None
    securityinfo['keyfile'] = None
    securityinfo['client_id'] = None
    securityinfo['secret_id'] = None
    # Placeholders: fill in the target item id and the replacement file.
    itemId = "<ID if item>"
    upload_file = r"<Path to File>"
    try:
        shh = securityhandlerhelper.securityhandlerhelper(securityinfo)
        if shh.valid == False:
            print shh.message
        else:
            portalAdmin = arcrest.manageorg.Administration(securityHandler=shh.securityhandler)
            uc = portalAdmin.content.usercontent()
            itemParams = arcrest.manageorg.ItemParameter()
            itemParams.filename = upload_file
            res = uc.updateItem(itemId=itemId,
                                updateItemParameters=itemParams,
                                folderId=None,
                                clearEmptyFields=True,
                                filePath=upload_file,
                                url=None,
                                text=None,
                                multipart = False
                                )
            print res
    except:
        line, filename, synerror = trace()
        print("error on line: %s" % line)
        print("error in file name: %s" % filename)
        print("with error message: %s" % synerror)
# Script entry point.
if __name__ == "__main__":
    main()
from marshmallow import Schema, fields
class PetSchema(Schema):
    """Pet resource: dump-only id, required name, load-only password."""
    # Shared metadata descriptions referenced by the field declarations below.
    description = dict(id="Pet id", name="Pet name", password="Password")
    id = fields.Int(dump_only=True, metadata={"description": description["id"]})
    name = fields.Str(
        required=True,
        metadata={
            "description": description["name"],
            "deprecated": False,
            "allowEmptyValue": False,
        },
    )
    password = fields.Str(
        load_only=True, metadata={"description": description["password"]}
    )
class SampleSchema(Schema):
    """Sample with a many-nested forward reference to RunSchema (by name)."""
    runs = fields.Nested("RunSchema", many=True)
    count = fields.Int()
class RunSchema(Schema):
    """Run nesting SampleSchema — forms a reference cycle with it."""
    sample = fields.Nested(SampleSchema)
class AnalysisSchema(Schema):
    """Analysis with a single nested SampleSchema."""
    sample = fields.Nested(SampleSchema)
class AnalysisWithListSchema(Schema):
    """Analysis nesting SampleSchema via an explicit List of Nested."""
    samples = fields.List(fields.Nested(SampleSchema))
class PatternedObjectSchema(Schema):
    """Fields carrying x-prefixed metadata keys (dash and underscore forms)."""
    count = fields.Int(dump_only=True, metadata={"x-count": 1})
    count2 = fields.Int(dump_only=True, metadata={"x_count2": 2})
class SelfReferencingSchema(Schema):
    """Schema nesting itself (single and many) through lambdas."""
    id = fields.Int()
    single = fields.Nested(lambda: SelfReferencingSchema())
    many = fields.Nested(lambda: SelfReferencingSchema(many=True))
class OrderedSchema(Schema):
    """Five int fields with Meta.ordered preserving declaration order."""
    field1 = fields.Int()
    field2 = fields.Int()
    field3 = fields.Int()
    field4 = fields.Int()
    field5 = fields.Int()
    class Meta:
        ordered = True
class DefaultValuesSchema(Schema):
    """load_default values, with and without an explicit metadata default."""
    number_auto_default = fields.Int(load_default=12)
    number_manual_default = fields.Int(load_default=12, metadata={"default": 42})
    string_callable_default = fields.Str(load_default=lambda: "Callable")
    string_manual_default = fields.Str(
        load_default=lambda: "Callable", metadata={"default": "Manual"}
    )
    numbers = fields.List(fields.Int, load_default=list)
class CategorySchema(Schema):
    """Category with id, required name and a dump-only breed."""
    id = fields.Int()
    name = fields.Str(required=True)
    breed = fields.Str(dump_only=True)
class CustomList(fields.List):
    """Bare subclass of fields.List used to test custom field resolution."""
    pass
class CustomStringField(fields.String):
    """Bare subclass of fields.String used to test custom field resolution."""
    pass
class CustomIntegerField(fields.Integer):
    """Bare subclass of fields.Integer used to test custom field resolution."""
    pass
| marshmallow-code/apispec | tests/schemas.py | Python | mit | 2,126 |
# -*- coding: utf-8 -*-
from django.db import models, migrations
import datetime
class Migration(migrations.Migration):
    """Add the SystemSettings model (proxy + Elasticsearch switches).

    The AlterField operations only refresh auto-captured datetime defaults;
    the frozen timestamps are an artefact of makemigrations, not meaningful.
    """
    dependencies = [
        ('rules', '0035_auto_20150202_0937'),
    ]
    operations = [
        migrations.CreateModel(
            name='SystemSettings',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('use_http_proxy', models.BooleanField(default=False)),
                ('http_proxy', models.CharField(default=b'', max_length=200, blank=True)),
                ('https_proxy', models.CharField(default=b'', max_length=200, blank=True)),
                ('use_elasticsearch', models.BooleanField(default=True)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.AlterField(
            model_name='category',
            name='created_date',
            field=models.DateTimeField(default=datetime.datetime(2015, 2, 3, 14, 21, 41, 717852), verbose_name=b'date created'),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='sourceatversion',
            name='updated_date',
            field=models.DateTimeField(default=datetime.datetime(2015, 2, 3, 14, 21, 41, 717121), verbose_name=b'date updated', blank=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='sourceupdate',
            name='created_date',
            field=models.DateTimeField(default=datetime.datetime(2015, 2, 3, 14, 21, 41, 717462), verbose_name=b'date of update', blank=True),
            preserve_default=True,
        ),
    ]
| StamusNetworks/scirius | rules/migrations/0036_auto_20150203_1421.py | Python | gpl-3.0 | 1,713 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2009-2011, Nicolas Clairon
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the University of California, Berkeley nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import print_function
import unittest
from mongokit import *
import six
string_type = six.string_types[0]
class CustomTypesTestCase(unittest.TestCase):
    """Integration tests for MongoKit CustomType conversion.

    Each test talks to a live local MongoDB ('test' database) and verifies
    that values are stored as ``mongo_type`` (via ``to_bson``) and surfaced
    as ``python_type`` (via ``to_python``), including defaults, nesting,
    lists, and the error paths for badly declared custom types.
    Note: ``.next()`` cursor calls are Python 2 style.
    """
    def setUp(self):
        """Open a connection and bind the working collection."""
        self.connection = Connection()
        self.col = self.connection['test']['mongokit']
    def tearDown(self):
        """Drop the collections used by the tests."""
        self.connection['test'].drop_collection('mongokit')
        self.connection['test'].drop_collection('test')
    def test_custom_type(self):
        """Round-trip a datetime custom type, including the default value."""
        import datetime
        class CustomDate(CustomType):
            mongo_type = str
            python_type = datetime.datetime
            def to_bson(self, value):
                """convert type to a mongodb type"""
                return str(datetime.datetime.strftime(value,'%y-%m-%d'))
            def to_python(self, value):
                """convert type to a python object"""
                if value is not None:
                    return datetime.datetime.strptime(value, '%y-%m-%d')
        class Foo(Document):
            structure = {
                "date": CustomDate(),
            }
            default_values = {'date':datetime.datetime(2008, 6, 7)}
        self.connection.register([Foo])
        foo = self.col.Foo()
        foo['_id'] = 1
        foo['date'] = datetime.datetime(2003,2,1)
        foo.save()
        saved_foo = foo.collection.find({'_id':1}).next()
        assert saved_foo == {'date': '03-02-01', '_id': 1}
        foo.save()
        foo2 = self.col.Foo()
        foo2['_id'] = 2
        foo2.save()
        foo2.save()
        assert foo['date'] == datetime.datetime(2003,2,1), foo['date']
        foo = self.col.Foo.get_from_id(1)
        assert foo['date'] == datetime.datetime(2003,2,1), foo['date']
        saved_foo = foo.collection.find({'_id':1}).next()
        assert saved_foo['date'] == CustomDate().to_bson(datetime.datetime(2003,2,1)), saved_foo['date']
        foo2 = self.col.Foo.get_from_id(2)
        assert foo2['date'] == datetime.datetime(2008,6,7), foo2
    def test_custom_type2(self):
        """Store a string price as a float via a custom type (dot notation)."""
        import datetime
        class CustomPrice(CustomType):
            mongo_type = float
            python_type = string_type
            def to_bson(self, value):
                return float(value)
            def to_python(self, value):
                return str(value)
        class Receipt(Document):
            use_dot_notation = True
            structure = {
                'price': CustomPrice(),
            }
        self.connection.register([Receipt])
        r = self.connection.test.test.Receipt()
        r['_id'] = 'bla'
        r['price'] = '9.99'
        r.save()
        r_saved = r.collection.find_one({'_id':'bla'})
        assert r_saved == {'_id': 'bla', 'price': 9.9900000000000002}
    def test_instance_type(self):
        """Validation accepts subclasses of the declared type, rejects others."""
        from bson.dbref import DBRef
        from bson.objectid import ObjectId
        class Bla(ObjectId):pass
        class Ble(DBRef):pass
        class MyDoc(Document):
            structure = { "foo":ObjectId }
        self.connection.register([MyDoc])
        doc = self.col.MyDoc()
        doc['foo'] = Ble("bla", "ble", "bli")
        self.assertRaises(SchemaTypeError, doc.validate)
        doc['foo'] = Bla()
        doc.validate()
    def test_custom_type_nested(self):
        """Custom type inside a nested dict, with a dotted default value."""
        import datetime
        class CustomDate(CustomType):
            mongo_type = str
            python_type = datetime.datetime
            def to_bson(self, value):
                """convert type to a mongodb type"""
                return str(datetime.datetime.strftime(value,'%y-%m-%d'))
            def to_python(self, value):
                """convert type to a python object"""
                if value is not None:
                    return datetime.datetime.strptime(value, '%y-%m-%d')
        class Foo(Document):
            structure = {
                'foo':{'date': CustomDate()},
            }
            default_values = {'foo.date':datetime.datetime(2008, 6, 7)}
        self.connection.register([Foo])
        foo = self.col.Foo()
        foo['_id'] = 1
        foo['foo']['date'] = datetime.datetime(2003,2,1)
        foo.save()
        foo.save()
        foo2 = self.col.Foo()
        foo2['_id'] = 2
        foo2.save()
        assert foo['foo']['date'] == datetime.datetime(2003,2,1), foo['foo']['date']
        foo = self.col.Foo.get_from_id(1)
        assert foo['foo']['date'] == datetime.datetime(2003,2,1)
        saved_foo = foo.collection.find({'_id':1}).next()
        assert saved_foo['foo']['date'] == CustomDate().to_bson(datetime.datetime(2003,2,1)), foo['foo']['date']
        foo2 = self.col.Foo.get_from_id(2)
        assert foo2['foo']['date'] == datetime.datetime(2008,6,7), foo2
    def test_custom_type_nested_in_list(self):
        """Custom type inside a nested list, including type-error rejection."""
        import datetime
        class CustomDate(CustomType):
            mongo_type = str
            python_type = datetime.datetime
            def to_bson(self, value):
                """convert type to a mongodb type"""
                return str(datetime.datetime.strftime(value,'%y-%m-%d'))
            def to_python(self, value):
                """convert type to a python object"""
                if value is not None:
                    return datetime.datetime.strptime(value, '%y-%m-%d')
        class Foo(Document):
            structure = {
                'foo':{'date': [CustomDate()]},
            }
            default_values = {'foo.date':[datetime.datetime(2008, 6, 7)]}
        self.connection.register([Foo])
        foo = self.col.Foo()
        foo['_id'] = 1
        foo['foo']['date'].append(datetime.datetime(2003,2,1))
        foo.save()
        foo.save()
        foo1 = self.col.Foo()
        foo1['_id'] = 1
        foo1['foo']['date'].append(1)
        self.assertRaises(SchemaTypeError, foo1.save)
        foo2 = self.col.Foo()
        print(foo2)
        foo2['_id'] = 2
        foo2.save()
        print(id(foo['foo']['date']), id(foo2['foo']['date']))
        assert foo == {'foo': {'date': [datetime.datetime(2008, 6, 7, 0, 0), datetime.datetime(2003, 2, 1, 0, 0)]}, '_id': 1}
        foo = self.col.Foo.get_from_id(1)
        assert foo == {'_id': 1, 'foo': {'date': [datetime.datetime(2008, 6, 7, 0, 0), datetime.datetime(2003, 2, 1, 0, 0)]}}
        saved_foo = foo.collection.find({'_id':1}).next()
        assert saved_foo == {'_id': 1, 'foo': {'date': ['08-06-07', '03-02-01']}}
        foo2 = self.col.Foo.get_from_id(2)
        assert foo2['foo']['date'] == [datetime.datetime(2008,6,7)], foo2
    def test_bad_custom_types(self):
        """Declaring a CustomType without the required attributes must fail."""
        import datetime
        class CustomDate(CustomType):
            def to_bson(self, value):
                """convert type to a mongodb type"""
                return str(datetime.datetime.strftime(value,'%y-%m-%d'))
            def to_python(self, value):
                """convert type to a python object"""
                if value is not None:
                    return datetime.datetime.strptime(value, '%y-%m-%d')
        self.assertRaises(TypeError, CustomDate)
        class CustomDate(CustomType):
            mongo_type = str
        self.assertRaises(TypeError, CustomDate)
        class CustomDate(CustomType):
            mongo_type = str
            python_type = int
        self.assertRaises(NotImplementedError, CustomDate().to_bson, "bla")
        self.assertRaises(NotImplementedError, CustomDate().to_python, "bla")
    def test_bad_custom_type_bad_python_type(self):
        """A default value that mismatches python_type raises at instantiation."""
        import datetime
        class CustomDate(CustomType):
            mongo_type = str
            python_type = string_type
            def to_bson(self, value):
                """convert type to a mongodb type"""
                return str(datetime.datetime.strftime(value,'%y-%m-%d'))
            def to_python(self, value):
                """convert type to a python object"""
                if value is not None:
                    return datetime.datetime.strptime(value, '%y-%m-%d')
        class Foo(Document):
            structure = {
                "date": CustomDate(),
            }
            default_values = {'date':datetime.datetime(2008, 6, 7)}
        #self.assertRaises(DefaultFieldTypeError, self.connection.register, [Foo])
        self.connection.register([Foo])
        failed = False
        try:
            self.col.Foo()
        except DefaultFieldTypeError as e:
            failed = True
            self.assertEqual(str(e), 'date must be an instance of %s not datetime' % six.string_types[0].__name__)
    def test_custom_type_bad_python(self):
        """Various mismatches between python_type, defaults and stored values."""
        import datetime
        class CustomDate(CustomType):
            mongo_type = str
            python_type = str
            def to_bson(self, value):
                """convert type to a mongodb type"""
                return str(datetime.datetime.strftime(value,'%y-%m-%d'))
            def to_python(self, value):
                """convert type to a python object"""
                if value is not None:
                    return datetime.datetime.strptime(value, '%y-%m-%d')
        class Foo(Document):
            structure = {
                "date": CustomDate(),
            }
            default_values = {'date':datetime.datetime(2008, 6, 7)}
        self.connection.register([Foo])
        failed = False
        try:
            self.col.Foo()
        except DefaultFieldTypeError as e:
            failed = True
            self.assertEqual(str(e),
                'date must be an instance of str not datetime')
        class CustomDate(CustomType):
            mongo_type = str
            python_type = datetime.datetime
            def to_bson(self, value):
                """convert type to a mongodb type"""
                return str(datetime.datetime.strftime(value,'%y-%m-%d'))
            def to_python(self, value):
                """convert type to a python object"""
                if value is not None:
                    return datetime.datetime.strptime(value, '%y-%m-%d')
        class Foo(Document):
            structure = {
                "date": CustomDate(),
            }
            default_values = {'date':(2008, 6, 7)}
        self.connection.register([Foo])
        failed = False
        try:
            self.col.Foo()
        except DefaultFieldTypeError as e:
            failed = True
            self.assertEqual(str(e),
                'date must be an instance of datetime not tuple')
        class Foo(Document):
            structure = {
                "date": [CustomDate()],
            }
            default_values = {'date':[(2008, 6, 7)]}
        self.connection.register([Foo])
        failed = False
        try:
            self.col.Foo()
        except DefaultFieldTypeError as e:
            failed = True
            self.assertEqual(str(e),
                'date must be an instance of datetime not tuple')
        class CustomDate(CustomType):
            mongo_type = int
            python_type = datetime.datetime
            def to_bson(self, value):
                """convert type to a mongodb type"""
                return str(datetime.datetime.strftime(value,'%y-%m-%d'))
            def to_python(self, value):
                """convert type to a python object"""
                if value is not None:
                    return datetime.datetime.strptime(value, '%y-%m-%d')
        class Foo(Document):
            structure = {
                "date": CustomDate(),
            }
        self.connection.register([Foo])
        foo = self.col.Foo()
        foo['_id'] = 2
        foo['date'] = datetime.datetime(2003,2,1)
        self.assertRaises(SchemaTypeError, foo.save)
    def test_custom_type_nested_list(self):
        """Custom type inside a list of sub-documents (dot notation)."""
        import datetime
        class CustomPrice(CustomType):
            mongo_type = float
            python_type = str
            def to_bson(self, value):
                return float(value)
            def to_python(self, value):
                return str(value)
        class Receipt(Document):
            use_dot_notation = True
            structure = {
                'products': [
                    {
                        'sku': str,
                        'qty': int,
                        'price': CustomPrice(),
                    }
                ]
            }
        self.connection.register([Receipt])
        r = self.connection.test.test.Receipt()
        r['_id'] = 'bla'
        r.products = []
        r.products.append({ 'sku': 'X-25A5F58B-61', 'qty': 1, 'price': '9.99' })
        r.products.append({ 'sku': 'Z-25A5F58B-62', 'qty': 2, 'price': '2.99' })
        r.save()
        r_saved = r.collection.find_one({'_id':'bla'})
        assert r_saved == {'_id': 'bla', 'products': [{'sku': 'X-25A5F58B-61', 'price': 9.9900000000000002, 'qty': 1}, {'sku': 'Z-25A5F58B-62', 'price': 2.9900000000000002, 'qty': 2}]}
    def test_custom_type_list(self):
        """Custom type used directly, inside a list, and inside a nested dict."""
        import datetime
        class CustomPrice(CustomType):
            mongo_type = float
            python_type = string_type
            def to_bson(self, value):
                return float(value)
            def to_python(self, value):
                return str(value)
        class Receipt(Document):
            structure = {
                'foo': CustomPrice(),
                'price': [CustomPrice()],
                'bar':{'spam':CustomPrice()},
            }
        self.connection.register([Receipt])
        r = self.connection.test.test.Receipt()
        r['_id'] = 'bla'
        r['foo'] = '2.23'
        r['price'].append('9.99')
        r['price'].append('2.99')
        r['bar']['spam'] = '3.33'
        r.save()
        r_saved = r.collection.find_one({'_id':'bla'})
        assert r_saved == {'price': [9.9900000000000002, 2.9900000000000002], '_id': 'bla', 'bar': {'spam': 3.3300000000000001}, 'foo': 2.23}
    def test_custom_type_not_serializable(self):
        """A python_type that BSON cannot serialize still validates via to_bson."""
        from decimal import Decimal
        class DecimalType(CustomType):
            mongo_type = str
            python_type = Decimal
            def to_bson(self, value):
                """convert type to a mongodb type"""
                if value is not None:
                    return str(value)
            def to_python(self, value):
                """convert type to a python object"""
                if value is not None:
                    return Decimal(value)
        class MyDocument(Document):
            structure = {'amount': DecimalType()}
        self.connection.register([MyDocument])
        document = self.col.MyDocument()
        document['amount'] = Decimal('100.00')
        document.validate()
    def test_required_custom_type_mongotype_dict(self):
        """Dict-typed mongo_type with required field, validator and raw index."""
        class CustomObject(CustomType):
            mongo_type = dict
            python_type = float
            def to_bson(self, value):
                return {'f':str(value)}
            def to_python(self, value):
                return float(value['f'])
        class MyDocument(Document):
            structure = {'amount': CustomObject()}
            required_fields = ['amount']
            indexes = [{'fields':['amount.f'], 'check':False}]
            validators = {'amount':lambda x: x > 3.0}
        self.connection.register([MyDocument])
        document = self.col.MyDocument()
        document['_id'] = 'test'
        document['amount'] = 1.00
        self.assertRaises(ValidationError, document.validate)
        document['amount'] = 100.00
        document.save()
        assert self.col.find_one() == {'amount': {'f': '100.0'}, '_id': 'test'}, self.col.find_one()
        assert self.col.MyDocument.find_one() == {'amount': 100.00, '_id': 'test'}, self.col.MyDocument.find_one()
    def test_custom_type_mongotype_dict_index_not_checked(self):
        """Indexing into a custom-typed dict without check=False must fail."""
        class CustomObject(CustomType):
            mongo_type = dict
            python_type = float
            def to_bson(self, value):
                return {'f':str(value)}
            def to_python(self, value):
                return float(value['f'])
        failed = False
        try:
            class MyDocument(Document):
                structure = {'amount': CustomObject()}
                required_fields = ['amount']
                indexes = [{'fields':['amount.f']}]
        except ValueError as e:
            self.assertEqual(str(e), "Error in indexes: can't find amount.f in structure")
            failed = True
        self.assertEqual(failed, True)
    def test_missing_custom_types(self):
        """Documents stored without the custom field still load and update."""
        import datetime
        class CustomDate(CustomType):
            mongo_type = str
            python_type = datetime.datetime
            def to_bson(self, value):
                """convert type to a mongodb type"""
                return str(datetime.datetime.strftime(value,'%y-%m-%d'))
            def to_python(self, value):
                """convert type to a python object"""
                if value is not None:
                    return datetime.datetime.strptime(value, '%y-%m-%d')
        class Foo(Document):
            structure = {
                "date": CustomDate(),
            }
            default_values = {'date':datetime.datetime(2008, 6, 7)}
        self.connection.register([Foo])
        # insert a foo document without this field
        self.col.insert({'_id': 1})
        foo = self.col.Foo.get_from_id(1)
        foo['_id'] = 1
        foo['date'] = datetime.datetime(2003,2,1)
        foo.save()
| aquavitae/mongokit-py3 | tests/test_custom_types.py | Python | bsd-3-clause | 19,152 |
"""Tests converting flat XML files to Gettext PO localization files"""
from io import BytesIO
from translate.convert import flatxml2po, test_convert
class TestFlatXML2PO:
    """Unit tests for the flat-XML → PO converter, driven entirely in memory."""
    def _convert(self, xmlstring, templatestring=None, **kwargs):
        """Helper that converts xml source to po target without requiring files"""
        inputfile = BytesIO(xmlstring.encode())
        templatefile = None
        if templatestring:
            templatefile = BytesIO(templatestring.encode())
        outputfile = BytesIO()
        # kwargs (root/value/key/ns) are forwarded to the converter unchanged.
        converter = flatxml2po.flatxml2po(inputfile, outputfile, templatefile, **kwargs)
        converter.run()
        return converter.target_store, outputfile
    def _convert_to_store(self, *args, **kwargs):
        """Helper that converts to target format store without using files."""
        return self._convert(*args, **kwargs)[0]
    def _convert_to_string(self, *args, **kwargs):
        """Helper that converts to target format string without using files."""
        return self._convert(*args, **kwargs)[1].getvalue().decode("utf-8")
    def _do_assert_store(self, actual):
        """Asserts whether the passed actual store contains two assumed units:
        'one' => 'One'
        'two' => 'Two'
        (plus a header present by default)
        """
        assert actual.units[0].isheader()
        assert len(actual.units) == 3
        one = actual.findid("one")
        assert one
        assert one.target == "One"
        two = actual.findid("two")
        assert two
        assert two.target == "Two"
    def test_defaults(self):
        """Test a conversion with default values."""
        xmlstring = """<root>
            <str key="one">One</str>
            <str key="two">Two</str>
        </root>
        """
        actual = self._convert_to_store(xmlstring)
        self._do_assert_store(actual)
    def test_root_name(self):
        """Test a conversion with different root name."""
        xmlstring = """<strings>
            <str key="one">One</str>
            <str key="two">Two</str>
        </strings>
        """
        actual = self._convert_to_store(xmlstring, root="strings")
        self._do_assert_store(actual)
    def test_value_name(self):
        """Test a conversion with different value name."""
        xmlstring = """<root>
            <entry key="one">One</entry>
            <entry key="two">Two</entry>
        </root>
        """
        actual = self._convert_to_store(xmlstring, value="entry")
        self._do_assert_store(actual)
    def test_key(self):
        """Test a conversion with different key name."""
        xmlstring = """<root>
            <str name="one">One</str>
            <str name="two">Two</str>
        </root>
        """
        actual = self._convert_to_store(xmlstring, key="name")
        self._do_assert_store(actual)
    def test_default_namespace(self):
        """Test a conversion with a default namespace."""
        xmlstring = """<root xmlns="urn:tt:test">
            <str key="one">One</str>
            <str key="two">Two</str>
        </root>
        """
        actual = self._convert_to_store(xmlstring, ns="urn:tt:test")
        self._do_assert_store(actual)
    def test_namespace_prefix(self):
        """Test a conversion with a namespace prefix."""
        xmlstring = """<t:root xmlns:t="urn:tt:test">
            <t:str key="one">One</t:str>
            <t:str key="two">Two</t:str>
        </t:root>
        """
        actual = self._convert_to_store(xmlstring, ns="urn:tt:test")
        self._do_assert_store(actual)
    def test_all_parameters(self):
        """Test a conversion with all parameters."""
        xmlstring = """<fancy xmlns="urn:tt:test">
            <stuff dude="one">One</stuff>
            <stuff dude="two">Two</stuff>
        </fancy>
        """
        actual = self._convert_to_store(
            xmlstring, root="fancy", value="stuff", key="dude", ns="urn:tt:test"
        )
        self._do_assert_store(actual)
    def test_empty_file_is_empty_store(self):
        """Test a conversion that starts with an empty file.
        This must not trigger the element name validation
        or cause other issues. An empty store is expected.
        """
        xmlstring = "<root/>"
        actual = self._convert_to_store(xmlstring)
        assert actual
        assert actual.units[0].isheader()
        assert len(actual.units) == 1
class TestFlatXML2POCommand(test_convert.TestConvertCommand):
    """Tests running actual flatxml2po commands on files"""
    convertmodule = flatxml2po
    def test_help(self, capsys):
        """tests getting help"""
        options = super().test_help(capsys)
        # Verify that each converter-specific option is advertised in --help.
        options = self.help_check(options, "-r ROOT, --root=ROOT")
        options = self.help_check(options, "-v VALUE, --value=VALUE")
        options = self.help_check(options, "-k KEY, --key=KEY")
        options = self.help_check(options, "-n NS, --namespace=NS", last=True)
| miurahr/translate | translate/convert/test_flatxml2po.py | Python | gpl-2.0 | 4,950 |
from dateutil.parser import parse
class Object(object):
    """
    Generic object returned by the API.

    Each derived class declares a ``_SPECIAL`` mapping of attribute names to
    callables used to navigate the data returned by the API. For example,
    Droplet objects contain an embedded Region, which contains one or more
    Sizes. Attribute access falls back to the raw response data.
    """

    _SPECIAL = {}  # derived classes override this

    def __init__(self, credentials, data=None):
        self._credentials = credentials
        self._data = data

    def __repr__(self):
        return '<%s>' % self.__class__.__name__

    def __getattr__(self, key):
        special = self._SPECIAL
        if key in special:
            return special[key](self)
        return self._data[key]

    def _date(self, key):
        """
        Convert an ISO 8601 date to datetime.
        """
        return parse(self._data[key])

    def _filter(self, endpoint, class_, key, condition):
        """
        Return a generator that filters based on a condition.
        """
        return (item for item in self._multi(endpoint, class_, key)
                if condition(item))

    def _multi(self, endpoint, class_, key):
        """
        Return a generator for items at the specified endpoint.
        """
        for entry in self._credentials.request(endpoint)[key]:
            yield class_(self._credentials, entry)

    def _single(self, class_, key):
        """
        Return an embedded instance of the specified class.
        """
        return class_(self._credentials, self._data[key])
class Account(Object):
    """Top-level account: entry point to all collection endpoints."""
    _SPECIAL = {
        'actions': lambda x: x._multi('/actions', Action, 'actions'),
        'domains': lambda x: x._multi('/domains', Domain, 'domains'),
        'droplets': lambda x: x._multi('/droplets', Droplet, 'droplets'),
        'images': lambda x: x._multi('/images', Image, 'images'),
        'keys': lambda x: x._multi('/account/keys', SSHKey, 'ssh_keys'),
        'regions': lambda x: x._multi('/regions', Region, 'regions'),
        'sizes': lambda x: x._multi('/sizes', Size, 'sizes'),
    }
class Action(Object):
    """API action: timestamps plus the region the action ran in."""
    _SPECIAL = {
        'completed_at': lambda x: x._date('completed_at'),
        # Fix: use the builtin next() (Python 2.6+ and 3.x) instead of the
        # Python 2-only generator .next() method, which raises AttributeError
        # on Python 3.
        'region': lambda x: next(x._filter('/regions', Region, 'regions', lambda y: x._data['region'] == y.slug)),
        'started_at': lambda x: x._date('started_at'),
    }
    # TODO: implement a 'resource' property that automatically
    # fetches the appropriate resource based on ID and type
class Backup(Object):
    """Droplet backup (returned by Droplet's 'backups' endpoint)."""
    pass
class Domain(Object):
    """DNS domain; exposes its DomainRecord entries."""
    _SPECIAL = {
        'records': lambda x: x._multi('/domains/%s/records' % x.name, DomainRecord, 'domain_records'),
    }
class DomainRecord(Object):
    """Single DNS record of a Domain."""
    pass
class Droplet(Object):
    """A virtual machine; exposes embedded objects and related collections."""

    # _multi entries trigger additional API requests; _single/_date entries
    # wrap data already embedded in this droplet's JSON payload.
    _SPECIAL = {
        'actions': lambda x: x._multi('/droplets/%d/actions' % x.id, Action, 'actions'),
        'backups': lambda x: x._multi('/droplets/%d/backups' % x.id, Backup, 'backups'),
        'created_at': lambda x: x._date('created_at'),
        'image': lambda x: x._single(Image, 'image'),
        'kernel': lambda x: x._single(Kernel, 'kernel'),
        'kernels': lambda x: x._multi('/droplets/%d/kernels' % x.id, Kernel, 'kernels'),
        'region': lambda x: x._single(Region, 'region'),
        'size': lambda x: x._single(Size, 'size'),
        'snapshots': lambda x: x._multi('/droplets/%d/snapshots' % x.id, Snapshot, 'snapshots'),
    }
class Image(Object):
    """An OS or snapshot image; exposes its actions and available regions."""

    _SPECIAL = {
        'actions': lambda x: x._multi('/images/%d/actions' % x.id, Action, 'actions'),
        'created_at': lambda x: x._date('created_at'),
        # Expand the list of region slugs into full Region objects.
        'regions': lambda x: x._filter('/regions', Region, 'regions', lambda y: y.slug in x._data['regions']),
    }
class Kernel(Object):
    """A droplet kernel; exposes raw JSON fields only."""
    pass

class Region(Object):
    """A datacenter region; expands its size slugs into Size objects."""

    _SPECIAL = {
        'sizes': lambda x: x._filter('/sizes', Size, 'sizes', lambda y: y.slug in x._data['sizes']),
    }

class Size(Object):
    """A droplet size; expands its region slugs into Region objects."""

    _SPECIAL = {
        'regions': lambda x: x._filter('/regions', Region, 'regions', lambda y: y.slug in x._data['regions']),
    }

class Snapshot(Object):
    """A snapshot image; expands its region slugs into Region objects."""

    _SPECIAL = {
        'regions': lambda x: x._filter('/regions', Region, 'regions', lambda y: y.slug in x._data['regions']),
    }

class SSHKey(Object):
    """A public SSH key on the account; exposes raw JSON fields only."""
    pass
| nathan-osman/pypail | pypail/objects.py | Python | mit | 4,196 |
# Copyright 2019 Rubén Bravo <rubenred18@gmail.com>
# Copyright 2020 Tecnativa - Pedro M. Baeza
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html)
from odoo import models
class StockRule(models.Model):
    """Propagate the originating procurement group onto manufacturing orders."""

    _inherit = "stock.rule"

    def _prepare_mo_vals(
        self,
        product_id,
        product_qty,
        product_uom,
        location_id,
        name,
        origin,
        company_id,
        values,
        bom,
    ):
        """Extend the MO values with the source procurement group, if any."""
        vals = super()._prepare_mo_vals(
            product_id,
            product_qty,
            product_uom,
            location_id,
            name,
            origin,
            company_id,
            values,
            bom,
        )
        group = values.get("group_id")
        vals["source_procurement_group_id"] = group.id if group else False
        return vals
| OCA/manufacture | mrp_sale_info/models/stock_rule.py | Python | agpl-3.0 | 867 |
import sys
import numpy as np
from scipy.fftpack import fft2, ifft2, fftshift
from matplotlib import pylab as pl
# Select which plane to display from the command line.  sys.argv values are
# strings; numpy rejects string indices, so convert explicitly to int.
plane = int(sys.argv[1])
data = np.load('data.npy')[plane, :, :]
weight = np.load('weight.npy')[plane, :, :]

# Figure 1: dirty image -- real part of the shifted 2-D FFT of the gridded
# visibilities.
pl.figure(1)
pl.clf()
pl.imshow(np.real(fftshift(fft2(fftshift(data)))).transpose(),
          origin='lower', interpolation='nearest')
pl.colorbar()
print('peak flux:', str(np.real(np.sum(data)) * 1e3) + 'mJy')

# Figure 2: real part of the raw gridded data.
pl.figure(2)
pl.clf()
pl.imshow(np.real(data).transpose(), origin='lower', interpolation='nearest')
pl.colorbar()

# Figure 3: uv-coverage weights.
pl.figure(3)
pl.clf()
pl.imshow(weight.transpose(), origin='lower', interpolation='nearest')
pl.colorbar()

pl.show()
| centowen/cudaGrid | show_image.py | Python | gpl-2.0 | 1,212 |
# -*- coding: utf-8 -*-
from appium.webdriver.common.touch_action import TouchAction
from AppiumLibrary.locators import ElementFinder
from .keywordgroup import KeywordGroup
class _TouchKeywords(KeywordGroup):
    """Touch-gesture keywords: zoom, pinch, swipe, scroll, tap and friends."""

    def __init__(self):
        self._element_finder = ElementFinder()

    # Public, element lookups
    def zoom(self, locator, percent="200%", steps=1):
        """
        Zooms in on an element a certain amount.
        """
        driver = self._current_application()
        element = self._element_find(locator, True, True)
        driver.zoom(element=element, percent=percent, steps=steps)

    def pinch(self, locator, percent="200%", steps=1):
        """
        Pinch in on an element a certain amount.
        """
        driver = self._current_application()
        element = self._element_find(locator, True, True)
        driver.pinch(element=element, percent=percent, steps=steps)

    def swipe(self, start_x, start_y, offset_x, offset_y, duration=1000):
        """
        Swipe from one point to another point, for an optional duration.

        Args:
         - start_x - x-coordinate at which to start
         - start_y - y-coordinate at which to start
         - offset_x - x-coordinate distance from start_x at which to stop
         - offset_y - y-coordinate distance from start_y at which to stop
         - duration - (optional) time to take the swipe, in ms.

        Usage:
        | Swipe | 500 | 100 | 100 | 0 | 1000 |

        _*NOTE: *_
        Android 'Swipe' is not working properly, use ``offset_x`` and ``offset_y`` as if these are destination points.
        """
        driver = self._current_application()
        driver.swipe(start_x, start_y, offset_x, offset_y, duration)

    def swipe_by_percent(self, start_x, start_y, end_x, end_y, duration=1000):
        """
        Swipe from one percent of the screen to another percent, for an optional duration.
        Normal swipe fails to scale for different screen resolutions, this can be avoided using percent.

        Args:
         - start_x - x-percent at which to start
         - start_y - y-percent at which to start
         - end_x - x-percent distance from start_x at which to stop
         - end_y - y-percent distance from start_y at which to stop
         - duration - (optional) time to take the swipe, in ms.

        Usage:
        | Swipe By Percent | 90 | 50 | 10 | 50 | # Swipes screen from right to left. |

        _*NOTE: *_
        This also considers swipe acts different between iOS and Android.

        New in AppiumLibrary 1.4.5
        """
        width = self.get_window_width()
        height = self.get_window_height()
        x_start = float(start_x) / 100 * width
        x_end = float(end_x) / 100 * width
        y_start = float(start_y) / 100 * height
        y_end = float(end_y) / 100 * height
        x_offset = x_end - x_start
        y_offset = y_end - y_start
        platform = self._get_platform()
        # Android's swipe treats the third/fourth args as destination points,
        # iOS treats them as offsets (see NOTE above).
        if platform == 'android':
            self.swipe(x_start, y_start, x_end, y_end, duration)
        else:
            self.swipe(x_start, y_start, x_offset, y_offset, duration)

    def scroll(self, start_locator, end_locator):
        """
        Scrolls from one element to another
        Key attributes for arbitrary elements are `id` and `name`. See
        `introduction` for details about locating elements.
        """
        el1 = self._element_find(start_locator, True, True)
        el2 = self._element_find(end_locator, True, True)
        driver = self._current_application()
        driver.scroll(el1, el2)

    def scroll_down(self, locator):
        """Scrolls down to element"""
        driver = self._current_application()
        element = self._element_find(locator, True, True)
        driver.execute_script("mobile: scroll", {"direction": 'down', 'element': element.id})

    def scroll_up(self, locator):
        """Scrolls up to element"""
        driver = self._current_application()
        element = self._element_find(locator, True, True)
        driver.execute_script("mobile: scroll", {"direction": 'up', 'element': element.id})

    def long_press(self, locator):
        """ Long press the element """
        driver = self._current_application()
        element = self._element_find(locator, True, True)
        long_press = TouchAction(driver).long_press(element)
        long_press.perform()

    def tap(self, locator, x_offset=None, y_offset=None, count=1):
        """ Tap element identified by ``locator``.

        Args:
        - ``x_offset`` - (optional) x coordinate to tap, relative to the top left corner of the element.
        - ``y_offset`` - (optional) y coordinate. If y is used, x must also be set, and vice versa
        - ``count`` - can be used for multiple times of tap on that element
        """
        driver = self._current_application()
        el = self._element_find(locator, True, True)
        action = TouchAction(driver)
        action.tap(el, x_offset, y_offset, count).perform()

    def click_a_point(self, x=0, y=0, duration=100):
        """ Click on a point"""
        self._info("Clicking on a point (%s,%s)." % (x, y))
        driver = self._current_application()
        action = TouchAction(driver)
        try:
            action.press(x=float(x), y=float(y)).wait(float(duration)).release().perform()
        except Exception:
            # Was a bare ``except`` with ``assert False``: that swallowed
            # KeyboardInterrupt/SystemExit and vanished under ``python -O``.
            # Raise AssertionError explicitly -- same type callers observed.
            raise AssertionError("Can't click on a point at (%s,%s)" % (x, y))

    def click_element_at_coordinates(self, coordinate_X, coordinate_Y):
        """ click element at a certain coordinate """
        self._info("Pressing at (%s, %s)." % (coordinate_X, coordinate_Y))
        driver = self._current_application()
        action = TouchAction(driver)
        action.press(x=coordinate_X, y=coordinate_Y).release().perform()
| yahman72/robotframework-appiumlibrary | AppiumLibrary/keywords/_touch.py | Python | apache-2.0 | 5,942 |
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
#!/usr/bin/env python
from __future__ import division, unicode_literals
"""
#TODO: Write module doc.
"""
__author__ = 'Shyue Ping Ong'
__copyright__ = 'Copyright 2013, The Materials Virtual Lab'
__version__ = '0.1'
__maintainer__ = 'Shyue Ping Ong'
__email__ = 'ongsp@ucsd.edu'
__date__ = '8/1/15'
import warnings

# The original message read "...has been moved pymatgen.io.vasp.outputs This
# stub..." -- missing the word "to" and the sentence break.
warnings.warn("pymatgen.io.vaspio.vasp_output has been moved to "
              "pymatgen.io.vasp.outputs. "
              "This stub will be removed in pymatgen 4.0.", DeprecationWarning)

from pymatgen.io.vasp.outputs import *
| Bismarrck/pymatgen | pymatgen/io/vaspio/vasp_output.py | Python | mit | 652 |
from ..structure import *
from ..utils import *
# Adaptive parent proposal
class AMCMC_BN(MCMC):
    """Adaptive MCMC over a Bayesian network.

    Maintains, per variable and per parent configuration, a weighted table of
    proposed values.  The table is initialised from the network CPTs and
    reinforced after every accept/reject decision, so the proposal adapts
    toward frequently visited assignments.
    """

    def __init__(self, problem, verbose_int = 100, N = 1000, T = 10000, record_start = 3000):
        MCMC.__init__(self, problem, "AMCMC_BN", verbose_int, N, T, record_start)

    def particle_to_tuple(self, p):
        # A particle is (assignment-tuple, log-probability); key on the tuple.
        return p[0]

    def init_particle(self):
        """Draw a uniform random assignment and pair it with its log-probability."""
        t = tuple(np.random.choice(self.problem.net[rv].values) for rv in self.problem.rvs)
        return t, self.log_prob_tuple(t)

    def init(self):
        # proposal[rv][parent_config] = [ {value: weight}, total_weight ]
        self.proposal = defaultdict(lambda: defaultdict(lambda: [{}, 0]))
        for rv in self.problem.rvs:
            obj = self.problem.net[rv]
            for k in obj.dict:
                for val in obj.values:
                    # Similar idea as Laplace's Rule of Succession
                    # each particle gives each value 0.5 fake 'visits'
                    v = obj.dict[k].pmf(val) * self.N / 2.0
                    self.proposal[rv][k][0][val] = v
                    self.proposal[rv][k][1] += v

    def sample(self, rv, key):
        """Roulette-wheel sample a value for *rv* given the parent config *key*.

        Returns (value, normalized proposal probability of that value).
        """
        total = self.proposal[rv][key][1]
        r = np.random.uniform(high = total)
        for k, v in self.proposal[rv][key][0].items():
            r -= v
            if r <= 0:
                return k, v / float(total)
        # Weights sum to `total`, so the loop always terminates above.
        raise Exception("Unreached")

    def update_particle(self, particle):
        """One Metropolis-Hastings step: resample a single variable and
        accept/reject; reinforce the proposal table either way."""
        net = self.problem.net
        rvs = self.problem.rvs
        t, l = particle
        d = self.tuple_to_dict(t)
        # Pick one variable uniformly at random.
        rv = rvs[np.random.choice(len(rvs))]
        obj = net[rv]
        parent_t = obj._dict_to_tuple(d)
        np_rv = obj.dict[parent_t]
        sampled, prob = self.sample(rv, parent_t)
        new_d = d.copy()
        new_d[rv] = sampled
        # Log acceptance ratio, assembled incrementally:
        # starts as -log pi(x) - log q(x'|x).
        log_a = -l - log(prob)
        # Only the resampled variable's factor and its children's factors
        # change, so update the log-probability incrementally.
        new_l = l + np_rv.logpmf(sampled) - np_rv.logpmf(d[rv])
        for c in obj.children:
            new_l += net[c][new_d].logpmf(d[c]) - net[c][d].logpmf(d[c])
        # Add log pi(x') + log q(x|x') to complete the MH ratio.
        log_a += new_l + log(self.proposal[rv][parent_t][0][d[rv]]) - log(self.proposal[rv][parent_t][1])
        a = min(1, exp(log_a))
        if self.bernoulli(a):
            # Accepted: reinforce the proposal with the new assignment.
            for rv, val in new_d.items():
                if rv not in rvs:
                    continue
                key_t = self.problem.net[rv]._dict_to_tuple(new_d)
                self.proposal[rv][key_t][0][val] += 1
                self.proposal[rv][key_t][1] += 1
            return self.dict_to_tuple(new_d), new_l
        else:
            # Rejected: reinforce the current assignment instead.
            for rv, val in d.items():
                if rv not in rvs:
                    continue
                key_t = self.problem.net[rv]._dict_to_tuple(d)
                self.proposal[rv][key_t][0][val] += 1
                self.proposal[rv][key_t][1] += 1
            return particle
| SsnL/amcmc | inference/amcmc_bn.py | Python | mit | 2,739 |
# coding=utf-8
from elections.tests import VotaInteligenteTestCase as TestCase
from elections.models import Election, QuestionCategory, Candidate
from django.urls import reverse
from candidator.models import TakenPosition, Position
from elections.models import Topic
from popolo.models import Person
from candidator.comparer import InformationHolder
from django.contrib.staticfiles.templatetags.staticfiles import static
from django.contrib.sites.models import Site
from constance.test import override_config
class CandidateInElectionsViewsTestCase(TestCase):
    """Candidate detail views within an election (fixture-backed)."""

    def setUp(self):
        super(CandidateInElectionsViewsTestCase, self).setUp()
        # Elections 1 and 2 come from the test fixtures.
        self.tarapaca = Election.objects.get(id=1)
        self.coquimbo = Election.objects.get(id=2)

    def test_url_candidate(self):
        url = reverse('candidate_detail_view', kwargs={
            'election_slug': self.tarapaca.slug,
            'slug': self.tarapaca.candidates.all()[0].id
        })
        self.assertTrue(url)

    def test_url_duplicated(self):
        # Two elections may share a candidate slug; the election slug in the
        # URL must disambiguate them.
        candidate = self.coquimbo.candidates.get(id=1)
        candidate.slug = self.tarapaca.candidates.all()[0].id
        candidate.save()
        url_2 = reverse('candidate_detail_view', kwargs={
            'election_slug': self.coquimbo.slug,
            'slug': candidate.slug
        })
        response = self.client.get(url_2)
        self.assertEquals(response.status_code, 200)
        self.assertEqual(response.context['election'], self.coquimbo)
        self.assertEqual(response.context['candidate'], candidate)

    @override_config(CANDIDATE_ABSOLUTE_URL_USING_AREA=False)
    def test_candidate_get_absolute_url(self):
        candidate = self.coquimbo.candidates.get(id=1)
        candidate.slug = self.tarapaca.candidates.all()[0].slug
        candidate.save()
        url_2 = reverse('candidate_detail_view', kwargs={
            'election_slug': self.coquimbo.slug,
            'slug': candidate.slug
        })
        self.assertEquals(candidate.get_absolute_url(), url_2)

    @override_config(CANDIDATE_ABSOLUTE_URL_USING_AREA=True)
    def test_candidate_get_absolute_url_with_area(self):
        candidate = self.coquimbo.candidates.get(id=1)
        url = reverse('candidate_detail_view_area', kwargs={
            'area_slug': self.tarapaca.area.slug,
            'slug': candidate.slug
        })
        self.assertEquals(candidate.get_absolute_url(), url)
        url_2 = reverse('candidate_detail_view', kwargs={
            'election_slug': self.coquimbo.slug,
            'slug': candidate.slug
        })
        response = self.client.get(candidate.get_absolute_url())
        self.assertEquals(response.status_code, 200)
        response1 = self.client.get(url_2)
        # Both URL styles must render the same page.
        self.assertEquals(response.content, response1.content)

    def test_url_is_reachable(self):
        url = reverse('candidate_detail_view', kwargs={
            'election_slug': self.tarapaca.slug,
            'slug': self.tarapaca.candidates.all()[0].slug
        })
        self.assertTrue(url)
        response = self.client.get(url)
        self.assertEquals(response.status_code, 200)
        self.assertIn('election', response.context)
        self.assertEqual(response.context['election'], self.tarapaca)
        self.assertEqual(response.context['candidate'], self.tarapaca.candidates.all()[0])
        self.assertTemplateUsed(response, 'elections/candidate_detail.html')
        self.assertTemplateUsed(response, 'base.html')

    def test_candidates_ogp(self):
        # Open Graph Protocol metadata used for social sharing previews.
        site = Site.objects.get_current()
        candidate = self.coquimbo.candidates.get(id=1)
        self.assertTrue(candidate.ogp_enabled)
        self.assertIn(candidate.name, candidate.ogp_title())
        self.assertEquals('website', candidate.ogp_type())
        expected_url = "http://%s%s" % (site.domain,
                                        candidate.get_absolute_url())
        self.assertEquals(expected_url, candidate.ogp_url())
        expected_url = "http://%s%s" % (site.domain,
                                        static('img/logo_vi_og.jpg'))
        self.assertEquals(expected_url, candidate.ogp_image())
class QuestionaryInElectionsViewTestCase(TestCase):
    """Questionary page of an election."""

    def setUp(self):
        super(QuestionaryInElectionsViewTestCase, self).setUp()
        self.tarapaca = Election.objects.get(id=1)

    def test_url_question(self):
        url = reverse('questionary_detail_view',
                      kwargs={'slug': self.tarapaca.slug})
        self.assertTrue(url)

    def test_url_is_reachable(self):
        url = reverse('questionary_detail_view',
                      kwargs={'slug': self.tarapaca.slug})
        self.assertTrue(url)
        response = self.client.get(url)
        self.assertEquals(response.status_code, 200)
        self.assertEquals(response.context["election"], self.tarapaca)
        self.assertTemplateUsed(response, 'elections/election_questionary.html')
class FaceToFaceViewTestCase(TestCase):
    """Face-to-face comparison views for zero, one or two candidates."""

    def setUp(self):
        super(FaceToFaceViewTestCase, self).setUp()
        self.tarapaca = Election.objects.get(id=1)

    def test_url_face_to_face_two_candidate(self):
        url = reverse('face_to_face_two_candidates_detail_view',
                      kwargs={
                          'slug': self.tarapaca.slug,
                          'slug_candidate_one': self.tarapaca.candidates.all()[0].id,
                          'slug_candidate_two': self.tarapaca.candidates.all()[1].id,
                      })
        self.assertTrue(url)

    def test_url_face_to_face_one_candidate(self):
        url = reverse('face_to_face_one_candidate_detail_view',
                      kwargs={
                          'slug': self.tarapaca.slug,
                          'slug_candidate_one': self.tarapaca.candidates.all()[0].id
                      })
        self.assertTrue(url)

    def test_url_face_to_face_no_candidate(self):
        url = reverse('face_to_face_no_candidate_detail_view',
                      kwargs={
                          'slug': self.tarapaca.slug
                      })
        self.assertTrue(url)

    def test_url_is_reachable_for_two_candidates(self):
        url = reverse('face_to_face_two_candidates_detail_view',
                      kwargs={
                          'slug': self.tarapaca.slug,
                          'slug_candidate_one': self.tarapaca.candidates.all()[0].slug,
                          'slug_candidate_two': self.tarapaca.candidates.all()[1].slug,
                      })
        self.assertTrue(url)
        response = self.client.get(url)
        self.assertEquals(response.status_code, 200)
        self.assertTemplateUsed(response, 'elections/compare_candidates.html')
        self.assertIn('first_candidate', response.context)
        self.assertEqual(response.context['first_candidate'], self.tarapaca.candidates.all()[0])
        self.assertIn('second_candidate', response.context)
        self.assertEqual(response.context['second_candidate'], self.tarapaca.candidates.all()[1])

    def test_url_does_not_throw_errors_if_any_candidate_does_not_exist(self):
        # An unknown candidate slug degrades gracefully to a page with only
        # the first candidate in context.
        url = reverse('face_to_face_two_candidates_detail_view',
                      kwargs={
                          'slug': self.tarapaca.slug,
                          'slug_candidate_one': self.tarapaca.candidates.all()[0].slug,
                          'slug_candidate_two': 'i-do-not-exist',
                      })
        response = self.client.get(url)
        self.assertEquals(response.status_code, 200)
        self.assertEqual(response.context['first_candidate'], self.tarapaca.candidates.all()[0])
        self.assertNotIn('second_candidate', response.context)

    def test_url_is_reachable_for_one_candidates(self):
        url = reverse('face_to_face_one_candidate_detail_view',
                      kwargs={
                          'slug': self.tarapaca.slug,
                          'slug_candidate_one': self.tarapaca.candidates.all()[1].slug,
                      })
        self.assertTrue(url)
        response = self.client.get(url)
        self.assertEquals(response.status_code, 200)
        self.assertTemplateUsed(response, 'elections/compare_candidates.html')
        self.assertIn('first_candidate', response.context)
        self.assertEqual(response.context['first_candidate'], self.tarapaca.candidates.all()[1])

    def test_url_is_reachable_for_no_one_candidates(self):
        url = reverse('face_to_face_no_candidate_detail_view',
                      kwargs={
                          'slug': self.tarapaca.slug,
                      })
        self.assertTrue(url)
        response = self.client.get(url)
        self.assertEquals(response.status_code, 200)
        self.assertTemplateUsed(response, 'elections/compare_candidates.html')
| lfalvarez/votai | elections/tests/candidatorg_views_tests.py | Python | gpl-3.0 | 8,504 |
"""ProjectFormulario URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from Formulario import views
urlpatterns = [
    url(r'^admin/', admin.site.urls),
    # NOTE(review): r'^' matches every path, so this entry must stay last --
    # anything not matched above falls through to the 'principal' view.
    url(r'^', views.principal, name='principal')
]
| Curso-OpenShift/Formulario | OverFlow/ProjectFormulario/ProjectFormulario/urls.py | Python | gpl-3.0 | 852 |
from __future__ import absolute_import
from __future__ import unicode_literals
from datetime import timedelta
from django import forms
from django.db.models import Q
from django.db.models.sql.constants import QUERY_TERMS
from django.utils import six
from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
from .fields import (
CSVField, RangeField, LookupTypeField, Lookup, DateRangeField,
TimeRangeField)
from .widgets import CommaSeparatedValueWidget
# Public API of this module.
__all__ = [
    'Filter', 'CharFilter', 'BooleanFilter', 'ChoiceFilter',
    'TypedChoiceFilter', 'MultipleChoiceFilter', 'DateFilter',
    'DateTimeFilter', 'TimeFilter', 'ModelChoiceFilter',
    'ModelMultipleChoiceFilter', 'NumberFilter', 'NumericRangeFilter', 'RangeFilter',
    'DateRangeFilter', 'DateFromToRangeFilter', 'TimeRangeFilter',
    'AllValuesFilter', 'MethodFilter'
]

# Every ORM lookup name (exact, icontains, gte, ...), sorted so that choice
# lists built from it have a stable order.
LOOKUP_TYPES = sorted(QUERY_TERMS)
class Filter(object):
    """Base filter: builds a form field and applies a queryset lookup."""

    # Global counter preserving declaration order on FilterSets.
    creation_counter = 0
    field_class = forms.Field

    def __init__(self, name=None, label=None, widget=None, action=None,
                 lookup_type='exact', required=False, distinct=False, exclude=False, **kwargs):
        self.name = name
        self.label = label
        # An explicit `action` callable replaces the default filter() method.
        if action:
            self.filter = action
        self.lookup_type = lookup_type
        self.widget = widget
        self.required = required
        self.extra = kwargs
        self.distinct = distinct
        self.exclude = exclude

        self.creation_counter = Filter.creation_counter
        Filter.creation_counter += 1

    @property
    def field(self):
        """Lazily build and cache the form field for this filter."""
        if not hasattr(self, '_field'):
            help_text = _('This is an exclusion filter') if self.exclude else ''
            # A list/tuple of lookup types (or None, meaning all of them)
            # produces a combined value+lookup field.
            if (self.lookup_type is None or
                    isinstance(self.lookup_type, (list, tuple))):
                if self.lookup_type is None:
                    lookup = [(x, x) for x in LOOKUP_TYPES]
                else:
                    lookup = [
                        (x, x) for x in LOOKUP_TYPES if x in self.lookup_type]
                self._field = LookupTypeField(self.field_class(
                    required=self.required, widget=self.widget, **self.extra),
                    lookup, required=self.required, label=self.label, help_text=help_text)
            else:
                self._field = self.field_class(required=self.required,
                                               label=self.label, widget=self.widget,
                                               help_text=help_text, **self.extra)
        return self._field

    def filter(self, qs, value):
        """Apply ``<name>__<lookup>=value`` to *qs*; empty values are a no-op."""
        if isinstance(value, Lookup):
            # A Lookup carries its own lookup type alongside the value.
            lookup = six.text_type(value.lookup_type)
            value = value.value
        else:
            lookup = self.lookup_type
        if value in ([], (), {}, None, ''):
            return qs
        method = qs.exclude if self.exclude else qs.filter
        qs = method(**{'%s__%s' % (self.name, lookup): value})
        if self.distinct:
            qs = qs.distinct()
        return qs
class CharFilter(Filter):
    """Text input filter."""
    field_class = forms.CharField


class BooleanFilter(Filter):
    """Three-state boolean filter; None (unset) leaves the queryset untouched."""
    field_class = forms.NullBooleanField

    def filter(self, qs, value):
        if value is not None:
            return qs.filter(**{self.name: value})
        return qs


class ChoiceFilter(Filter):
    """Single-choice filter."""
    field_class = forms.ChoiceField


class TypedChoiceFilter(Filter):
    """Single-choice filter with value coercion."""
    field_class = forms.TypedChoiceField
class MultipleChoiceFilter(Filter):
    """
    This filter preforms OR(by default) or AND(using conjoined=True) query
    on the selected options.

    Advanced Use
    ------------
    Depending on your application logic, when all or no choices are selected, filtering may be a noop. In this case you may wish to avoid the filtering overhead, particularly of the `distinct` call.

    Set `always_filter` to False after instantiation to enable the default `is_noop` test.

    Override `is_noop` if you require a different test for your application.
    """
    field_class = forms.MultipleChoiceField

    always_filter = True

    def __init__(self, *args, **kwargs):
        conjoined = kwargs.pop('conjoined', False)
        self.conjoined = conjoined

        super(MultipleChoiceFilter, self).__init__(*args, **kwargs)

    def is_noop(self, qs, value):
        """
        Return True to short-circuit unnecessary and potentially slow filtering.
        """
        if self.always_filter:
            return False

        # A reasonable default for being a noop...
        if self.required and len(value) == len(self.field.choices):
            return True

        return False

    def filter(self, qs, value):
        value = value or ()  # Make sure we have an iterable

        if self.is_noop(qs, value):
            return qs

        # Even though not a noop, no point filtering if empty
        if not value:
            return qs

        if self.conjoined:
            # AND semantics: every selected value must match.
            for v in value:
                qs = qs.filter(**{self.name: v})
            return qs

        # OR semantics: any selected value may match; distinct() removes the
        # duplicates an OR across relations can introduce.
        q = Q()
        for v in value:
            q |= Q(**{self.name: v})
        return qs.filter(q).distinct()
class DateFilter(Filter):
    """Date filter."""
    field_class = forms.DateField


class DateTimeFilter(Filter):
    """Datetime filter."""
    field_class = forms.DateTimeField


class TimeFilter(Filter):
    """Time filter."""
    field_class = forms.TimeField


class ModelChoiceFilter(Filter):
    """Single model-instance filter."""
    field_class = forms.ModelChoiceField


class ModelMultipleChoiceFilter(MultipleChoiceFilter):
    """Multiple model-instance filter."""
    field_class = forms.ModelMultipleChoiceField


class NumberFilter(Filter):
    """Numeric (decimal) filter."""
    field_class = forms.DecimalField
class NumericRangeFilter(Filter):
    """Two-value range filter using this filter's ``lookup_type``."""

    field_class = RangeField

    def filter(self, qs, value):
        if value:
            if value.start and value.stop:
                lookup = '%s__%s' % (self.name, self.lookup_type)
                return qs.filter(**{lookup: (value.start, value.stop)})
            else:
                # NOTE(review): one-sided bounds use __startswith/__endswith,
                # which are string lookups -- inconsistent with RangeFilter's
                # __gte/__lte below; confirm this is intentional.
                if value.start:
                    qs = qs.filter(**{'%s__startswith' % self.name: value.start})
                if value.stop:
                    qs = qs.filter(**{'%s__endswith' % self.name: value.stop})
        return qs
class RangeFilter(Filter):
    """Filter a field between two values, or one-sided with only one bound."""

    field_class = RangeField

    def filter(self, qs, value):
        if not value:
            return qs
        if value.start and value.stop:
            # Both bounds present: a single __range lookup.
            return qs.filter(**{'%s__range' % self.name: (value.start, value.stop)})
        if value.start:
            qs = qs.filter(**{'%s__gte' % self.name: value.start})
        if value.stop:
            qs = qs.filter(**{'%s__lte' % self.name: value.stop})
        return qs
# Drop the time-of-day from a datetime.  NOTE(review): microseconds are NOT
# cleared by this replace(); confirm that is acceptable for range boundaries.
_truncate = lambda dt: dt.replace(hour=0, minute=0, second=0)


class DateRangeFilter(ChoiceFilter):
    """Choice filter offering canned date ranges (today, past 7 days, ...)."""

    # Maps choice value -> (label, queryset transform).
    options = {
        '': (_('Any date'), lambda qs, name: qs.all()),
        1: (_('Today'), lambda qs, name: qs.filter(**{
            '%s__year' % name: now().year,
            '%s__month' % name: now().month,
            '%s__day' % name: now().day
        })),
        2: (_('Past 7 days'), lambda qs, name: qs.filter(**{
            '%s__gte' % name: _truncate(now() - timedelta(days=7)),
            '%s__lt' % name: _truncate(now() + timedelta(days=1)),
        })),
        3: (_('This month'), lambda qs, name: qs.filter(**{
            '%s__year' % name: now().year,
            '%s__month' % name: now().month
        })),
        4: (_('This year'), lambda qs, name: qs.filter(**{
            '%s__year' % name: now().year,
        })),
        5: (_('Yesterday'), lambda qs, name: qs.filter(**{
            '%s__year' % name: now().year,
            '%s__month' % name: now().month,
            '%s__day' % name: (now() - timedelta(days=1)).day,
        })),
    }

    def __init__(self, *args, **kwargs):
        kwargs['choices'] = [
            (key, value[0]) for key, value in six.iteritems(self.options)]
        super(DateRangeFilter, self).__init__(*args, **kwargs)

    def filter(self, qs, value):
        try:
            value = int(value)
        except (ValueError, TypeError):
            # Unknown or empty selection falls back to 'Any date'.
            value = ''
        return self.options[value][1](qs, self.name)
class DateFromToRangeFilter(RangeFilter):
    """Range filter over dates."""
    field_class = DateRangeField


class TimeRangeFilter(RangeFilter):
    """Range filter over times."""
    field_class = TimeRangeField
class AllValuesFilter(ChoiceFilter):
    """ChoiceFilter whose choices are all distinct values present in the DB."""

    @property
    def field(self):
        # self.model is not set in this class -- presumably attached by the
        # parent FilterSet; confirm.  Choices are recomputed on every access.
        qs = self.model._default_manager.distinct()
        qs = qs.order_by(self.name).values_list(self.name, flat=True)
        self.extra['choices'] = [(o, o) for o in qs]
        return super(AllValuesFilter, self).field
class MethodFilter(Filter):
    """
    This filter will allow you to run a method that exists on the filterset class
    """
    def __init__(self, *args, **kwargs):
        # Get the action out of the kwargs
        action = kwargs.get('action', None)

        # If the action is a string store the action and set the action to our own filter method
        # so it can be backwards compatible and work as expected, the parent will still treat it as
        # a filter that has an action
        self.parent_action = ''
        text_types = (str, six.text_type)
        if type(action) in text_types:
            self.parent_action = str(action)
            kwargs.update({
                'action': self.filter
            })

        # Call the parent
        super(MethodFilter, self).__init__(*args, **kwargs)

    def filter(self, qs, value):
        """
        This filter method will act as a proxy for the actual method we want to
        call.

        It will try to find the method on the parent filterset,
        if not it attempts to search for the method `field_{{attribute_name}}`.
        Otherwise it defaults to just returning the queryset.
        """
        parent = getattr(self, 'parent', None)
        parent_filter_method = getattr(parent, self.parent_action, None)
        if not parent_filter_method:
            # Fall back to the filter_<name> naming convention.
            func_str = 'filter_{0}'.format(self.name)
            parent_filter_method = getattr(parent, func_str, None)

        if parent_filter_method is not None:
            return parent_filter_method(qs, value)
        return qs
class CSVFilter(Filter):
    """Filter accepting comma-separated values; defaults to an ``in`` lookup."""

    field_class = CSVField

    def __init__(self, lookup_type='in', *args, **kwargs):
        super(CSVFilter, self).__init__(
            lookup_type=lookup_type, *args, **kwargs)
| zoidbergwill/django-filter | django_filters/filters.py | Python | bsd-3-clause | 10,300 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import logging
import jinja2
from django.template.loader import render_to_string
from django_jinja import library
from lib.l10n_utils import get_locale
from bedrock.newsletter.forms import NewsletterFooterForm
log = logging.getLogger(__name__)


@library.global_function
@jinja2.contextfunction
def email_newsletter_form(ctx, newsletters='mozilla-and-you', title=None,
                          subtitle=None, desc=None, include_country=True,
                          include_language=True, details=None,
                          use_thankyou=True, thankyou_head=None,
                          thankyou_content=None, footer=True,
                          process_form=True, include_title=None,
                          submit_text=None, button_class=None,
                          spinner_color=None, protocol_component=False,
                          email_label=None, email_placeholder=None):
    """Render a newsletter signup form as a Jinja global.

    Reuses an existing ``newsletter_form`` from the template context when
    present, otherwise builds a NewsletterFooterForm for *newsletters* in the
    request's locale.  Returns markup, or None after a successful submission
    when the thank-you message is disabled.

    NOTE(review): the ``process_form`` parameter is accepted but never
    referenced in this body; confirm whether it is still needed.
    """
    request = ctx['request']
    context = ctx.get_all()
    success = bool(ctx.get('success'))
    # Nothing to render after a successful submission with no thank-you.
    if success and not use_thankyou:
        return

    form = ctx.get('newsletter_form', None)
    if not form:
        form = NewsletterFooterForm(newsletters, get_locale(request))

    context.update(dict(
        id=newsletters,
        title=title,
        subtitle=subtitle,  # nested in/depends on include_title
        desc=desc,  # nested in/depends on include_title
        include_country=include_country,
        include_language=include_language,
        details=details,
        use_thankyou=use_thankyou,
        thankyou_head=thankyou_head,
        thankyou_content=thankyou_content,
        footer=footer,
        include_title=include_title if include_title is not None else footer,
        form=form,
        submit_text=submit_text,
        button_class=button_class,
        spinner_color=spinner_color,
        success=success,
        email_label=email_label,
        email_placeholder=email_placeholder,
    ))

    template_name = 'newsletter/includes/form-protocol.html' if protocol_component else 'newsletter/includes/form.html'

    html = render_to_string(template_name, context, request=request)

    return jinja2.Markup(html)
| ericawright/bedrock | bedrock/newsletter/templatetags/helpers.py | Python | mpl-2.0 | 2,361 |
class Solution(object):
    def myAtoi(self, str):
        """
        Convert a string to a 32-bit signed integer (LeetCode #8, atoi).

        Leading whitespace is skipped, a single optional +/- sign is
        honoured, digits are consumed until the first non-digit, and the
        result is clamped to [-2**31, 2**31 - 1].

        :type str: str
        :rtype: int
        """
        stripped = str.strip()
        if not stripped:
            return 0
        sign = -1 if stripped[0] == '-' else 1
        if stripped[0] in '+-':
            stripped = stripped[1:]
        total = 0
        # Iterate characters directly instead of indexing with the
        # Python-2-only xrange(); works identically on Python 2 and 3.
        for ch in stripped:
            if not ch.isdigit():
                break
            total = total * 10 + ord(ch) - ord('0')
        return max(-2**31, min(2**31 - 1, sign * total))
if __name__ == '__main__':
    # Ad-hoc manual check.  NOTE(review): Python 2 print statements -- this
    # entry point does not run under Python 3.
    s = Solution();
    str = "2147483648"
    print s.myAtoi(str)
    print 2**31
| zhuxiang/LeetCode-Python | src/8-StringToInteger.py | Python | apache-2.0 | 723 |
# -*- coding: utf-8 -*-
"""
Malort Test Helpers
Test Runner: PyTest
"""
import os
import unittest
def _test_dir(name):
    """Absolute, normalized path of a test-data directory next to this module."""
    return os.path.normpath(
        os.path.join(os.path.abspath(__file__), '..', 'tests', name))


TEST_FILES_1 = _test_dir('test_files')
TEST_FILES_2 = _test_dir('test_files_newline_delimited')
TEST_FILES_3 = _test_dir('test_files_nested')
TEST_FILES_4 = _test_dir('test_files_mult_type')
class TestHelpers(unittest.TestCase):
    """Shared assertion helpers for malort stats tests."""

    def assert_stats(self, result, expected):
        """Assert that a malort stats *result* matches *expected*.

        'total_records' is compared directly.  For each key, 'str' stats may
        carry sampled lists: at most 3 entries, each of which must appear in
        the expected sample pool.  'base_key' is compared by equality; any
        other per-type stats dict must match exactly.
        """
        for key, value in result.items():
            if key == 'total_records':
                self.assertEqual(expected['total_records'], value)
                continue
            for typek, typev in value.items():
                if typek == 'str':
                    for k, v in typev.items():
                        if isinstance(v, list):
                            # Samples are capped at 3 and drawn from the
                            # expected pool, so compare by membership.
                            self.assertTrue(len(v) <= 3)
                            for item in v:
                                self.assertIn(item, expected[key][typek][k])
                        else:
                            # assertEqual consistently: assertEquals is a
                            # deprecated alias (removed in Python 3.12).
                            self.assertEqual(expected[key][typek][k], v)
                elif typek == 'base_key':
                    self.assertEqual(typev, expected[key][typek])
                else:
                    self.assertDictEqual(typev, expected[key][typek])
import year
import conference
import paper
import author
import affiliation
import resource | riccardodg/lodstuff | lremap/it.cnr.ilc.lremapowl/src/lremapobj/__init__.py | Python | gpl-3.0 | 91 |
##
## This file is part of the sigrok project.
##
## Copyright (C) 2012 Uwe Hermann <uwe@hermann-uwe.de>
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
##
'''
1-Wire protocol decoder (network layer).
The 1-Wire protocol enables bidirectional communication over a single wire
(and ground) between a single master and one or multiple slaves. The protocol
is layered:
- Link layer (reset, presence detection, reading/writing bits)
- Network layer (skip/search/match device ROM addresses)
- Transport layer (transport data between 1-Wire master and device)
Network layer:
Protocol output format:
TODO.
Annotations:
The following link layer annotations are shown:
- RESET/PRESENCE True/False
The event is marked from the signal negative edge to the end of the reset
high period. It is also reported if there are any devices attached to the
bus.
The following network layer annotations are shown:
- ROM command <val> <name>
The requested ROM command is displayed as an 8bit hex value and by name.
- ROM <val>
The 64bit value of the addressed device is displayed:
Family code (1 byte) + serial number (6 bytes) + CRC (1 byte)
- Data <val>
Data intended for the transport layer is displayed as an 8bit hex value.
TODO:
- Add CRC checks, to see if there were communication errors on the wire.
- Add reporting original/complement address values from the search algorithm.
'''
from .onewire_network import *
| robacklin/sigrok | decoders/onewire_network/__init__.py | Python | gpl-3.0 | 2,109 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class CheckNameRequest(Model):
    """Request payload used to check a workspace collection name.

    :param name: Workspace collection name
    :type name: str
    :param type: Resource type. Default value:
     "Microsoft.PowerBI/workspaceCollections" .
    :type type: str
    """

    # Serialization map consumed by the msrest base Model.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(self, name=None, type="Microsoft.PowerBI/workspaceCollections"):
        # Plain attribute storage; (de)serialization is driven entirely by
        # _attribute_map above.
        self.name = name
        self.type = type
| rjschwei/azure-sdk-for-python | azure-mgmt-powerbiembedded/azure/mgmt/powerbiembedded/models/check_name_request.py | Python | mit | 1,018 |
import pytest
from django.urls import reverse
from pythonpro.domain.user_domain import find_user_interactions
from pythonpro.launch.facade import LAUNCH_STATUS_CPL1, LAUNCH_STATUS_OPEN_CART, LAUNCH_STATUS_PPL
@pytest.fixture
def tag_as_mock(mocker):
    """Mock the email-marketing ``tag_as.delay`` task so no real call happens."""
    return mocker.patch('pythonpro.domain.user_domain._email_marketing_facade.tag_as.delay')
@pytest.fixture
def launch_status_as_mock(mocker):
    """Patch ``get_launch_status`` in the view module; defaults to CPL1."""
    return mocker.patch(
        'pythonpro.launch.views.get_launch_status', return_value=LAUNCH_STATUS_CPL1
    )
@pytest.fixture
def resp(client, tag_as_mock, launch_status_as_mock):
    """GET the CPL1 page as an anonymous client, with external calls mocked."""
    return client.get(reverse('launch:cpl1'))
def test_status_code(resp):
assert 200 == resp.status_code
@pytest.fixture
def resp_with_user(client_with_user, tag_as_mock):
    """GET the CPL1 page as a logged-in client (launch status not patched here)."""
    return client_with_user.get(reverse('launch:cpl1'))
def test_user_interaction(resp_with_user, logged_user):
    """Visiting CPL1 while logged in records a 'CPL1' user interaction."""
    assert 'CPL1' in [i.category for i in find_user_interactions(logged_user)]
def test_email_marketing_tag(resp_with_user, logged_user, tag_as_mock):
    """Visiting CPL1 tags the logged-in user as 'cpl1' in email marketing."""
    tag_as_mock.assert_called_once_with(logged_user.email, logged_user.id, 'cpl1')
@pytest.fixture
def resp_with_user_with_launch_status_open_cart(
        client_with_user, tag_as_mock, launch_status_as_mock
):
    """GET the CPL1 page with the launch status forced to 'open cart'."""
    launch_status_as_mock.return_value = LAUNCH_STATUS_OPEN_CART
    return client_with_user.get(reverse('launch:cpl1'))
def test_should_redirect_to_subscribe(resp_with_user_with_launch_status_open_cart, resp_with_user):
    # NOTE(review): the assertion targets resp_with_user, which only sees a
    # redirect because the open-cart fixture (instantiated first) has already
    # patched get_launch_status.  Asserting on the open-cart response directly
    # would be clearer -- confirm intent before changing.
    assert resp_with_user.status_code == 302
@pytest.fixture
def resp_with_user_with_launch_status_ppl(
        client_with_user, tag_as_mock, launch_status_as_mock
):
    """GET the CPL1 page with the launch status forced to PPL."""
    launch_status_as_mock.return_value = LAUNCH_STATUS_PPL
    return client_with_user.get(reverse('launch:cpl1'))
def test_should_redirect_to_ppl(resp_with_user_with_launch_status_ppl, resp_with_user):
    # NOTE(review): as with the open-cart test above, the assertion is made
    # on resp_with_user rather than on the PPL response requested first --
    # it works only through fixture instantiation order; verify intent.
    assert resp_with_user.status_code == 302
| pythonprobr/pythonpro-website | pythonpro/launch/tests/test_cpl1.py | Python | agpl-3.0 | 1,890 |
import unittest
import sys
sys.path.append("../src")
from remote_controller import *
class TestRemoteController(unittest.TestCase):
class StuntMotion():
def __init__(self):
self.reset()
def forward(self):
self.fward = True
def backward(self):
self.bward = True
def turn_right(self):
self.right = True
def turn_left(self):
self.left = True
def stop(self):
self.stop_ = True
def reset(self):
self.fward = False
self.bward = False
self.right = False
self.left = False
self.stop_ = False
class StuntLights():
def __init__(self):
self.reset()
def enable_front_lights(self):
self.front_lights = True
def disable_front_lights(self):
self.front_lights = False
def enable_back_lights(self):
self.back_lights = True
def disable_back_lights(self):
self.back_lights = False
def reset(self):
self.front_lights = False
self.back_lighst = False
def test_command_realtime(self):
rc = RemoteController()
rc.motion = self.StuntMotion()
rc.lights = self.StuntLights()
rc.command_realtime("F")
self.assertEqual(rc.motion.fward, True)
rc.motion.reset()
rc.command_realtime("B")
self.assertEqual(rc.motion.bward, True)
rc.motion.reset()
rc.command_realtime("L")
self.assertEqual(rc.motion.left, True)
rc.motion.reset()
rc.command_realtime("R")
self.assertEqual(rc.motion.right, True)
rc.motion.reset()
rc.command_realtime("G")
self.assertEqual(rc.motion.fward, True)
self.assertEqual(rc.motion.left, True)
rc.motion.reset()
rc.command_realtime("I")
self.assertEqual(rc.motion.fward, True)
self.assertEqual(rc.motion.right, True)
rc.motion.reset()
rc.command_realtime("H")
self.assertEqual(rc.motion.bward, True)
self.assertEqual(rc.motion.left, True)
rc.motion.reset()
rc.command_realtime("J")
self.assertEqual(rc.motion.bward, True)
self.assertEqual(rc.motion.right, True)
rc.motion.reset()
rc.command_realtime("S")
self.assertEqual(rc.motion.stop_, True)
rc.motion.reset()
rc.command_realtime("W")
self.assertEqual(rc.lights.front_lights, True)
rc.lights.reset()
rc.lights.front_lights = True
rc.command_realtime("w")
self.assertEqual(rc.lights.front_lights, False)
rc.lights.reset()
rc.command_realtime("U")
self.assertEqual(rc.lights.back_lights, True)
rc.lights.reset()
rc.lights.back_lights = True
rc.command_realtime("u")
self.assertEqual(rc.lights.back_lights, False)
rc.lights.reset()
def test_predefined(self):
rc = RemoteController()
rc.motion = self.StuntMotion()
rc.lights = self.StuntLights()
rc.on_message("V")
rc.on_message("F")
rc.on_message("S")
rc.on_message("B")
rc.on_message("S")
rc.on_message("v")
self.assertEqual(rc.motion.fward, True)
self.assertEqual(rc.motion.stop_, True)
self.assertEqual(rc.motion.bward, True)
def test_predefined_lights(self):
rc = RemoteController()
rc.motion = self.StuntMotion()
rc.lights = self.StuntLights()
rc.on_message("V")
rc.on_message("W")
rc.on_message("U")
rc.on_message("v")
self.assertEqual(rc.lights.front_lights, True)
self.assertEqual(rc.lights.back_lights, True)
if __name__ == "__main__":
    # Run the test suite when this file is executed directly.
    unittest.main()
| IvayloTsankov/carty | tests/test_remote_controller.py | Python | gpl-2.0 | 3,884 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Jack Says - A Web-Based, "Simon Says"-like, multiplayer game.
# Copyright (C) 2015 https://github.com/cedricbonhomme/JackSays
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import time
from flask.ext.login import UserMixin
class User(UserMixin):
    """A player account, integrated with Flask-Login via UserMixin."""

    def __init__(self, nic, score, avatar="M123"):
        self.nic = nic        # nickname, doubles as the login identifier
        self.score = score
        self.avatar = avatar

    def get_id(self):
        """Return the identifier Flask-Login uses for this user (the nic)."""
        return self.nic

    def dump(self):
        """Serialise the user to a plain dict (avatar deliberately excluded)."""
        return {"nic": self.nic, "score": self.score}
class Game(object):
    """A single timed mini-game round shared by the connected players."""

    def __init__(self):
        self.user_vals = {}       # latest input submitted per user
        self.game_id = 0
        self.duration = 30.0      # round length, in seconds
        self.data = None
        self.stime = time.time()  # round start timestamp
        self.message = ""
        self.start_script = ""
        self.finish_script = ""

    def get_time_left(self):
        """Return the number of seconds remaining in this round."""
        elapsed = time.time() - self.stime
        return self.duration - elapsed

    def get_data(self):
        """Return the payload for this game; the base game has none."""
        return None

    def user_input(self, username, data):
        """Record *data* as the latest input from *username*."""
        self.user_vals[username] = data

    def finalize(self):
        """End-of-round hook; subclasses are expected to override this."""
        return ""
| cedricbonhomme/JackSays | web/models.py | Python | agpl-3.0 | 1,901 |
# Copyright (C) 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''WSGI frontend for the burrow server.'''
import json
import types
import eventlet.wsgi
import routes.middleware
import webob.dec
import burrow.frontend
from burrow.openstack.common.gettextutils import _
# Default configuration values for this module.
DEFAULT_HOST = '0.0.0.0'             # listen on all interfaces
DEFAULT_PORT = 8080
DEFAULT_BACKLOG = 64                 # listen(2) backlog for the server socket
DEFAULT_SSL = False
DEFAULT_SSL_CERTFILE = 'example.pem'
DEFAULT_SSL_KEYFILE = 'example.key'
DEFAULT_THREAD_POOL_SIZE = 0         # 0 = reuse the caller's thread pool (see Frontend._run)
DEFAULT_TTL = 600                    # default message time-to-live (seconds)
DEFAULT_HIDE = 0                     # default message hide time (seconds)
class Frontend(burrow.frontend.Frontend):
    '''Frontend implementation that implements the Burrow v1.0 protocol
    using WSGI.'''

    def __init__(self, config, backend):
        super(Frontend, self).__init__(config, backend)
        self.default_ttl = int(self.config.get('default_ttl', DEFAULT_TTL))
        self.default_hide = int(self.config.get('default_hide', DEFAULT_HIDE))
        # URL pattern -> action name.  _route() dispatches each request to a
        # '_<http method>_<action>' handler on this class if one exists,
        # otherwise straight to the backend.
        mapper = routes.Mapper()
        mapper.connect('/', action='versions')
        mapper.connect('/v1.0', action='accounts')
        mapper.connect('/v1.0/{account}', action='queues')
        mapper.connect('/v1.0/{account}/{queue}', action='messages')
        mapper.connect('/v1.0/{account}/{queue}/{message}', action='message')
        self._routes = routes.middleware.RoutesMiddleware(self._route, mapper)

    def run(self, thread_pool):
        '''Create the listening socket and start the thread that runs
        the WSGI server. This extra thread is needed since the WSGI
        server function blocks.'''
        host = self.config.get('host', DEFAULT_HOST)
        port = self.config.getint('port', DEFAULT_PORT)
        backlog = self.config.getint('backlog', DEFAULT_BACKLOG)
        socket = eventlet.listen((host, port), backlog=backlog)
        self.log.info(
            _('Listening on %(host)s:%(port)d') % dict(host=host, port=port))
        if self.config.getboolean('ssl', DEFAULT_SSL):
            # Wrap the listening socket with TLS before handing it over.
            certfile = self.config.get('ssl_certfile', DEFAULT_SSL_CERTFILE)
            keyfile = self.config.get('ssl_keyfile', DEFAULT_SSL_KEYFILE)
            socket = eventlet.green.ssl.wrap_socket(socket, certfile=certfile,
                                                    keyfile=keyfile)
        thread_pool.spawn_n(self._run, socket, thread_pool)

    def _run(self, socket, thread_pool):
        '''Thread to run the WSGI server.'''
        thread_pool_size = self.config.getint('thread_pool_size',
                                              DEFAULT_THREAD_POOL_SIZE)
        log_format = '%(client_ip)s "%(request_line)s" %(status_code)s ' \
            '%(body_length)s %(wall_seconds).6f'
        # size 0 reuses the caller's pool; otherwise let eventlet cap its own.
        if thread_pool_size == 0:
            eventlet.wsgi.server(socket, self, log=_WSGILog(self.log),
                                 log_format=log_format, custom_pool=thread_pool)
        else:
            eventlet.wsgi.server(socket, self, log=_WSGILog(self.log),
                                 log_format=log_format, max_size=thread_pool_size)

    def __call__(self, *args, **kwargs):
        # WSGI entry point: delegate to the routes middleware.
        return self._routes(*args, **kwargs)

    @webob.dec.wsgify
    def _route(self, req):
        '''Parse the request args and see if there is a matching method.'''
        args = req.environ['wsgiorg.routing_args'][1]
        if not args:
            return self._response(status=404)
        action = args.pop('action')
        # Prefer a specialised handler on this class (e.g. _put_message).
        method = getattr(self, '_%s_%s' % (req.method.lower(), action), None)
        if method is not None:
            return method(req, **args)
        # Fall back to the generic backend method; HTTP POST maps to the
        # backend's 'update_*' family.
        method = req.method.lower()
        args = dict(args)
        if method == 'post':
            method = 'update'
            args['attributes'] = self._parse_attributes(req)
        method = getattr(self.backend, '%s_%s' % (method, action), None)
        if method is None:
            return self._response(status=405)
        args['filters'] = self._parse_filters(req)
        return self._response(body=lambda: method(**args))

    @webob.dec.wsgify
    def _get_versions(self, _req):
        '''Return a list of API versions.'''
        return self._response(body=['v1.0'])

    @webob.dec.wsgify
    def _put_message(self, req, account, queue, message):
        '''Read the request body and create a new message.'''
        attributes = self._parse_attributes(req, self.default_ttl,
                                            self.default_hide)
        # Read the body in 16 KiB chunks until EOF.
        # NOTE(review): the '' sentinel assumes a Python 2 str body stream;
        # under Python 3 this would need b'' -- confirm target runtime.
        body = ''
        for chunk in iter(lambda: req.body_file.read(16384), ''):
            body += str(chunk)
        if self.backend.create_message(account, queue, message, body,
                                       attributes):
            return self._response(status=201)
        return self._response()

    def _parse_filters(self, req):
        '''Parse filters from a request object and build a dict to
        pass into the backend methods.'''
        filters = {}
        if 'limit' in req.params:
            filters['limit'] = int(req.params['limit'])
        if 'marker' in req.params:
            filters['marker'] = req.params['marker']
        if 'match_hidden' in req.params and \
                req.params['match_hidden'].lower() == 'true':
            filters['match_hidden'] = True
        if 'detail' in req.params:
            filters['detail'] = req.params['detail']
        if 'wait' in req.params:
            filters['wait'] = int(req.params['wait'])
        return filters

    def _parse_attributes(self, req, default_ttl=None, default_hide=None):
        '''Parse attributes from a request object and build a dict
        to pass into the backend methods.'''
        attributes = {}
        if 'ttl' in req.params:
            ttl = int(req.params['ttl'])
        else:
            ttl = default_ttl
        attributes['ttl'] = ttl
        if 'hide' in req.params:
            hide = int(req.params['hide'])
        else:
            hide = default_hide
        attributes['hide'] = hide
        return attributes

    def _response(self, status=200, body=None, content_type=None):
        '''Pack result into an appropriate HTTP response.'''
        status, body = self._response_body(status, body)
        if body is None:
            # No content: a successful call becomes 204 No Content.
            content_type = ''
            if status == 200:
                status = 204
        else:
            # Lists/dicts are serialised as JSON; anything else is raw bytes.
            if content_type is None:
                if isinstance(body, list) or isinstance(body, dict):
                    content_type = 'application/json'
                else:
                    content_type = 'application/octet-stream'
            if content_type == 'application/json':
                body = json.dumps(body, indent=2)
        response = webob.Response(status=status)
        if body is not None:
            response.content_type = content_type
            # NOTE(review): 'unicode' makes this module Python 2 only.
            if isinstance(body, unicode):
                response.unicode_body = body
            else:
                response.body = body
        return response

    def _response_body(self, status, body):
        '''Normalize the body from the type given.'''
        try:
            # Lazily evaluate callables/generators so backend errors can be
            # mapped onto HTTP status codes here.
            if isinstance(body, types.FunctionType):
                body = body()
            if isinstance(body, types.GeneratorType):
                body = list(body)
        except burrow.InvalidArguments as exception:
            status = 400
            body = exception.message
        except burrow.NotFound as exception:
            status = 404
            body = exception.message
        if body == []:
            body = None
        return status, body
class _WSGILog(object):
    '''Adapter letting eventlet.wsgi.server write into a burrow logger.'''

    def __init__(self, log):
        self.log = log

    def write(self, message):
        '''Forward one WSGI access-log line to the debug log.'''
        stripped = message.rstrip()
        self.log.debug(stripped)
| emonty/burrow | burrow/frontend/wsgi.py | Python | apache-2.0 | 8,110 |
# Lec 3.1, slide 2 -- compute x*x by repeated addition.
x = int(raw_input('Enter your number:'))
ans = 0
# Iterate |x| times adding |x| so the loop also works for negative input:
# the original 'i = x' loop body never executed when x < 0 and wrongly
# printed 0 for every negative number.  x*x == |x|*|x| in all cases.
i = abs(x)
while i > 0:
    ans = ans + abs(x)
    i = i - 1
print(str(x) + '*' + str(x) + '=' + str(ans))
#!/usr/bin/env python
# -- Content-Encoding: UTF-8 --
"""
Pelix Utilities: Task pool
:author: Thomas Calmant
:copyright: Copyright 2014, isandlaTech
:license: Apache License 2.0
:version: 0.5.7
:status: Beta
..
Copyright 2014 isandlaTech
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Documentation strings format
__docformat__ = "restructuredtext en"
# Module version
__version_info__ = (0, 5, 7)
__version__ = ".".join(str(x) for x in __version_info__)
# ------------------------------------------------------------------------------
# Pelix
import pelix.utilities
# Standard library
import logging
import threading
try:
# Python 3
# pylint: disable=F0401
import queue
except ImportError:
# Python 2
# pylint: disable=F0401
import Queue as queue
# ------------------------------------------------------------------------------
class FutureResult(object):
    """
    Placeholder for the result of a task run on another thread.

    Callers may either block on :meth:`result` or register a callback
    with :meth:`set_callback`.
    """
    def __init__(self, logger=None):
        """
        Prepares the future.

        :param logger: Logger used to report callback errors (optional)
        """
        self._logger = logger or logging.getLogger(__name__)
        self._done_event = pelix.utilities.EventData()
        self.__callback = None
        self.__extra = None

    def __notify(self):
        """
        Invokes the registered callback, if any, with the stored outcome
        """
        if self.__callback is None:
            return

        try:
            self.__callback(self._done_event.data,
                            self._done_event.exception,
                            self.__extra)
        except Exception as ex:
            self._logger.exception("Error calling back method: %s", ex)

    def set_callback(self, method, extra=None):
        """
        Registers a method to call once the result has been computed or in
        case of exception.

        The callback must have the signature
        ``callback(result, exception, extra)``.

        :param method: The method to call back in the end of the execution
        :param extra: Extra parameter to be given to the callback method
        """
        self.__callback = method
        self.__extra = extra
        if self._done_event.is_set():
            # Execution already finished: notify immediately
            self.__notify()

    def execute(self, method, args, kwargs):
        """
        Runs the given method and stores its outcome (result or exception).

        The result is considered "done" even if the method raises.

        :param method: The method to execute
        :param args: Method positional arguments
        :param kwargs: Method keyword arguments
        :raise Exception: Re-raises whatever the method raised
        """
        # Normalize arguments (None becomes empty)
        pos_args = args if args is not None else []
        kw_args = kwargs if kwargs is not None else {}

        try:
            outcome = method(*pos_args, **kw_args)
        except Exception as ex:
            # Store the error for result() and propagate to the caller
            self._done_event.raise_exception(ex)
            raise
        else:
            self._done_event.set(outcome)
        finally:
            # Always trigger the callback, success or failure
            self.__notify()

    def done(self):
        """
        Checks whether the job has finished (True) or not (False)
        """
        return self._done_event.is_set()

    def result(self, timeout=None):
        """
        Waits up to *timeout* seconds for the result, returning immediately
        when the job has already completed.

        :param timeout: The maximum time to wait for a result (in seconds)
        :raise OSError: The timeout elapsed before the job finished
        :raise: The exception encountered during the call, if any
        """
        if not self._done_event.wait(timeout):
            raise OSError("Timeout raised")
        return self._done_event.data
# ------------------------------------------------------------------------------
class ThreadPool(object):
    """
    Executes the tasks stored in a FIFO in a thread pool.

    Tasks are submitted with :meth:`enqueue`, which returns a FutureResult
    the caller can wait on or attach a callback to.
    """
    def __init__(self, nb_threads, queue_size=0, timeout=5, logname=None):
        """
        Sets up the task executor

        :param nb_threads: Size of the thread pool
        :param queue_size: Size of the task queue (0 for infinite)
        :param timeout: Queue timeout (in seconds)
        :param logname: Name of the logger
        :raise ValueError: Invalid number of threads
        """
        # Validate parameters
        try:
            nb_threads = int(nb_threads)
            if nb_threads < 1:
                raise ValueError("Pool size must be greater than 0")
        except (TypeError, ValueError) as ex:
            raise ValueError("Invalid pool size: {0}".format(ex))

        # The logger
        self._logger = logging.getLogger(logname or __name__)

        # The loop control event; set() means "pool stopped"
        self._done_event = threading.Event()
        self._done_event.set()

        # The task queue (an invalid size falls back to an infinite queue)
        try:
            queue_size = int(queue_size)
        except (TypeError, ValueError):
            # Not a valid integer
            queue_size = 0

        self._queue = queue.Queue(queue_size)
        self._timeout = timeout
        self.__lock = threading.Lock()

        # The thread pool
        self._nb_threads = nb_threads
        self._threads = []

    def start(self):
        """
        Starts the thread pool. Does nothing if the pool is already started.
        """
        if not self._done_event.is_set():
            # Stop event not set: we're running
            return

        # Clear the stop event
        self._done_event.clear()

        # Create the threads (idiomatic range() instead of a manual
        # while-counter loop)
        for i in range(1, self._nb_threads + 1):
            name = "{0}-{1}".format(self._logger.name, i)
            thread = threading.Thread(target=self.__run, name=name)
            self._threads.append(thread)

        # Start'em
        for thread in self._threads:
            thread.start()

    def stop(self):
        """
        Stops the thread pool. Does nothing if the pool is already stopped.
        """
        if self._done_event.is_set():
            # Stop event set: we're stopped
            return

        # Set the stop event
        self._done_event.set()

        with self.__lock:
            # Add one sentinel per worker to the queue (to unlock get())
            try:
                for _ in self._threads:
                    self._queue.put(self._done_event, True, self._timeout)
            except queue.Full:
                # There is already something in the queue
                pass

        # Join threads
        for thread in self._threads:
            while thread.is_alive():
                # Wait 3 seconds
                thread.join(3)
                if thread.is_alive():
                    # Thread is still alive: something might be wrong
                    self._logger.warning("Thread %s is still alive...",
                                         thread.name)

        # Clear storage
        del self._threads[:]
        self.clear()

    def enqueue(self, method, *args, **kwargs):
        """
        Enqueues a task in the pool

        :param method: Method to call
        :return: A FutureResult object, to get the result of the task
        :raise ValueError: Invalid method
        :raise Full: The task queue is full
        """
        # callable() covers functions, methods and callable objects, and is
        # clearer than probing for a __call__ attribute.
        if not callable(method):
            raise ValueError("{0} has no __call__ member."
                             .format(method.__name__))

        # Prepare the future result object
        future = FutureResult(self._logger)

        # Use a lock, as we might be "resetting" the queue
        with self.__lock:
            # Add the task to the queue
            self._queue.put((method, args, kwargs, future), True,
                            self._timeout)

        return future

    def clear(self):
        """
        Empties the current queue content.
        Returns once the queue have been emptied.
        """
        with self.__lock:
            # Empty the current queue
            try:
                while True:
                    self._queue.get_nowait()
                    self._queue.task_done()
            except queue.Empty:
                # Queue is now empty
                pass

        # Wait for the tasks currently executed
        self.join()

    def join(self, timeout=None):
        """
        Waits for all the tasks to be executed

        :param timeout: Maximum time to wait (in seconds)
        :return: True if the queue has been emptied, else False
        """
        if self._queue.empty():
            # Nothing to wait for...
            return True
        elif timeout is None:
            # Use the original join
            self._queue.join()
            return True
        else:
            # Wait on the condition guarding the unfinished-task counter
            with self._queue.all_tasks_done:
                self._queue.all_tasks_done.wait(timeout)
                return self._queue.empty()

    def __run(self):
        """
        The main loop of each worker thread
        """
        while not self._done_event.is_set():
            try:
                # Wait for an action (blocking)
                task = self._queue.get(True, self._timeout)
                if task is self._done_event:
                    # Stop event in the queue: get out
                    self._queue.task_done()
                    return
            except queue.Empty:
                # Nothing to do
                pass
            else:
                # Extract elements
                method, args, kwargs, future = task
                try:
                    # Call the method
                    future.execute(method, args, kwargs)
                except Exception as ex:
                    self._logger.exception("Error executing %s: %s",
                                           method.__name__, ex)
                finally:
                    # Mark the action as executed
                    self._queue.task_done()
| isandlaTech/cohorte-runtime | python/src/lib/python/pelix/threadpool.py | Python | apache-2.0 | 10,775 |
# Ground program for the well-founded-semantics test: t is derived from
# t0 facts and propagated along graph edges g(X, Y, Z), both positively
# (via t(X)) and through negation (via not t(Y)).
input = """
t(Z) :- t0(Z).
t(Z) :- g(X,Y,Z), t(X).
t(Z) :- g(X,Y,Z), not t(Y).
t0(1).
g(1,2,3).
g(2,5,4).
g(2,4,5).
g(5,3,6).
"""
# Expected output: identical to the input program (no rewriting expected).
output = """
t(Z) :- t0(Z).
t(Z) :- g(X,Y,Z), t(X).
t(Z) :- g(X,Y,Z), not t(Y).
t0(1).
g(1,2,3).
g(2,5,4).
g(2,4,5).
g(5,3,6).
"""
| veltri/DLV2 | tests/parser/wellfounded.11.test.py | Python | apache-2.0 | 285 |
import os
from io import BytesIO
from translate.convert import html2po, po2html, test_convert
class TestHTML2PO:
def html2po(
self,
markup,
duplicatestyle="msgctxt",
keepcomments=False,
):
"""Helper to convert html to po without a file."""
inputfile = BytesIO(markup.encode() if isinstance(markup, str) else markup)
convertor = html2po.html2po()
return convertor.convertfile(inputfile, "test", duplicatestyle, keepcomments)
def po2html(self, posource, htmltemplate):
"""Helper to convert po to html without a file."""
# Convert pofile object to bytes
inputfile = BytesIO(bytes(posource))
outputfile = BytesIO()
templatefile = BytesIO(htmltemplate.encode())
assert po2html.converthtml(inputfile, outputfile, templatefile)
return outputfile.getvalue().decode("utf-8")
def countunits(self, pofile, expected):
"""helper to check that we got the expected number of messages"""
actual = len(pofile.units)
if actual > 0:
if pofile.units[0].isheader():
actual = actual - 1
print(pofile)
assert actual == expected
def compareunit(self, pofile, unitnumber, expected):
"""helper to validate a PO message"""
if not pofile.units[0].isheader():
unitnumber = unitnumber - 1
print("unit source: " + pofile.units[unitnumber].source + "|")
print("expected: " + expected + "|")
assert str(pofile.units[unitnumber].source) == str(expected)
def check_single(self, markup, itemtext):
"""checks that converting this markup produces a single element with value itemtext"""
pofile = self.html2po(markup)
self.countunits(pofile, 1)
self.compareunit(pofile, 1, itemtext)
def check_null(self, markup):
"""checks that converting this markup produces no elements"""
pofile = self.html2po(markup)
self.countunits(pofile, 0)
def check_phpsnippet(self, php):
"""Given a snippet of php, put it into an HTML shell and see if the results are as expected"""
self.check_single(
'<html><head></head><body><p><a href="'
+ php
+ '/site.html">Body text</a></p></body></html>',
"Body text",
)
self.check_single(
'<html><head></head><body><p>More things in <a href="'
+ php
+ '/site.html">Body text</a></p></body></html>',
'More things in <a href="' + php + '/site.html">Body text</a>',
)
self.check_single(
"<html><head></head><body><p>" + php + "</p></body></html>", php
)
def test_extract_lang_attribute_from_html_tag(self):
"""Test that the lang attribute is extracted from the html tag, issue #3884"""
markup = """<!DOCTYPE html>
<html lang="en">
<head>
<title>translate lang attribute</title>
</head>
<body>
</body>
</html>
"""
pofile = self.html2po(markup)
self.countunits(pofile, 2)
self.compareunit(pofile, 1, "en")
self.compareunit(pofile, 2, "translate lang attribute")
def test_do_not_extract_lang_attribute_from_tags_other_than_html(self):
"""Test that the lang attribute is extracted from the html tag"""
self.check_single('<p><span lang="fr">Français</span></p>', "Français")
def test_title(self):
"""test that we can extract the <title> tag"""
self.check_single(
"<html><head><title>My title</title></head><body></body></html>", "My title"
)
def test_title_with_linebreak(self):
"""Test a linebreak in the <title> tag"""
htmltext = """<html>
<head>
<title>My
title</title>
</head>
<body>
</body>
</html>
"""
self.check_single(htmltext, "My title")
def test_meta(self):
"""Test that we can extract certain <meta> info from <head>."""
self.check_single(
"""<html><head><meta name="keywords" content="these are keywords"></head><body></body></html>""",
"these are keywords",
)
def test_tag_p(self):
"""test that we can extract the <p> tag"""
self.check_single(
"<html><head></head><body><p>A paragraph.</p></body></html>", "A paragraph."
)
def test_tag_p_with_br(self):
"""test that we can extract the <p> tag with an embedded <br> element"""
markup = "<p>First line.<br>Second line.</p>"
pofile = self.html2po(markup)
self.compareunit(pofile, 1, "First line.<br>Second line.")
def test_tag_p_with_linebreak(self):
"""Test newlines within the <p> tag."""
htmltext = """<html>
<head>
</head>
<body>
<p>
A paragraph is a section in a piece of writing, usually highlighting a
particular point or topic. It always begins on a new line and usually
with indentation, and it consists of at least one sentence.
</p>
</body>
</html>
"""
self.check_single(
htmltext,
"A paragraph is a section in a piece of writing, usually highlighting a particular point or topic. It always begins on a new line and usually with indentation, and it consists of at least one sentence.",
)
def test_tag_p_with_linebreak_and_embedded_br(self):
"""Test newlines within the <p> tag when there is an embedded <br> element."""
markup = "<p>First\nline.<br>Second\nline.</p>"
pofile = self.html2po(markup)
self.compareunit(pofile, 1, "First line.<br>Second line.")
def test_uppercase_html(self):
"""Should ignore the casing of the html tags."""
self.check_single(
"<HTML><HEAD></HEAD><BODY><P>A paragraph.</P></BODY></HTML>", "A paragraph."
)
def test_tag_div(self):
"""test that we can extract the <div> tag"""
self.check_single(
"<html><head></head><body><div>A paragraph.</div></body></html>",
"A paragraph.",
)
markup = "<div>First line.<br>Second line.</div>"
pofile = self.html2po(markup)
self.compareunit(pofile, 1, "First line.<br>Second line.")
def test_tag_div_with_linebreaks(self):
"""Test linebreaks within a <div> tag."""
htmltext = """<html>
<head>
</head>
<body>
<div>
A paragraph is a section in a piece of writing, usually highlighting a
particular point or topic. It always begins on a new line and usually
with indentation, and it consists of at least one sentence.
</div>
</body>
</html>
"""
self.check_single(
htmltext,
"A paragraph is a section in a piece of writing, usually highlighting a particular point or topic. It always begins on a new line and usually with indentation, and it consists of at least one sentence.",
)
markup = "<div>First\nline.<br>Second\nline.</div>"
pofile = self.html2po(markup)
self.compareunit(pofile, 1, "First line.<br>Second line.")
def test_tag_a(self):
"""test that we can extract the <a> tag"""
self.check_single(
'<html><head></head><body><p>A paragraph with <a href="http://translate.org.za/">hyperlink</a>.</p></body></html>',
'A paragraph with <a href="http://translate.org.za/">hyperlink</a>.',
)
def test_tag_a_with_linebreak(self):
"""Test that we can extract the <a> tag with newlines in it."""
htmltext = """<html>
<head>
</head>
<body>
<p>A
paragraph
with <a
href="http://translate.org.za/">hyperlink</a>
and
newlines.</p></body></html>
"""
self.check_single(
htmltext,
'A paragraph with <a href="http://translate.org.za/">hyperlink</a> and newlines.',
)
def test_sequence_of_anchor_elements(self):
    """test that we can extract a sequence of anchor elements without mixing up start/end tags, issue #3768"""
    # Two sibling <a> elements: each end tag must pair with its own start tag.
    self.check_single(
        '<p><a href="http://example.com">This is a link</a> but this is not. <a href="http://example.com">However this is too</a></p>',
        '<a href="http://example.com">This is a link</a> but this is not. <a href="http://example.com">However this is too</a>',
    )
def test_tag_img(self):
    """The alt attribute of an <img> element becomes a translation unit."""
    page = """<html><head></head><body><img src="picture.png" alt="A picture"></body></html>"""
    self.check_single(page, "A picture")
def test_img_empty(self):
    """An <img> without an alt attribute must produce no translation unit.

    (The previous docstring was copy-pasted from test_tag_img and claimed
    the opposite of what this test checks.)
    """
    htmlsource = """<html><head></head><body><img src="images/topbar.jpg" width="750" height="80"></body></html>"""
    self.check_null(htmlsource)
def test_tag_img_inside_a(self):
    """Test that we can extract the alt attribute from the <img> tag when the img is embedded in a link."""
    self.check_single(
        """<html><head></head><body><p><a href="#"><img src="picture.png" alt="A picture" /></a></p></body></html>""",
        "A picture",
    )
def test_tag_table_summary(self):
    """The summary attribute of a <table> must be extracted."""
    page = """<html><head></head><body><table summary="Table summary"></table></body></html>"""
    self.check_single(page, "Table summary")
def test_table_simple(self):
    """Test that we can fully extract a simple table."""
    markup = """<html><head></head><body><table><tr><th>Heading One</th><th>Heading Two</th></tr><tr><td>One</td><td>Two</td></tr></table></body></html>"""
    pofile = self.html2po(markup)
    # Cells appear in document order: header row first, then data row.
    self.countunits(pofile, 4)
    self.compareunit(pofile, 1, "Heading One")
    self.compareunit(pofile, 2, "Heading Two")
    self.compareunit(pofile, 3, "One")
    self.compareunit(pofile, 4, "Two")
def test_table_complex(self):
    """A table using summary, caption, abbr, thead, tfoot and tbody must
    yield every piece of text, in document order."""
    markup = """<table summary="This is the summary"><caption>A caption</caption><thead><tr><th abbr="Head 1">Heading One</th><th>Heading Two</th></tr></thead><tfoot><tr><td>Foot One</td><td>Foot Two</td></tr></tfoot><tbody><tr><td>One</td><td>Two</td></tr></tbody></table>"""
    pofile = self.html2po(markup)
    self.countunits(pofile, 9)
    self.compareunit(pofile, 1, "This is the summary")
    self.compareunit(pofile, 2, "A caption")
    # The abbr attribute is extracted before its cell's content.
    self.compareunit(pofile, 3, "Head 1")
    self.compareunit(pofile, 4, "Heading One")
    self.compareunit(pofile, 5, "Heading Two")
    self.compareunit(pofile, 6, "Foot One")
    self.compareunit(pofile, 7, "Foot Two")
    self.compareunit(pofile, 8, "One")
    self.compareunit(pofile, 9, "Two")
def test_table_empty(self):
    """Test that we ignore tables that are empty.

    A table is deemed empty if it has no translatable content.
    """
    # A cell containing only an image (no alt text).
    self.check_null(
        """<html><head></head><body><table><tr><td><img src="bob.png"></td></tr></table></body></html>"""
    )
    # A cell containing only a non-breaking space.
    self.check_null(
        """<html><head></head><body><table><tr><td>&nbsp;</td></tr></table></body></html>"""
    )
    # A cell containing only an empty inline element.
    self.check_null(
        """<html><head></head><body><table><tr><td><strong></strong></td></tr></table></body></html>"""
    )
def test_address(self):
    """The <address> element must be extracted as a translatable block."""
    markup = "<body><address>My address</address></body>"
    self.check_single(markup, "My address")
def test_headings(self):
    """Test to see if the h* elements are extracted"""
    markup = "<html><head></head><body><h1>Heading One</h1><h2>Heading Two</h2><h3>Heading Three</h3><h4>Heading Four</h4><h5>Heading Five</h5><h6>Heading Six</h6></body></html>"
    pofile = self.html2po(markup)
    # One unit per heading level, h1 through h6.
    self.countunits(pofile, 6)
    self.compareunit(pofile, 1, "Heading One")
    self.compareunit(pofile, 2, "Heading Two")
    self.compareunit(pofile, 3, "Heading Three")
    self.compareunit(pofile, 4, "Heading Four")
    self.compareunit(pofile, 5, "Heading Five")
    self.compareunit(pofile, 6, "Heading Six")
def test_headings_with_linebreaks(self):
    """Test to see if h* elements with newlines can be extracted"""
    markup = "<html><head></head><body><h1>Heading\nOne</h1><h2>Heading\nTwo</h2><h3>Heading\nThree</h3><h4>Heading\nFour</h4><h5>Heading\nFive</h5><h6>Heading\nSix</h6></body></html>"
    pofile = self.html2po(markup)
    self.countunits(pofile, 6)
    # The newline inside each heading must be reflowed to a space.
    self.compareunit(pofile, 1, "Heading One")
    self.compareunit(pofile, 2, "Heading Two")
    self.compareunit(pofile, 3, "Heading Three")
    self.compareunit(pofile, 4, "Heading Four")
    self.compareunit(pofile, 5, "Heading Five")
    self.compareunit(pofile, 6, "Heading Six")
def test_dt(self):
    """The definition-list title element <dt> must be extracted."""
    page = "<html><head></head><body><dl><dt>Definition List Item Title</dt></dl></body></html>"
    self.check_single(page, "Definition List Item Title")
def test_dd(self):
    """The definition-list description element <dd> must be extracted."""
    page = "<html><head></head><body><dl><dd>Definition List Item Description</dd></dl></body></html>"
    self.check_single(page, "Definition List Item Description")
def test_span(self):
    """test to check that we don't double extract a span item"""
    # The <span> must stay inline inside its paragraph's single unit,
    # not be extracted as a second unit of its own.
    self.check_single(
        "<html><head></head><body><p>You are a <span>Spanish</span> sentence.</p></body></html>",
        "You are a <span>Spanish</span> sentence.",
    )
def test_ul(self):
    """Test that list items <li> are extracted from both <ul> and <ol> lists."""
    markup = "<html><head></head><body><ul><li>Unordered One</li><li>Unordered Two</li></ul><ol><li>Ordered One</li><li>Ordered Two</li></ol></body></html>"
    pofile = self.html2po(markup)
    self.countunits(pofile, 4)
    self.compareunit(pofile, 1, "Unordered One")
    self.compareunit(pofile, 2, "Unordered Two")
    self.compareunit(pofile, 3, "Ordered One")
    self.compareunit(pofile, 4, "Ordered Two")
def test_nested_lists(self):
    """Nested lists should be extracted correctly"""
    markup = """<!DOCTYPE html><html><head><title>Nested lists</title></head><body>
<ul>
<li>Vegetables</li>
<li>Fruit
<ul>
<li>Bananas</li>
<li>Apples</li>
<li>Pears</li>
</ul>
yeah, that should be enough
</li>
<li>Meat</li>
</ul>
</body></html>"""
    pofile = self.html2po(markup)
    self.countunits(pofile, 8)
    self.compareunit(pofile, 1, "Nested lists")
    self.compareunit(pofile, 2, "Vegetables")
    # The "Fruit" <li> is split around its nested <ul>: the text before
    # and the text after the inner list become separate units.
    self.compareunit(pofile, 3, "Fruit")
    self.compareunit(pofile, 4, "Bananas")
    self.compareunit(pofile, 5, "Apples")
    self.compareunit(pofile, 6, "Pears")
    self.compareunit(pofile, 7, "yeah, that should be enough")
    self.compareunit(pofile, 8, "Meat")
def test_duplicates(self):
    """check that we use the default style of msgctxt to disambiguate duplicate messages"""
    markup = (
        "<html><head></head><body><p>Duplicate</p><p>Duplicate</p></body></html>"
    )
    pofile = self.html2po(markup)
    # Both occurrences must be kept as separate units, distinguished by
    # their source location (character offset in the input).
    self.countunits(pofile, 2)
    # FIXME change this so that we check that the msgctxt is correctly added
    self.compareunit(pofile, 1, "Duplicate")
    assert pofile.units[1].getlocations() == ["None+html.body.p:1-26"]
    self.compareunit(pofile, 2, "Duplicate")
    assert pofile.units[2].getlocations() == ["None+html.body.p:1-42"]
def test_multiline_reflow(self):
    """check that we reflow multiline content to make it more readable for translators"""
    self.check_single(
        """<td valign="middle" width="96%"><font class="headingwhite">South
Africa</font></td>""",
        """South Africa""",
    )
def test_nested_tags(self):
    """Text inside a nested <p> and bare text after it are separate units."""
    source = "<div><p>Extract this</p>And this</div>"
    store = self.html2po(source)
    self.countunits(store, 2)
    self.compareunit(store, 1, "Extract this")
    self.compareunit(store, 2, "And this")
def test_carriage_return(self):
    """Remove carriage returns from files in dos format."""
    # Note: the \r below are escape sequences inside the literal, giving
    # real CR characters as produced by DOS line endings.
    htmlsource = """<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">\r
<html><!-- InstanceBegin template="/Templates/masterpage.dwt" codeOutsideHTMLIsLocked="false" -->\r
<head>\r
<!-- InstanceBeginEditable name="doctitle" -->\r
<link href="fmfi.css" rel="stylesheet" type="text/css">\r
</head>\r
\r
<body>\r
<p>The rapid expansion of telecommunications infrastructure in recent\r
years has helped to bridge the digital divide to a limited extent.</p> \r
</body>\r
<!-- InstanceEnd --></html>\r
"""
    self.check_single(
        htmlsource,
        "The rapid expansion of telecommunications infrastructure in recent years has helped to bridge the digital divide to a limited extent.",
    )
def test_encoding_latin1(self):
    """Convert HTML input in iso-8859-1 correctly to unicode.

    Also verifies that the charset declaration isn't extracted as a
    translation unit.  (The second sentence used to be a separate bare
    string statement, which was a no-op rather than documentation.)
    """
    # \x96 is an en dash and \x92 a right single quote in windows-1252,
    # the superset commonly served as iso-8859-1.
    htmlsource = b"""<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html><!-- InstanceBegin template="/Templates/masterpage.dwt" codeOutsideHTMLIsLocked="false" -->
<head>
<!-- InstanceBeginEditable name="doctitle" -->
<title>FMFI - South Africa - CSIR Openphone - Overview</title>
<!-- InstanceEndEditable -->
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
<meta name="keywords" content="fmfi, first mile, first inch, wireless, rural development, access devices, mobile devices, wifi, connectivity, rural connectivty, ict, low cost, cheap, digital divide, csir, idrc, community">
<!-- InstanceBeginEditable name="head" -->
<!-- InstanceEndEditable -->
<link href="../../../fmfi.css" rel="stylesheet" type="text/css">
</head>
<body>
<p>We aim to please \x96 will you aim too, please?</p>
<p>South Africa\x92s language diversity can be challenging.</p>
</body>
</html>
"""
    pofile = self.html2po(htmlsource)
    self.countunits(pofile, 4)
    self.compareunit(pofile, 1, "FMFI - South Africa - CSIR Openphone - Overview")
    self.compareunit(
        pofile,
        2,
        "fmfi, first mile, first inch, wireless, rural development, access devices, mobile devices, wifi, connectivity, rural connectivty, ict, low cost, cheap, digital divide, csir, idrc, community",
    )
    self.compareunit(pofile, 3, "We aim to please \x96 will you aim too, please?")
    self.compareunit(
        pofile, 4, "South Africa\x92s language diversity can be challenging."
    )
def test_strip_html(self):
    """Ensure that unnecessary html is stripped from the resulting unit."""
    htmlsource = """<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html>
<head>
<title>FMFI - Contact</title>
</head>
<body>
<table width="100%" border="0" cellpadding="0" cellspacing="0">
<tr align="left" valign="top">
<td width="150" height="556">
<table width="157" height="100%" border="0" cellspacing="0" id="leftmenubg-color">
<tr>
<td align="left" valign="top" height="555">
<table width="100%" border="0" cellspacing="0" cellpadding="2">
<tr align="left" valign="top" bgcolor="#660000">
<td width="4%"><strong></strong></td>
<td width="96%"><strong><font class="headingwhite">Projects</font></strong></td>
</tr>
<tr align="left" valign="top">
<td valign="middle" width="4%"><img src="images/arrow.gif" width="8" height="8"></td>
<td width="96%"><a href="index.html">Home Page</a></td>
</tr>
</table>
</td>
</tr>
</table>
</td>
</tr>
</table>
</body>
</html>
"""
    pofile = self.html2po(htmlsource)
    # Only the title and the two visible texts survive; the markup does not.
    self.countunits(pofile, 3)
    self.compareunit(pofile, 2, "Projects")
    self.compareunit(pofile, 3, "Home Page")
    # Translate and convert back:
    pofile.units[2].target = "Projekte"
    pofile.units[3].target = "Tuisblad"
    # Normalise whitespace in the round-tripped HTML before comparing.
    htmlresult = (
        self.po2html(bytes(pofile), htmlsource)
        .replace("\n", " ")
        .replace('= "', '="')
        .replace("> <", "><")
    )
    snippet = '<td width="96%"><strong><font class="headingwhite">Projekte</font></strong></td>'
    assert snippet in htmlresult
    snippet = '<td width="96%"><a href="index.html">Tuisblad</a></td>'
    assert snippet in htmlresult
def test_entityrefs_in_text(self):
    """Should extract html entityrefs, preserving the ones representing reserved characters.

    `See <https://developer.mozilla.org/en-US/docs/Glossary/Entity>`.
    (The reference used to be a separate bare string statement, which was
    a no-op rather than part of the docstring.)
    """
    self.check_single(
        "<html><head></head><body><p>&lt;not an element&gt; &amp; &quot; &apos; &rsquo;</p></body></html>",
        "&lt;not an element&gt; &amp; \" ' \u2019",
    )
def test_entityrefs_in_attributes(self):
    """Should convert html entityrefs in attribute values"""
    # it would be even nicer if &quot; and &apos; could be preserved, but the automatic unescaping of
    # attributes is deep inside html.HTMLParser.
    self.check_single(
        '<html><head></head><body><img alt="&lt;not an element&gt; &amp; &quot; &apos; &rsquo;"></body></html>',
        "&lt;not an element&gt; &amp; \" ' \u2019",
    )
def test_charrefs(self):
    """Should extract html charrefs"""
    # Decimal (&#8217;) and hexadecimal (&#x2019;) references both decode
    # to the same right single quotation mark.
    self.check_single(
        "<html><head></head><body><p>&#8217; &#x2019;</p></body></html>",
        "\u2019 \u2019",
    )
def test_php(self):
    """Test that PHP snippets don't interfere"""
    # A simple string
    self.check_phpsnippet("""<?=$phpvariable?>""")
    # Contains HTML tag characters (< and >)
    self.check_phpsnippet("""<?=($a < $b ? $foo : ($b > c ? $bar : $cat))?>""")
    # Make sure basically any symbol can be handled
    # NOTE quotation mark removed since it violates the HTML format when placed in an attribute
    self.check_phpsnippet(
        """<? asdfghjkl qwertyuiop 1234567890!@#$%^&*()-=_+[]\\{}|;':,./<>? ?>"""
    )
def test_multiple_php(self):
    """Test multiple PHP snippets in a string to make sure they get restored properly"""
    php1 = """<?=$phpvariable?>"""
    php2 = """<?=($a < $b ? $foo : ($b > c ? $bar : $cat))?>"""
    php3 = """<? asdfghjklqwertyuiop1234567890!@#$%^&*()-=_+[]\\{}|;':",./<>? ?>"""
    # Put 3 different strings into an html string
    innertext = (
        '<a href="'
        + php1
        + '/site.html">Body text</a> and some '
        + php2
        + " more text "
        + php2
        + php3
    )
    htmlsource = "<html><head></head><body><p>" + innertext + "</p></body></html>"
    # The PHP snippets must come through the round trip untouched.
    self.check_single(htmlsource, innertext)
def test_php_multiline(self):
    """A multi-line PHP snippet must survive extraction and merge intact."""
    # A multi-line php string to test
    php1 = """<? abc
def
ghi ?>"""
    # Scatter the php strings throughout the file, and show what the translation should be
    innertext = (
        '<a href="'
        + php1
        + '/site.html">Body text</a> and some '
        + php1
        + " more text "
        + php1
        + php1
    )
    innertrans = (
        '<a href="'
        + php1
        + '/site.html">Texte de corps</a> et encore de '
        + php1
        + " plus de texte "
        + php1
        + php1
    )
    htmlsource = (
        "<html><head></head><body><p>" + innertext + "</p></body></html>"
    )  # Current html file
    transsource = (
        "<html><head></head><body><p>" + innertrans + "</p></body></html>"
    )  # Expected translation
    pofile = self.html2po(htmlsource)
    pofile.units[1].target = innertrans  # Register the translation in the PO file
    htmlresult = self.po2html(pofile, htmlsource)
    assert htmlresult == transsource
def test_php_with_embedded_html(self):
    """Should not consume HTML within processing instructions"""
    # The <p> inside the PHP block must not be parsed as real markup.
    self.check_single(
        "<html><head></head><body><p>a <? <p>b</p> ?> c</p></body></html>",
        "a <? <p>b</p> ?> c",
    )
def test_comments(self):
    """Test that HTML comments are converted to translator notes in output"""
    pofile = self.html2po(
        "<!-- comment outside block --><p><!-- a comment -->A paragraph<!-- with another comment -->.</p>",
        keepcomments=True,
    )
    self.compareunit(pofile, 1, "A paragraph.")
    # Only the comments inside the paragraph are attached to its unit;
    # the comment outside any block is dropped.
    notes = pofile.getunits()[-1].getnotes()
    assert str(notes) == " a comment \n with another comment "
def test_attribute_without_value(self):
    """A boolean attribute (here ``download``) must not break extraction."""
    htmlsource = """<ul>
<li><a href="logoColor.eps" download>EPS f&auml;rg</a></li>
</ul>
"""
    pofile = self.html2po(htmlsource)
    self.compareunit(pofile, 1, "EPS f\u00e4rg")
class TestHTML2POCommand(test_convert.TestConvertCommand, TestHTML2PO):
    """Tests running actual html2po commands on files"""

    convertmodule = html2po
    defaultoptions = {"progress": "none"}

    def test_multifile_single(self):
        """Test the --multifile=single option and make sure it produces one pot file per input file."""
        self.create_testfile(
            "file1.html", "<div>You are only coming through in waves</div>"
        )
        self.create_testfile(
            "file2.html", "<div>Your lips move but I cannot hear what you say</div>"
        )
        self.run_command("./", "pots", pot=True, multifile="single")
        # One .pot per input, each containing only its own file's strings.
        assert os.path.isfile(self.get_testfilename("pots/file1.pot"))
        assert os.path.isfile(self.get_testfilename("pots/file2.pot"))
        content = str(self.read_testfile("pots/file1.pot"))
        assert "coming through" in content
        assert "cannot hear" not in content

    def test_multifile_onefile(self):
        """Test the --multifile=onefile option and make sure it produces a file, not a directory."""
        self.create_testfile(
            "file1.html", "<div>You are only coming through in waves</div>"
        )
        self.create_testfile(
            "file2.html", "<div>Your lips move but I cannot hear what you say</div>"
        )
        self.run_command("./", "one.pot", pot=True, multifile="onefile")
        # A single .pot combining the strings of both inputs.
        assert os.path.isfile(self.get_testfilename("one.pot"))
        content = str(self.read_testfile("one.pot"))
        assert "coming through" in content
        assert "cannot hear" in content

    def test_multifile_onefile_to_stdout(self, capsys):
        """Test the --multifile=onefile option without specifying an output file. Default is stdout."""
        self.create_testfile(
            "file1.html", "<div>You are only coming through in waves</div>"
        )
        self.create_testfile(
            "file2.html", "<div>Your lips move but I cannot hear what you say</div>"
        )
        self.run_command("./", pot=True, multifile="onefile")
        content, err = capsys.readouterr()
        assert "coming through" in content
        assert "cannot hear" in content
        assert err == ""

    def test_help(self, capsys):
        """Test getting help."""
        options = super().test_help(capsys)
        options = self.help_check(options, "-P, --pot")
        options = self.help_check(options, "--duplicates=DUPLICATESTYLE")
        options = self.help_check(options, "--keepcomments")
        options = self.help_check(options, "--multifile=MULTIFILESTYLE", last=True)
| miurahr/translate | translate/convert/test_html2po.py | Python | gpl-2.0 | 28,109 |
from flask import Blueprint

# initialization of the blueprint
um = Blueprint('um', __name__, static_folder='static', template_folder='templates', url_prefix='/contacts')

# Imported at the bottom on purpose: the views module needs the ``um``
# blueprint above to register its routes, so importing it earlier would
# create a circular import.
# NOTE(review): this is a Python 2 implicit relative import; under
# Python 3 it would need to be ``from . import views``.
import views
#!/usr/bin/env python
#coding=utf-8
#########################################################################
# File Name: lda.py
# Author: Mark Chen
# mail: zhenchentl@gmail.com
# Created Time: 2014年12月14日 星期日 11时07分12秒
#########################################################################
from util.Params import *
import logging
from gensim import corpora, models
from redisHelper.RedisHelper import RedisHelper
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', \
level=logging.INFO)
class baseLda:
    """Train an LDA topic model over author and conference documents and
    store each entity's topic vector in Redis.

    The corpus is the author documents followed by the conference
    documents; ``authorDocEnd`` records the index of the last author
    document so conference vectors can be located after training.
    """

    def __init__(self):
        self.mRedis = RedisHelper()
        # Index of the last author document within self.docs.
        self.authorDocEnd = -1
        self.docs = list()
        self.corpus_lda = list()
        for line in open(PATH_AUTHOR_DOC, 'r'):
            self.docs.append(line.split())
            self.authorDocEnd += 1
        # Conference documents are appended after the author documents.
        for line in open(PATH_CONF_DOCNF_DOC, 'r'):
            self.docs.append(line.split())
        print(len(self.docs))

    def lda_setp1(self):
        """Step 1: build the dictionary and serialized corpus on disk.

        (Name keeps the historical "setp" typo for backward compatibility.)
        """
        dictionary = corpora.Dictionary(self.docs)
        logging.info("store the dictionary, for future reference.")
        dictionary.save_as_text(PATH_LDA_DIC)
        corpus = [dictionary.doc2bow(doc) for doc in self.docs]
        logging.info("store to disk, for later use.")
        corpora.MmCorpus.serialize(PATH_LDA_MM, corpus)

    def lda_step2(self):
        """Step 2: train the LDA model and persist the topic vectors."""
        logging.info("load Dictionary.")
        id2word = corpora.Dictionary.load_from_text(PATH_LDA_DIC)
        logging.info("load corpus iterator.")
        mm = corpora.MmCorpus(PATH_LDA_MM)
        logging.info('LDA Start.')
        lda = models.ldamodel.LdaModel(corpus=mm, id2word=id2word, \
            num_topics=100, update_every=1, chunksize=10000, passes=1)
        logging.info('LDA End')
        self.corpus_lda = list(lda[mm])
        # BUG FIX: saveVec() takes no arguments besides self; the old call
        # self.saveVec(self.corpus_lda) raised TypeError at runtime.
        self.saveVec()

    def saveVec(self):
        """Write every author's and conference's topic vector to Redis."""
        authors = self.mRedis.getAllAuthors()
        confs = self.mRedis.getAllConfs()
        for author in authors:
            DocId = int(self.mRedis.getDocIdByAuthor(author))
            vec = self.corpus_lda[DocId]
            for topic, value in vec:
                self.mRedis.addAuthorVec(author, \
                    str(topic) + ':' + str(value))
        for conf in confs:
            DocId = int(self.mRedis.getDocIdByConf(conf))
            # Conference documents start right after the author documents.
            vec = self.corpus_lda[DocId + self.authorDocEnd + 1]
            for topic, value in vec:
                self.mRedis.addConfVec(conf, \
                    str(topic) + ':' + str(value))
        # Release the large in-memory corpora once everything is persisted.
        self.docs = []
        self.corpus_lda = []
if __name__ == '__main__':
    baselda = baseLda()
    # Step 1 only needs to run once to build the on-disk dictionary/corpus.
    # baselda.lda_setp1()
    baselda.lda_step2()
| zhenchentl/confRec | topic/lda.py | Python | mit | 2,746 |
"""
dyld emulation
"""
from itertools import chain
import os, sys
from macholib.framework import framework_info
from macholib.dylib import dylib_info
__all__ = [
'dyld_find', 'framework_find',
'framework_info', 'dylib_info',
]
# These are the defaults as per man dyld(1)
#
# Fallback search paths used when DYLD_FALLBACK_FRAMEWORK_PATH is unset.
_DEFAULT_FRAMEWORK_FALLBACK = [
    os.path.expanduser("~/Library/Frameworks"),
    "/Library/Frameworks",
    "/Network/Library/Frameworks",
    "/System/Library/Frameworks",
]

# Fallback search paths used when DYLD_FALLBACK_LIBRARY_PATH is unset.
_DEFAULT_LIBRARY_FALLBACK = [
    os.path.expanduser("~/lib"),
    "/usr/local/lib",
    "/lib",
    "/usr/lib",
]
# XXX: Is this function still needed?
if sys.version_info[0] == 2:
    def _ensure_utf8(s):
        """Not all of PyObjC and Python understand unicode paths very well yet"""
        # Python 2: encode unicode paths down to UTF-8 byte strings.
        if isinstance(s, unicode):
            return s.encode('utf8')
        return s
else:
    def _ensure_utf8(s):
        """On Python 3 paths are already str; reject any other non-None type."""
        if s is not None and not isinstance(s, str):
            raise ValueError(s)
        return s
def _dyld_env(env, var):
if env is None:
env = os.environ
rval = env.get(var)
if rval is None or rval == '':
return []
return rval.split(':')
def dyld_image_suffix(env=None):
    """Return DYLD_IMAGE_SUFFIX from *env* (default ``os.environ``), or None."""
    return (os.environ if env is None else env).get('DYLD_IMAGE_SUFFIX')
def dyld_framework_path(env=None):
    """Return DYLD_FRAMEWORK_PATH as a list of directories."""
    return _dyld_env(env, 'DYLD_FRAMEWORK_PATH')
def dyld_library_path(env=None):
    """Return DYLD_LIBRARY_PATH as a list of directories."""
    return _dyld_env(env, 'DYLD_LIBRARY_PATH')
def dyld_fallback_framework_path(env=None):
    """Return DYLD_FALLBACK_FRAMEWORK_PATH as a list of directories."""
    return _dyld_env(env, 'DYLD_FALLBACK_FRAMEWORK_PATH')
def dyld_fallback_library_path(env=None):
    """Return DYLD_FALLBACK_LIBRARY_PATH as a list of directories."""
    return _dyld_env(env, 'DYLD_FALLBACK_LIBRARY_PATH')
def dyld_image_suffix_search(iterator, env=None):
    """For a potential path iterator, add DYLD_IMAGE_SUFFIX semantics.

    For each candidate path, yield the suffixed variant first (inserted
    before the '.dylib' extension when present, appended otherwise),
    followed by the original path.  Without a suffix the iterator is
    returned unchanged.
    """
    suffix = dyld_image_suffix(env)
    if suffix is None:
        return iterator

    def _with_suffix(paths):
        ext = '.dylib'
        for candidate in paths:
            if candidate.endswith(ext):
                yield candidate[:-len(ext)] + suffix + ext
            else:
                yield candidate + suffix
            yield candidate

    return _with_suffix(iterator)
def dyld_override_search(name, env=None):
    """Yield override candidate paths for *name*, per dyld(1) semantics."""
    # If DYLD_FRAMEWORK_PATH is set and this dylib_name is a
    # framework name, use the first file that exists in the framework
    # path if any. If there is none go on to search the DYLD_LIBRARY_PATH
    # if any.
    framework = framework_info(name)

    if framework is not None:
        for path in dyld_framework_path(env):
            yield os.path.join(path, framework['name'])

    # If DYLD_LIBRARY_PATH is set then use the first file that exists
    # in the path. If none use the original name.
    for path in dyld_library_path(env):
        yield os.path.join(path, os.path.basename(name))
def dyld_executable_path_search(name, executable_path=None):
    """Expand an '@executable_path/' prefix relative to *executable_path*.

    Yields a single resolved path when *name* carries the prefix and an
    executable path is known; otherwise yields nothing.
    """
    prefix = '@executable_path/'
    if executable_path is not None and name.startswith(prefix):
        yield os.path.join(executable_path, name[len(prefix):])
def dyld_default_search(name, env=None):
    """Yield default candidate paths for *name*: the name itself, then the
    (environment or built-in) fallback framework and library directories."""
    yield name

    framework = framework_info(name)

    if framework is not None:
        fallback_framework_path = dyld_fallback_framework_path(env)

        if fallback_framework_path:
            for path in fallback_framework_path:
                yield os.path.join(path, framework['name'])
        else:
            # No environment override: use the man-page defaults.
            for path in _DEFAULT_FRAMEWORK_FALLBACK:
                yield os.path.join(path, framework['name'])

    fallback_library_path = dyld_fallback_library_path(env)

    if fallback_library_path:
        for path in fallback_library_path:
            yield os.path.join(path, os.path.basename(name))
    else:
        # No environment override: use the man-page defaults.
        for path in _DEFAULT_LIBRARY_FALLBACK:
            yield os.path.join(path, os.path.basename(name))
def dyld_find(name, executable_path=None, env=None):
    """
    Find a library or framework using dyld semantics.

    Candidates are tried in dyld order (overrides, @executable_path
    expansion, defaults), each with DYLD_IMAGE_SUFFIX variants first;
    the first existing file wins.  Raises ValueError when none exists.
    """
    name = _ensure_utf8(name)
    executable_path = _ensure_utf8(executable_path)
    for path in dyld_image_suffix_search(chain(
                dyld_override_search(name, env),
                dyld_executable_path_search(name, executable_path),
                dyld_default_search(name, env),
            ), env):
        if os.path.isfile(path):
            return path
    raise ValueError("dylib %s could not be found" % (name,))
def framework_find(fn, executable_path=None, env=None):
    """
    Find a framework using dyld semantics in a very loose manner.

    Will take input such as:
        Python
        Python.framework
        Python.framework/Versions/Current
    """
    try:
        # Try the name as given first.
        return dyld_find(fn, executable_path=executable_path, env=env)
    except ValueError:
        pass
    # Normalise to '<Name>.framework/<Name>' and retry; any ValueError
    # from this second attempt propagates to the caller.
    fmwk_index = fn.rfind('.framework')
    if fmwk_index == -1:
        fmwk_index = len(fn)
        fn += '.framework'
    fn = os.path.join(fn, os.path.basename(fn[:fmwk_index]))
    return dyld_find(fn, executable_path=executable_path, env=env)
| timeyyy/PyUpdater | pyupdater/vendor/PyInstaller/lib/macholib/dyld.py | Python | bsd-2-clause | 5,109 |
#-*- coding: utf-8 -*-
from __future__ import division
# Ultimate load of a masonry bridge according to thesis of Alix Grandjean: "Capacité portante de ponts en arc en maçonnerie de pierre naturelle". École Polytechnique Féderale de Lausanne.
#See Annexe 14 page 235
__author__= "Luis C. Pérez Tato (LCPT) and Ana Ortega (AOO)"
__copyright__= "Copyright 2015, LCPT and AOO"
__license__= "GPL"
__version__= "3.0"
__email__= "l.pereztato@gmail.com"
import math
import sys
import rough_calculations.masonryVault
# Verification script: ultimate load of a stone masonry arch bridge.
# Values transcribed from the thesis example (Annexe 14, page 235);
# do not "simplify" the expressions -- they mirror the printed formulas.

#Coefficients of polynomial
f= -1.47e-3 #-1.47e-12
j= 4.4e-2 #4.4e-8
k= -5.46e-1 #-5.46e-4
r= 3.238
#x coordinates of the hinges.
xA= 3.156
xC= 6.175
xD= 10.996
xB= 14.001
d= 1 # arch thickness.
v= 4 # arch width
L= 15 #arch span
hR= 9 #??
hS= 0.5 #??
#Material parameters.
alpha= 0.726 #??
beta= 6.095 #??
#Self weight
PPR= 18e3 #N/m3
PPS= 20e3 #N/m3
#Geometric parameters.
a= xC-xA
b= xB-xD
# Warning!. We change the sign of the
# angle to make it equal to that in
# the example (Thesis. Annexe 14. page 235).
archGeom= rough_calculations.masonryVault.archGeometry([f,j,k,r],[xA,xC,xD,xB])
gammaD= -archGeom.calcGamma(xD)
hA= archGeom.yAxis(xA)
hC= archGeom.yAxis(xC)
hD= archGeom.yAxis(xD)
hB= archGeom.yAxis(xB)
LR= xB-xA
hHalfL= archGeom.yAxis(L/2)
#Filling characteristics
angPhi= math.radians(30)
cohesion= 0
mp= 0.33 #Correction factors.
mc= 0.01
#Modèle de charge du traffic routier.
delta= math.radians(30)
hRcle= 1.5 # Hauteur du remplissage sur la clé de la voûte (m).
Q= 160000 # Charge ponctuelle due au trafic (N).
qrep= 0.005e6 # Charge uniformément repartie due au trafic (Pa).
# Loads
eta= v*(PPS*hS*LR+PPR*(hR*LR+archGeom.aux1(xB,xA)))
phi= v*(PPS*hS*a+PPR*(hR*a+archGeom.aux1(xC,xA)))
psi= v*(PPS*hS*b+PPR*(hR*b+archGeom.aux1(xB,xD)))
etaW= v*(PPS*hS*pow(LR,2)/2+PPR*(hR*pow(LR,2)/2-f*pow(xB,6)/30-j*pow(xB,5)/20-k*pow(xB,4)/12-r*pow(xB,3)/6-f*pow(xA,6)/6-j*pow(xA,5)/5+f*pow(xA,5)*xB/5-k*pow(xA,4)/4+j*pow(xA,4)*xB/4-r*pow(xA,3)/3+k*pow(xA,3)*xB/3+r*pow(xA,2)*xB/2))
phiS= v*(PPS*hS*pow(a,2)/2+PPR*(hR*pow(a,2)/2-f*pow(xC,6)/30-j*pow(xC,5)/20-k*pow(xC,4)/12-r*pow(xC,3)/6-f*pow(xA,6)/6-j*pow(xA,5)/5+f*pow(xA,5)*xC/5-k*pow(xA,4)/4+j*pow(xA,4)*xC/4-r*pow(xA,3)/3+k*pow(xA,3)*xC/3+r*pow(xA,2)*xC/2))
psiT= v*(PPS*hS*pow(b,2)/2+PPR*(hR*pow(b,2)/2-f*pow(xD,6)/30-j*pow(xD,5)/20-k*pow(xD,4)/12-r*pow(xD,3)/6-f*pow(xB,6)/6-j*pow(xB,5)/5+f*pow(xB,5)*xD/5-k*pow(xB,4)/4+j*pow(xB,4)*xD/4-r*pow(xB,3)/3+k*pow(xB,3)*xD/3+r*pow(xB,2)*xD/2))
#Earth pressure of filling.
Kp= pow(math.tan(math.pi/4+angPhi/2),2)
Kc= 2*math.sqrt(Kp)
R= v*(Kp*mp*(hS*PPS*(hD-hB)+PPR*hR*(hD-hB)-PPR*(hD*hD/2-hB*hB/2))+cohesion*mc*Kc*(hD-hB))
RzB= v*(Kp*mp*(hS*PPS*(hD*hD/2-hB*hB/2-hB*(hD-hB)))+PPR*(hR*(hD*hD/2-hB*hB/2)-hR*hB*(hD-hB)-pow(hD,3)/3+pow(hB,3)/3+hB*(hD*hD/2-hB*hB/2))+cohesion*mc*Kc*(hD*hD/2-hB*hB/2-hB*(hD-hB)))
RzD= v*(Kp*mp*(hS*PPS*(hD*(hD-hB)-hD*hD/2+hB*hB/2))+PPR*(hR*hD*(hD-hB)-hR*(hD*hD/2-hB*hB/2)-hD*(hD*hD/2-hB*hB/2)+pow(hD,3)/3-pow(hB,3)/3)+cohesion*mc*Kc*(hD*(hD-hB)-hD*hD/2+hB*hB/2))
vQt= rough_calculations.masonryVault.vQtrans(v,delta,hRcle)
qtrans= Q/vQt
lQt= rough_calculations.masonryVault.lQtrans(a,delta,hRcle)
X= qtrans/lQt
# Résistance
Nadmis= -1.25e6 #Effort axial admisible
Madmis= rough_calculations.masonryVault.diagInteraction(Nadmis,d,v,alpha,beta)
E= rough_calculations.masonryVault.calcE6p27(X,qrep,L,LR,v,lQt,a,b,hA,hB,hC,hD,xA)
F= rough_calculations.masonryVault.calcF6p28(R,LR,a,b,eta,phiS,etaW,psiT,Madmis,Madmis,Madmis,RzB,RzD,hA,hB,hC,hD)
G= rough_calculations.masonryVault.calcG6p29(X,qrep,L,LR,v,lQt,a,b,hA,hB,hC,hD,xA,gammaD)
H= rough_calculations.masonryVault.calcH6p30(LR,a,eta,psi,phiS,etaW,Madmis,Madmis,Madmis,RzB,hA,hB,hC,hD,gammaD)
n= rough_calculations.masonryVault.calcn6p32(alpha,beta,d,v,E,F,G,H)
#rough_calculations.masonryVault.printVouteResults(L, f, j, k, r, a,b,gammaD,hA,hB,hC,hD,LR,d,v,hR,hS,hHalfL,alpha,beta,PPR,PPS,eta,etaW,phi,phiS,psi,psiT,mp,Kp,mc,Kc,R, RzB, RzD,qtrans,X,Nadmis,Madmis,E,F,G,H,n)

# Regression check against the reference result from the thesis example.
nTeor= 8.47812250571
ratio1= (n-nTeor)/nTeor

import os
from miscUtils import LogMessages as lmsg
fname= os.path.basename(__file__)
if abs(ratio1)<1e-5:
  print "test ",fname,": ok."
else:
  lmsg.error(fname+' ERROR.')
| lcpt/xc | verif/tests/rough_calculations/masonry_bridge_verif_01.py | Python | gpl-3.0 | 4,218 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2012 University of Oslo, Norway
#
# This file is part of Cerebrum.
#
# Cerebrum is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Cerebrum is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Cerebrum; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""The part of CIS which handles authentication of Services, in addition to
access control. The access control is mostly a wrapper around BofhdAuth that
was originally used by bofhd.
Authentication
--------------
To add authentication for all public methods in a Service class::
from Cerebrum.modules.cis import SoapListener, auth
class NewService(SoapListener.BasicSoapServer):
'''Your Service class that requires authentication'''
# The session ID is put in the SOAP headers, and is required for the
# authentication functionality:
__in_header__ = SoapListener.SessionHeader
# All your public methods here
# Add auth events to the service:
# This will check if the client is authenticated and raise
# NotAuthenticatedErrors for everyone that are not authenticated.
NewService.event_manager.add_listener('method_call',
auth.on_method_authentication)
# ...
# Add the proper auth service to the list of services, to make the auth
# methods callable (otherwise you would never be able to authenticate).
services = [auth.PasswordAuthenticationService, NewService]
server = SoapListener.TwistedSoapStarter(applications = services, ...)
server.run()
TODO: add documentation for how to only require authentication for single
public methods.
Access control
--------------
It is recommended to check the authorizations in the cerebrum specific
classes, and not in the service classes. The reason is that we could find out
more about the operator and the environment when we're in the Cerebrum class.
To support access control, simply make use of BofhdAuth in the same way as for
the bofh daemon. CIS handles the exceptions from BofhdAuth and gives the
client proper error messages.
TODO: It might be necessary to create own BofhdAuth classes that has CIS
specific functionality in the future. Don't know were it should be put, but
probably not here.
To support access control, add the following to your Cerebrum class:
from Cerebrum.modules.bofhd.auth import BofhdAuth
from Cerebrum.modules.bofhd.errors import PermissionDenied
class CerebrumFunctions(object):
__init__(self, operator_id):
self.auth = BofhdAuth(self.db)
self.operator_id = operator_id
def get_something(self, data):
# This could raise PermissionDenied, which is then handled by CIS:
self.auth.can_access_data(self.operator_id, data)
...
"""
from __future__ import unicode_literals
from mx import DateTime
from twisted.python import log
from rpclib.model.primitive import Unicode
from rpclib.decorator import rpc
import cereconf
from Cerebrum import Errors
from Cerebrum import QuarantineHandler
from Cerebrum.Utils import Factory
from Cerebrum.modules.cis import SoapListener
from Cerebrum.modules.cis.faults import (AuthenticationError,
NotAuthenticatedError)
class Authenticator(object):
    """Represents an authenticated entity.

    Subclass for additional functionality, e.g. different timeout or
    expiration policies.
    """

    def __init__(self, id):
        """Initialise with the identifier of the authenticated entity.

        Raises AuthenticationError for an empty identifier.
        """
        if not id:
            raise AuthenticationError('Empty identitifer is not allowed')
        self.authenticated = True
        self.id = id
        self.started = DateTime.now()
        self.lastAccessed = DateTime.now()
        # TODO: more data we would need?

    def expire(self):
        """Deauthenticate, log out."""
        self.authenticated = False
        self.id = None

    def expired(self):
        """Return True when the authentication is no longer valid.

        Subclass this to support other ways of expiring.
        """
        # TODO: also check self.started against some expiry interval, or
        # do we only trust twisted's session timeout?
        return not self.authenticated
class AuthenticationService(SoapListener.BasicSoapServer):
    """A generic authentication service. Subclass it to support different ways
    of authenticating.

    Note that subclassing does not work in the standard way here, since the
    class is never instantiated: the L{ctx} instance refers to
    L{ctx.service_class} instead. The concrete authentication method therefore
    has to be defined in each subclass, and should delegate to the static
    function _authenticate.

    TODO: there is probably a cleaner way to solve the subclassing here.
    """
    __in_header__ = SoapListener.SessionHeader
    __out_header__ = SoapListener.SessionHeader

    # What class to use for the authentication data
    authenticatorFactory = Authenticator

    # TODO this is a hack, check if these attributes are needed
    site = None

    @staticmethod
    def _authenticate(ctx, id):
        """Mark the given entity as authenticated, in a brand new session.

        Should only be called internally by an Auth Service that has already
        verified the client's credentials - the entity is blindly accepted as
        authenticated. A *new* twisted session is created rather than reusing
        the old one, since reuse opens up for session fixation attacks. That
        is mostly a web browser concern, but still. Twisted's sessions also
        give us e.g. timeouts for free.

        @type id: string or mixed
        @param id: An identifier of the authenticated client, user or any
            entity. Could for instance be its username.
        """
        log.msg('DEBUG: Authenticated entity_id:%s' % id)
        previous = ctx.udc['session']
        # A freshly created session replaces the old one.
        fresh = ctx.service_class.site.makeSession()
        # TODO: create a __copy__ in our SessionCacher to use instead
        for key in previous:
            fresh[key] = previous[key]
        fresh['authenticated'] = ctx.service_class.authenticatorFactory(id)
        ctx.udc['session'] = fresh
        fresh.touch()
        previous.expire()
        return fresh

    @staticmethod
    def _deauthenticate(ctx):
        """Log out of this service by expiring the session's authentication."""
        session = ctx.udc['session']
        auth = session.get('authenticated', None)
        if auth:
            log.msg("DEBUG: deauthenticating: %s" % auth.id)
            auth.expire()
            del session['authenticated']
# Hook twisted session handling into every method on the service: the session
# is fetched/created before each call and written back on return.
AuthenticationService.event_manager.add_listener(
    'method_call', SoapListener.on_method_call_session)
AuthenticationService.event_manager.add_listener(
    'method_return_object', SoapListener.on_method_exit_session)
class PasswordAuthenticationService(AuthenticationService):
    """Authentication Service where the auth is handled by username and
    passwords.
    """

    # Standard message to return in case of errors. Deliberately vague, so
    # clients can't tell whether the username or the password was wrong.
    error_msg = 'Unknown username or password'

    @rpc(Unicode, Unicode, _returns=Unicode, _throws=AuthenticationError)
    def authenticate(ctx, username, password):
        """The authentication method, as some methods require you to be
        authenticated before use. Please add your username and password.

        @type username: Unicode
        @param username: Your username. The user must exist in Cerebrum.

        @type password: Unicode
        @param password: Your password.

        @rtype: Unicode
        @return:
            The new authenticated session ID. It is also available in the
            returned SOAP headers, as session_id.
        """
        if not username or not password:
            raise AuthenticationError(ctx.service_class.error_msg)
        # TODO: should add some short, random delay at startup, to avoid
        # letting people know what went wrong, e.g. if a username exists or
        # not.
        # TODO: need to limit brute force attacks somehow, e.g. block per IP.
        db = Factory.get('Database')()
        try:
            account = Factory.get('Account')(db)
            constant = Factory.get('Constants')(db)
            try:
                account.find_by_name(username)
            except Errors.NotFoundError:
                log.msg("INFO: auth: no such user: %s" % username)
                raise AuthenticationError(ctx.service_class.error_msg)
            # TODO: bofhd.py's bofhd_login has much functionality - put much
            # of it into the Account class to be able to use same code in
            # both places? For instance::
            #
            #   ac.find_by_name(username)  # could raise exception
            #   ac.authenticate(password)  # could raise exception
            #
            # If success, we could create session etc.

            # Check quarantines
            quarantines = []
            for qrow in account.get_entity_quarantine(only_active=True):
                # The quarantine found in this row is currently active. Some
                # quarantine types may not restrict access to bofhd even if
                # they otherwise result in lock. Check therefore whether a
                # found quarantine should be appended.
                #
                # FIXME, Jazz 2008-04-08:
                # This should probably be based on spreads or some such
                # mechanism, but quarantinehandler and the import routines
                # don't support a more appropriate solution yet
                if not str(constant.Quarantine(qrow['quarantine_type'])) \
                        in cereconf.BOFHD_NONLOCK_QUARANTINES:
                    quarantines.append(qrow['quarantine_type'])
            qh = QuarantineHandler.QuarantineHandler(db, quarantines)
            if qh.should_skip() or qh.is_locked():
                qua_repr = ", ".join(constant.Quarantine(q).description
                                     for q in quarantines)
                log.msg("INFO: user has active quarantine. Access denied: %s"
                        % qua_repr)
                raise AuthenticationError(ctx.service_class.error_msg)
            # User exists here, check password
            if not account.verify_auth(password):
                log.msg("INFO: Failed login for %s." % username)
                raise AuthenticationError(ctx.service_class.error_msg)
            entity_id = account.entity_id
        finally:
            # Always release the database connection. The previous version
            # only closed it on the error paths, leaking one connection for
            # every successful login.
            db.close()
        ses = ctx.service_class._authenticate(ctx, entity_id)
        return ses.uid

    @rpc()
    def deauthenticate(ctx):
        """Logs out of the service. No input is needed, makes use of the
        session id in the SOAP headers.
        """
        ctx.service_class._deauthenticate(ctx)
class UsernameAuthenticationService(AuthenticationService):
    """Authentication Service where the auth is handled by username only.

    When using this service you are trusting the security of your client.
    """

    # Standard message to return in case of errors:
    error_msg = 'Unknown username'

    @rpc(Unicode, _returns=Unicode, _throws=AuthenticationError)
    def authenticate(ctx, username):
        """The authentication method, as some methods require you to be
        authenticated before use.

        @type username: Unicode
        @param username: Your username. The user must exist in Cerebrum.

        @rtype: Unicode
        @return:
            The new authenticated session ID. It is also available in the
            returned SOAP headers, as session_id.
        """
        if not username:
            raise AuthenticationError(ctx.service_class.error_msg)
        db = Factory.get('Database')()
        try:
            account = Factory.get('Account')(db)
            constant = Factory.get('Constants')(db)
            try:
                account.find_by_name(username)
            except Errors.NotFoundError:
                log.msg("INFO: auth: no such user: %s" % username)
                raise AuthenticationError(ctx.service_class.error_msg)
            # Check quarantines
            quarantines = []
            for qrow in account.get_entity_quarantine(only_active=True):
                if not str(constant.Quarantine(qrow['quarantine_type'])) \
                        in cereconf.BOFHD_NONLOCK_QUARANTINES:
                    quarantines.append(qrow['quarantine_type'])
            qh = QuarantineHandler.QuarantineHandler(db, quarantines)
            if qh.should_skip() or qh.is_locked():
                qua_repr = ", ".join(constant.Quarantine(q).description
                                     for q in quarantines)
                log.msg("INFO: user has active quarantine. Access denied: %s"
                        % qua_repr)
                raise AuthenticationError(ctx.service_class.error_msg)
            # User exists, let's authenticate it
            entity_id = account.entity_id
        finally:
            # Close the database connection on every path; the previous
            # version never closed it at all, leaking a connection per call.
            db.close()
        ses = ctx.service_class._authenticate(ctx, entity_id)
        return ses.uid

    @rpc()
    def deauthenticate(ctx):
        """Logs out of the service. No input is needed, makes use of the
        session id in the SOAP headers.
        """
        ctx.service_class._deauthenticate(ctx)
#
# Events
#
def on_method_authentication(ctx):
    """Event for checking that the client is authenticated before calling a
    method. This event should be added to every Service class that require
    authentication."""
    log.msg('DEBUG: on_authentication')
    try:
        auth = ctx.udc['session']['authenticated']
    except KeyError:
        auth = None
    if auth is not None:
        log.msg('DEBUG: auth_%s' % auth.id)
        # Still authenticated and not timed out - let the call through.
        if not auth.expired():
            return True
    log.msg('INFO: Not authenticated')
    raise NotAuthenticatedError()
| unioslo/cerebrum | Cerebrum/modules/cis/auth.py | Python | gpl-2.0 | 14,400 |
import pytest

try:
    from jose.backends.cryptography_backend import CryptographyRSAKey
    from jose.backends.rsa_backend import RSAKey as PurePythonRSAKey
except ImportError:
    # One (or both) backends unavailable; the skipif on the test class below
    # then disables the whole compatibility suite.
    PurePythonRSAKey = CryptographyRSAKey = None

from jose.constants import ALGORITHMS
from jose.exceptions import JWEError

from .test_RSA import PRIVATE_KEYS

# Both RSA backend implementations under test, labelled for pytest test ids.
CRYPTO_BACKENDS = (
    pytest.param(PurePythonRSAKey, id="python_rsa"),
    pytest.param(CryptographyRSAKey, id="pyca/cryptography"),
)
# PEM encodings that keys are round-tripped through.
ENCODINGS = ("PKCS1", "PKCS8")
@pytest.mark.backend_compatibility
@pytest.mark.skipif(
    None in (PurePythonRSAKey, CryptographyRSAKey),
    reason="Multiple crypto backends not available for backend compatibility tests",
)
class TestBackendRsaCompatibility:
    """Cross-backend parity: signatures, PEM serialization and key wrapping
    produced by one RSA backend must be accepted/reproduced by the other."""

    @pytest.mark.parametrize("BackendSign", CRYPTO_BACKENDS)
    @pytest.mark.parametrize("BackendVerify", CRYPTO_BACKENDS)
    @pytest.mark.parametrize("private_key", PRIVATE_KEYS)
    def test_signing_parity(self, BackendSign, BackendVerify, private_key):
        """A signature from one backend verifies under the other."""
        signer = BackendSign(private_key, ALGORITHMS.RS256)
        verifier = BackendVerify(private_key, ALGORITHMS.RS256).public_key()
        payload = b"test"
        signature = signer.sign(payload)
        # A genuine signature must verify...
        assert verifier.verify(payload, signature)
        # ...and garbage must not.
        assert not verifier.verify(payload, b"n" * 64)

    @pytest.mark.parametrize("encoding", ENCODINGS)
    @pytest.mark.parametrize("BackendFrom", CRYPTO_BACKENDS)
    @pytest.mark.parametrize("BackendTo", CRYPTO_BACKENDS)
    @pytest.mark.parametrize("private_key", PRIVATE_KEYS)
    def test_public_key_to_pem(self, BackendFrom, BackendTo, encoding, private_key):
        """Public-key PEM output is byte-identical across backends."""
        backend_a = BackendFrom(private_key, ALGORITHMS.RS256)
        backend_b = BackendTo(private_key, ALGORITHMS.RS256)
        pem_a = backend_a.public_key().to_pem(pem_format=encoding).strip()
        pem_b = backend_b.public_key().to_pem(pem_format=encoding).strip()
        assert pem_a == pem_b

    @pytest.mark.parametrize("encoding", ENCODINGS)
    @pytest.mark.parametrize("BackendFrom", CRYPTO_BACKENDS)
    @pytest.mark.parametrize("BackendTo", CRYPTO_BACKENDS)
    @pytest.mark.parametrize("private_key", PRIVATE_KEYS)
    def test_private_key_to_pem(self, BackendFrom, BackendTo, encoding, private_key):
        """Private-key PEM output is byte-identical across backends."""
        import base64

        def der_payload(pem):
            # Drop the -----BEGIN/END----- lines and decode the base64 body.
            return base64.b64decode(pem[pem.index(b"\n") : pem.rindex(b"\n")])

        pem_a = BackendFrom(private_key, ALGORITHMS.RS256).to_pem(pem_format=encoding).strip()
        pem_b = BackendTo(private_key, ALGORITHMS.RS256).to_pem(pem_format=encoding).strip()
        # Comparing the decoded DER bodies first gives a clearer failure than
        # diffing whole PEM blobs.
        assert der_payload(pem_a) == der_payload(pem_b)
        assert pem_a == pem_b

    @pytest.mark.parametrize("encoding_save", ENCODINGS)
    @pytest.mark.parametrize("encoding_load", ENCODINGS)
    @pytest.mark.parametrize("BackendFrom", CRYPTO_BACKENDS)
    @pytest.mark.parametrize("BackendTo", CRYPTO_BACKENDS)
    @pytest.mark.parametrize("private_key", PRIVATE_KEYS)
    def test_public_key_load_cycle(self, BackendFrom, BackendTo, encoding_save, encoding_load, private_key):
        """A public key saved in one encoding loads in the other backend and
        re-serializes to the same reference PEM."""
        source = BackendFrom(private_key, ALGORITHMS.RS256)
        reference_pem = source.public_key().to_pem(pem_format=encoding_save).strip()
        transport_pem = source.public_key().to_pem(pem_format=encoding_load).strip()
        reloaded = BackendTo(transport_pem, ALGORITHMS.RS256)
        assert reloaded.to_pem(encoding_save).strip() == reference_pem

    @pytest.mark.parametrize("encoding_save", ENCODINGS)
    @pytest.mark.parametrize("encoding_load", ENCODINGS)
    @pytest.mark.parametrize("BackendFrom", CRYPTO_BACKENDS)
    @pytest.mark.parametrize("BackendTo", CRYPTO_BACKENDS)
    @pytest.mark.parametrize("private_key", PRIVATE_KEYS)
    def test_private_key_load_cycle(self, BackendFrom, BackendTo, encoding_save, encoding_load, private_key):
        """A private key round-trips across backends and encodings."""
        source = BackendFrom(private_key, ALGORITHMS.RS256)
        reference_pem = source.to_pem(pem_format=encoding_save).strip()
        transport_pem = source.to_pem(pem_format=encoding_load).strip()
        reloaded = BackendTo(transport_pem, ALGORITHMS.RS256)
        assert reloaded.to_pem(encoding_save).strip() == reference_pem

    @pytest.mark.parametrize("backend_wrap", CRYPTO_BACKENDS)
    @pytest.mark.parametrize("backend_unwrap", CRYPTO_BACKENDS)
    @pytest.mark.parametrize("algorithm", [alg for alg in ALGORITHMS.RSA_KW if alg in ALGORITHMS.SUPPORTED])
    @pytest.mark.parametrize("private_key", PRIVATE_KEYS)
    def test_key_wrap_parity(self, backend_wrap, backend_unwrap, private_key, algorithm):
        """A key wrapped by one backend unwraps under the other."""
        if PurePythonRSAKey in (backend_wrap, backend_unwrap) and algorithm in (
            ALGORITHMS.RSA_OAEP,
            ALGORITHMS.RSA_OAEP_256,
        ):
            pytest.skip("Pure RSA does not support OAEP")
        wrapper = backend_wrap(private_key, algorithm).public_key()
        unwrapper = backend_unwrap(private_key, algorithm)
        plain_key = b"test"
        wrapped_key = wrapper.wrap_key(plain_key)
        # Round-trips back to the original key material...
        assert unwrapper.unwrap_key(wrapped_key) == plain_key
        # ...and corrupt ciphertext is rejected.
        with pytest.raises(JWEError):
            unwrapper.unwrap_key(b"n" * 64)
| mpdavis/python-jose | tests/algorithms/test_RSA_compat.py | Python | mit | 5,282 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# See include/mesos/scheduler.hpp, include/mesos/executor.hpp and
# include/mesos/mesos.proto for more information documenting this
# interface.
import sys
import _mesos
# Base class for Mesos schedulers. Users' schedulers should extend
# this class to get default implementations of methods they don't
# override.
class Scheduler:
    """Base class for Mesos schedulers.

    Users' schedulers should extend this class to get default (no-op)
    implementations of the callbacks they don't override.
    """
    def registered(self, driver, frameworkId): pass
    def resourceOffers(self, driver, offers): pass
    def offerRescinded(self, driver, offerId): pass
    def statusUpdate(self, driver, status): pass
    def frameworkMessage(self, driver, message): pass
    def slaveLost(self, driver, slaveId): pass

    def error(self, driver, code, message):
        """Default error handler: report the error on stderr.

        Kept concrete (not abstract) so subclasses aren't forced to
        override it.
        """
        # sys.stderr.write() behaves identically to the original Python
        # 2-only "print >> sys.stderr" statement, but also parses on
        # Python 3.
        sys.stderr.write("Error from Mesos: %s (code: %d)\n" % (message, code))
# Interface for Mesos scheduler drivers. Users may wish to extend this
# class in mock objects for tests.
class SchedulerDriver:
    """Interface for Mesos scheduler drivers.

    All methods are no-op stubs; users may extend this class in mock
    objects for tests.
    """
    def start(self): pass
    def stop(self, failover=False): pass
    def abort(self) : pass
    def join(self): pass
    def run(self): pass
    def requestResources(self, requests): pass
    def launchTasks(self, offerId, tasks, filters=None): pass
    def killTask(self, taskId): pass
    def reviveOffers(self): pass
    def sendFrameworkMessage(self, slaveId, executorId, data): pass
# Base class for Mesos executors. Users' executors should extend this
# class to get default implementations of methods they don't override.
class Executor:
    """Base class for Mesos executors.

    Users' executors should extend this class to get default (no-op)
    implementations of the callbacks they don't override.
    """
    def registered(self, driver, executorInfo, frameworkId, frameworkInfo,
                   slaveId, slaveInfo): pass
    def launchTask(self, driver, task): pass
    def killTask(self, driver, taskId): pass
    def frameworkMessage(self, driver, message): pass
    def shutdown(self, driver): pass

    def error(self, driver, code, message):
        """Default error handler: report the error on stderr.

        Kept concrete (not abstract) so subclasses aren't forced to
        override it.
        """
        # sys.stderr.write() behaves identically to the original Python
        # 2-only "print >> sys.stderr" statement, but also parses on
        # Python 3.
        sys.stderr.write("Error from Mesos: %s (code: %d)\n" % (message, code))
# Interface for Mesos executor drivers. Users may wish to extend this
# class in mock objects for tests.
class ExecutorDriver:
    """Interface for Mesos executor drivers.

    All methods are no-op stubs; users may extend this class in mock
    objects for tests.
    """
    def start(self): pass
    def stop(self): pass
    def abort(self): pass
    def join(self): pass
    def run(self): pass
    def sendStatusUpdate(self, status): pass
    def sendFrameworkMessage(self, data): pass
# The concrete drivers are implemented in the _mesos C extension; we only
# re-export them under the public names here.

# Alias the MesosSchedulerDriverImpl from _mesos. Ideally we would make this
# class inherit from SchedulerDriver somehow, but this complicates the C++
# code, and there seems to be no point in doing it in a dynamic language.
MesosSchedulerDriver = _mesos.MesosSchedulerDriverImpl

# Alias the MesosExecutorDriverImpl from _mesos. Ideally we would make this
# class inherit from ExecutorDriver somehow, but this complicates the C++
# code, and there seems to be no point in doing it in a dynamic language.
MesosExecutorDriver = _mesos.MesosExecutorDriverImpl
| samwhitlock/mesos-monitoring | src/python/src/mesos.py | Python | apache-2.0 | 3,736 |
import tweepy, re, json, operator
from tweepy import Stream
from tweepy.streaming import StreamListener
# Auth information for the Twitter API.
# NOTE(review): never commit real credentials to source control; load these
# placeholders from environment variables or an untracked config file.
consumer_key = 'your auth info'
consumer_secret = 'your auth info'
access_token = 'your auth info'
access_token_secret = 'your auth info'

# Tweepy handler for OAuth and the Twitter REST API.
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)
#streaming API
# Streaming API listener: appends every received message (raw JSON) to a file.
class MyListener(StreamListener):

    def on_data(self, data):
        """Called for each message from the stream; append it to blizz2.json.

        Returns True so the stream stays connected, even after write errors.
        """
        try:
            with open('blizz2.json', 'a') as f:
                f.write(data)
            return True
        except Exception as e:
            # Catch Exception, not BaseException: the original caught
            # BaseException, which also swallowed KeyboardInterrupt and
            # SystemExit, making the stream impossible to stop cleanly.
            print("Error on_data: %s" % str(e))
            return True

    def on_error(self, status):
        # NOTE(review): consider returning False when status == 420 to stop
        # reconnecting while rate-limited, per the Twitter streaming docs.
        print(status)
        return True
# Open the stream and track all tweets mentioning @DustinBrowder.
twitter_stream = Stream(auth, MyListener())
twitter_stream.filter(track=['@DustinBrowder'])
#---------
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.