| repo_name (string, 5–100 chars) | path (string, 4–231 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6–947k) | score (float64, 0–0.34) | prefix (string, 0–8.16k chars) | middle (string, 3–512 chars) | suffix (string, 0–8.17k chars) |
|---|---|---|---|---|---|---|---|---|
Cgruppo/oppia
|
core/domain/exp_domain_test.py
|
Python
|
apache-2.0
| 19,626
| 0.000357
|
# coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for exploration domain objects and methods defined on them."""
__author__ = 'Sean Lip'
from core.domain import exp_domain
from core.domain import exp_services
from core.domain import param_domain
from core.tests import test_utils
import feconf
import utils
class ExplorationDomainUnitTests(test_utils.GenericTestBase):
"""Test the exploration domain object."""
def test_validation(self):
"""Test validation of explorations."""
exploration = exp_domain.Exploration.create_default_exploration(
'exp_id', '', '')
exploration.init_state_name = ''
exploration.states = {}
with self.assertRaisesRegexp(
utils.ValidationError, 'between 1 and 50 characters'):
exploration.validate()
exploration.title = 'Hello #'
with self.assertRaisesRegexp(
utils.ValidationError, 'Invalid character #'):
exploration.validate()
exploration.title = 'Title'
with self.assertRaisesRegexp(
utils.ValidationError, 'between 1 and 50 characters'):
exploration.validate()
exploration.category = 'Category'
# Note: If '/' ever becomes a valid state name, ensure that the rule
# editor frontend template is fixed -- it currently uses '/' as a
# sentinel for an invalid state name.
bad_state = exp_domain.State.create_default_state('/')
exploration.states = {'/': bad_state}
with self.assertRaisesRegexp(
utils.ValidationError, 'Invalid character / in a state name'):
exploration.validate()
new_state = exp_domain.State.create_default_state('ABC')
new_state.update_interaction_id('TextInput')
# The 'states' property must be a non-empty dict of states.
exploration.states = {}
with self.assertRaisesRegexp(
utils.ValidationError, 'exploration has no states'):
exploration.validate()
exploration.states = {'A string #': new_state}
with self.assertRaisesRegexp(
utils.ValidationError, 'Invalid character # in a state name'):
exploration.validate()
exploration.states = {'A string _': new_state}
with self.assertRaisesRegexp(
utils.ValidationError, 'Invalid character _ in a state name'):
exploration.validate()
exploration.states = {'ABC': new_state}
with self.assertRaisesRegexp(
utils.ValidationError, 'has no initial state name'):
exploration.validate()
exploration.init_state_name = 'initname'
with self.assertRaisesRegexp(
utils.ValidationError,
r'There is no state in \[\'ABC\'\] corresponding to '
'the exploration\'s initial state name initname.'):
exploration.validate()
exploration.states = {exploration.init_state_name: new_state}
with self.assertRaisesRegexp(
utils.ValidationError, 'destination ABC is not a valid'):
exploration.validate()
exploration.states = {
exploration.init_state_name: exp_domain.State.create_default_state(
exploration.init_state_name)
}
exploration.states[exploration.init_state_name].update_interaction_id(
'TextInput')
exploration.validate()
exploration.language_code = 'fake_code'
with self.assertRaisesRegexp(
utils.ValidationError, 'Invalid language_code'):
exploration.validate()
exploration.language_code = 'English'
with self.assertRaisesRegexp(
utils.ValidationError, 'Invalid language_code'):
exploration.validate()
exploration.language_code = 'en'
exploration.validate()
|
exploration.param_specs = 'A string'
with self.assertRaisesRegexp(
utils.ValidationError, 'param_specs to be a dict'):
exploration.validate()
exploration.param_sp
|
ecs = {
'@': param_domain.ParamSpec.from_dict({'obj_type': 'Int'})
}
with self.assertRaisesRegexp(
utils.ValidationError, 'Only parameter names with characters'):
exploration.validate()
exploration.param_specs = {
'notAParamSpec': param_domain.ParamSpec.from_dict(
{'obj_type': 'Int'})
}
exploration.validate()
def test_objective_validation(self):
"""Test that objectives are validated only in 'strict' mode."""
self.save_new_valid_exploration(
'exp_id', 'user@example.com', title='Title', category='Category',
objective='')
exploration = exp_services.get_exploration_by_id('exp_id')
exploration.validate()
with self.assertRaisesRegexp(
utils.ValidationError, 'objective must be specified'):
exploration.validate(strict=True)
exploration.objective = 'An objective'
# Link the start state to the END state in order to make the
# exploration valid.
exploration.states[exploration.init_state_name].interaction.handlers[
0].rule_specs[0].dest = feconf.END_DEST
exploration.validate(strict=True)
def test_is_demo_property(self):
"""Test the is_demo property."""
demo = exp_domain.Exploration.create_default_exploration(
'0', 'title', 'category')
self.assertEqual(demo.is_demo, True)
notdemo1 = exp_domain.Exploration.create_default_exploration(
'a', 'title', 'category')
self.assertEqual(notdemo1.is_demo, False)
notdemo2 = exp_domain.Exploration.create_default_exploration(
'abcd', 'title', 'category')
self.assertEqual(notdemo2.is_demo, False)
class StateExportUnitTests(test_utils.GenericTestBase):
"""Test export of states."""
def test_export_state_to_dict(self):
"""Test exporting a state to a dict."""
exploration = exp_domain.Exploration.create_default_exploration(
'A different exploration_id', 'A title', 'A category')
exploration.add_states(['New state'])
state_dict = exploration.states['New state'].to_dict()
expected_dict = {
'content': [{
'type': 'text',
'value': u''
}],
'interaction': {
'customization_args': {},
'handlers': [{
'name': u'submit',
'rule_specs': [{
'definition': {
u'rule_type': u'default'
},
'dest': 'New state',
'feedback': [],
'param_changes': [],
}]
}],
'id': None,
},
'param_changes': [],
}
self.assertEqual(expected_dict, state_dict)
class YamlCreationUnitTests(test_utils.GenericTestBase):
"""Test creation of explorations from YAML files."""
SAMPLE_YAML_CONTENT = (
"""author_notes: ''
blurb: ''
default_skin: conversation_v1
init_state_name: %s
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 4
skill_tags: []
states:
%s:
content:
- type: text
value: Welcome to the Oppia editor!<br><br>Anything you type here will be shown
to the learner playing your exploratio
|
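The Oppia test above drives a single domain object through a mutate, validate, assert-error loop: break one invariant, assert that validate() raises with the expected message, repair it, and move on. Below is a minimal, self-contained sketch of that pattern using only the standard library; the Exploration stand-in and its error messages are hypothetical, not Oppia's actual domain objects (which also use the older assertRaisesRegexp spelling):

```python
import unittest


class ValidationError(Exception):
    """Raised when a domain object fails validation."""


class Exploration(object):
    """Hypothetical stand-in for a domain object with validate()."""

    def __init__(self, title=''):
        self.title = title

    def validate(self):
        if not 1 <= len(self.title) <= 50:
            raise ValidationError(
                'Expected title to be between 1 and 50 characters.')
        if '#' in self.title:
            raise ValidationError('Invalid character # in title.')


class ExplorationValidationTest(unittest.TestCase):
    def test_validation(self):
        exploration = Exploration(title='')
        # Break one invariant, assert the message, fix it, continue.
        with self.assertRaisesRegex(ValidationError,
                                    'between 1 and 50 characters'):
            exploration.validate()
        exploration.title = 'Hello #'
        with self.assertRaisesRegex(ValidationError, 'Invalid character #'):
            exploration.validate()
        exploration.title = 'Title'
        exploration.validate()  # all invariants now hold


if __name__ == '__main__':
    unittest.main()
```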
ODiogoSilva/TriFusion
|
trifusion/ortho/OrthomclToolbox.py
|
Python
|
gpl-3.0
| 64,833
| 0.000586
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
#
# Copyright 2012 Unknown <diogo@arch>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
try:
from process.sequence import Alignment
from base.plotter import bar_plot, multi_bar_plot
from process.error_handling import KillByUser
except ImportError:
from trifusion.process.sequence import Alignment
from trifusion.base.plotter import bar_plot, multi_bar_plot
from trifusion.process.error_handling import KillByUser
from collections import OrderedDict, Counter
import pickle
import os
import sqlite3
from os.path import join
import random
import string
import copy
class Cluster(object):
""" Object for clusters of the OrthoMCL groups file. It is useful to set a
number of attributes that will make subsequent filtration and
processing much easier """
def __init__(self, line_string):
"""
To initialize a Cluster object, only a string compliant with the
format of a cluster in an OrthoMCL groups file has to be provided.
This line should contain the name of the group, a colon, and the
sequences belonging to that group separated by whitespace
:param line_string: String of a cluster
"""
# Initializing attributes for parse_string
self.name = None
self.sequences = None
self.species_frequency = {}
# Initializing attributes for apply filter
# If the value is different from None, this will inform downstream
# objects of whether this cluster is compliant with the specified
# gene_threshold
self.gene_compliant = None
# If the value is different from None, this will inform downstream
# objects of whether this cluster is compliant with the specified
# species_threshold
self.species_compliant = None
self.parse_string(line_string)
def parse_string(self, cluster_string):
"""
Parses the string and sets the group name and sequence list attributes
"""
fields = cluster_string.split(":")
# Setting the name and sequence list of the clusters
self.name = fields[0].strip()
self.sequences = fields[1].strip().split()
# Setting the gene frequency for each species in the cluster
self.species_frequency = Counter([field.split("|")[0] for field in
self.sequences])
def remove_taxa(self, taxa_list):
"""
Removes the taxa contained in taxa_list from self.sequences and
self.species_frequency
:param taxa_list: list, each element should be a taxon name
"""
self.sequences = [x for x in self.sequences if x.split("|")[0]
not in taxa_list]
self.species_frequency = dict((taxon, val) for taxon, val in
self.species_frequency.items()
if taxon not in taxa_list)
def apply_filter(self, gene_threshold, species_threshold):
"""
This method will update two Cluster attributes, self.gene_compliant
and self.species_compliant, which will inform downstream objects if
this cluster respects the gene and species thresholds
:param gene_threshold: Integer for the maximum number of gene copies
per species
:param species_threshold: Integer for the minimum number of species
present
"""
# Check whether cluster is compliant with species_threshold. Testing
# the threshold first avoids comparing against a None threshold.
if species_threshold and \
len(self.species_frequency) >= species_threshold:
self.species_compliant = True
else:
self.species_compliant = False
# Check whether cluster is compliant with gene_threshold
if gene_threshold and \
max(self.species_frequency.values()) <= gene_threshold:
self.gene_compliant = True
else:
self.gene_compliant = False
class OrthoGroupException(Exception):
pass
class GroupLight(object):
"""
Analogous to Group object but with several changes to reduce memory usage
"""
def __init__(self, groups_file, gene_threshold=None,
species_threshold=None, ns=None):
self.gene_threshold = gene_threshold if gene_threshold else None
self.species_threshold = species_threshold if species_threshold \
else None
|
# Attribute containing the list of included species
self.species_list = []
# Attribute tha
|
t will contain taxa to be excluded from analyses
self.excluded_taxa = []
self.species_frequency = []
# Attributes that will store the number (int) of cluster after gene and
# species filter
self.all_clusters = 0
self.num_gene_compliant = 0
self.num_species_compliant = 0
self.all_compliant = 0
# Attribute containing the total number of sequences
self.total_seqs = 0
# Attribute containing the maximum number of extra copies found in the
# clusters
self.max_extra_copy = 0
# Attribute with name of the group file, which will be an ID
self.name = os.path.abspath(groups_file)
self.table = groups_file.split(os.sep)[-1].split(".")[0]
# Initialize attribute containing the groups filtered using the gene and
# species threshold. This attribute can be updated at any time using
# the update_filtered_group method
self.filtered_groups = []
self._parse_groups(ns)
if type(self.species_threshold) is float:
self._get_sp_proportion()
def groups(self):
"""
Generator for group file. This replaces the self.groups attribute of
the original Group Object. Instead of loading the whole file into
memory, a generator is created to iterate over its contents. It may
run a bit slower but its a lot more memory efficient.
:return:
"""
file_handle = open(self.name)
for line in file_handle:
if line.strip() != "":
yield line.strip()
def iter_species_frequency(self):
"""
In order to prevent permanent changes to the species_frequency
attribute due to the filtering of taxa, this iterator should be used
instead of accessing the attribute directly. It creates a temporary
deepcopy of species_frequency, which is iterated over and may be modified.
"""
# Since the items of species_frequency are mutable, this ensures
# that even those objects are correctly cloned
sp_freq = copy.deepcopy(self.species_frequency)
for cl in sp_freq:
yield cl
def _remove_tx(self, line):
"""
Given a group line, remove all references to the excluded taxa
:param line: raw group file line
"""
new_line = "{}:".format(line.split(":")[0])
tx_str = "\t".join([x for x in line.split(":")[1].split() if
x.split("|")[0] not in self.excluded_taxa])
return new_line + tx_str
def _apply_filter(self, cl):
"""
Sets or updates the basic group statistics, such as the number of
orthologs compliant with the gene copy and minimum taxa filters.
:param cl: dictionary. Contains the number of occurrences for each
taxon present in the ortholog cluster
|
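Cluster.parse_string above splits a groups-file line on ':' and derives per-species gene counts from the 'taxon|sequence' prefixes; those counts then drive apply_filter. The parsing and threshold checks in isolation, on a made-up line (the group name and taxa are hypothetical):

```python
from collections import Counter

# Hypothetical OrthoMCL-style group line: "name: taxon|seq taxon|seq ...".
line = "group_1: speciesA|g1 speciesA|g2 speciesB|g7 speciesC|g3"

name, seq_field = line.split(":")
sequences = seq_field.strip().split()
species_frequency = Counter(seq.split("|")[0] for seq in sequences)

print(name.strip())       # group_1
print(species_frequency)  # Counter({'speciesA': 2, 'speciesB': 1, 'speciesC': 1})

# A cluster is species-compliant when it spans at least species_threshold
# species, and gene-compliant when no species exceeds gene_threshold copies.
gene_threshold, species_threshold = 2, 2
species_compliant = len(species_frequency) >= species_threshold     # True
gene_compliant = max(species_frequency.values()) <= gene_threshold  # True
```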
jbedorf/tensorflow
|
tensorflow/python/kernel_tests/where_op_test.py
|
Python
|
apache-2.0
| 8,003
| 0.010371
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.reverse_sequence_op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
import sys
import numpy as np
from tensorflow.python.client import session
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.platform import benchmark
from tensorflow.python.platform import test
class WhereOpTest(test.TestCase):
def _testWhere(self, x, truth, expected_err_re=None):
with self.cached_session(use_gpu=True):
ans = array_ops.where(x)
self.assertEqual([None, x.ndim], ans.get_shape().as_list())
if expected_err_re is None:
tf_ans = self.evaluate(ans)
self.assertAllClose(tf_ans, truth, atol=1e-10)
else:
with self.assertRaisesOpError(expected_err_re):
self.evaluate(ans)
def testWrongNumbers(self):
with self.session(use_gpu=True):
with self.assertRaises(ValueError):
array_ops.where([False, True], [1, 2], None)
with self.assertRaises(ValueError):
array_ops.where([False, True], None, [1, 2])
@test_util.run_deprecated_v1
def testBasicVec(self):
x = np.asarray([True, False])
truth = np.asarray([[0]], dtype=np.int64)
self._testWhere(x, truth)
x = np.asarray([False, True, False])
truth = np.asarray([[1]], dtype=np.int64)
self._testWhere(x, truth)
x = np.asarray([False, False, True, False, True])
truth = np.asarray([[2], [4]], dtype=np.int64)
self._testWhere(x, truth)
@test_util.run_deprecated_v1
def testRandomVec(self):
x = np.random.rand(1000000) > 0.5
truth = np.vstack([np.where(x)[0].astype(np.int64)]).T
self._testWhere(x, truth)
@test_util.run_deprecated_v1
def testBasicMat(self):
x = np.asarray([[True, False], [True, False]])
# Ensure RowMajor mode
truth = np.asarray([[0, 0], [1, 0]], dtype=np.int64)
self._testWhere(x, truth)
@test_util.run_deprecated_v1
def testBasic3Tensor(self):
x = np.asarray([[[True, False], [True, False]],
[[False, True], [False, True]],
[[False, False], [False, True]]])
# Ensure RowMajor mode
truth = np.asarray(
[[0, 0, 0], [0, 1, 0], [1, 0, 1], [1, 1, 1], [2, 1, 1]], dtype=np.int64)
self._testWhere(x, truth)
def _testRandom(self, dtype, expected_err_re=None):
shape = [127, 33, 53]
# Random boolean mask, cast to the dtype under test.
x = (np.random.randn(*shape) > 0).astype(dtype)
truth = np.where(np.abs(x) > 0) # Tuples of indices by axis.
truth = np.vstack(truth).T # Convert to [num_true, indices].
self._testWhere(x, truth, expected_err_re)
@test_util.run_deprecated_v1
def testRandomBool(self):
self._testRandom(np.bool)
@test_util.run_deprecated_v1
def testRandomInt32(self):
self._testRandom(np.int32)
@test_util.run_deprecated_v1
def testRandomInt64(self):
self._testRandom(np.int64)
@test_util.run_deprecated_v1
def testRandomFloat(self):
self._testRandom(np.float32)
@test_util.run_deprecated_v1
def testRandomDouble(self):
self._
|
testRandom(np.float64)
@test_util.run_deprecated_v1
def testRandomComplex64(self):
self._testRandom(np.complex64)
@test_util.run_deprecated_v1
def testRandomComplex128(self):
self._testRandom(np.complex128)
@test_util.run_deprecated_v1
def testRandomUint8(self):
self._testRandom(np.uint8)
@test_util.run_deprecated_v1
def testRandomInt8(self):
self._testRandom(np.int8)
@tes
|
t_util.run_deprecated_v1
def testRandomInt16(self):
self._testRandom(np.int16)
@test_util.run_deprecated_v1
def testThreeArgument(self):
x = np.array([[-2, 3, -1], [1, -3, -3]])
np_val = np.where(x > 0, x * x, -x)
with self.session(use_gpu=True):
tf_val = array_ops.where(constant_op.constant(x) > 0, x * x, -x).eval()
self.assertAllEqual(tf_val, np_val)
@test_util.run_deprecated_v1
def testBatchSelect(self):
x = np.array([[-2, 3, -1] * 64, [1, -3, -3] * 64] * 8192) # [16384, 192]
c_mat = np.array([[False] * 192, [True] * 192] * 8192) # [16384, 192]
c_vec = np.array([False, True] * 8192) # [16384]
np_val = np.where(c_mat, x * x, -x)
with self.session(use_gpu=True):
tf_val = array_ops.where(c_vec, x * x, -x).eval()
self.assertAllEqual(tf_val, np_val)
class WhereBenchmark(test.Benchmark):
def benchmarkWhere(self):
for (m, n, p, use_gpu) in itertools.product(
[10],
[10, 100, 1000, 10000, 100000, 1000000],
[0.01, 0.5, 0.99],
[False, True]):
name = "m_%d_n_%d_p_%g_use_gpu_%s" % (m, n, p, use_gpu)
device = "/%s:0" % ("gpu" if use_gpu else "cpu")
with ops.Graph().as_default():
with ops.device(device):
x = random_ops.random_uniform((m, n), dtype=dtypes.float32) <= p
v = resource_variable_ops.ResourceVariable(x)
op = array_ops.where(v)
with session.Session(config=benchmark.benchmark_config()) as sess:
v.initializer.run()
r = self.run_op_benchmark(sess, op, min_iters=100, name=name)
gb_processed_input = m * n / 1.0e9
# approximate size of output: m*n*p int64s for each axis.
gb_processed_output = 2 * 8 * m * n * p / 1.0e9
gb_processed = gb_processed_input + gb_processed_output
throughput = gb_processed / r["wall_time"]
print("Benchmark: %s \t wall_time: %0.03g s \t "
"Throughput: %0.03g GB/s" % (name, r["wall_time"], throughput))
sys.stdout.flush()
def benchmarkBatchSelect(self):
for (m, n, use_gpu) in itertools.product([1000, 10000, 100000],
[10, 100, 1000], [False, True]):
name = "m_%d_n_%d_use_gpu_%s" % (m, n, use_gpu)
device = "/%s:0" % ("gpu" if use_gpu else "cpu")
with ops.Graph().as_default():
with ops.device(device):
x_gen = random_ops.random_uniform([m, n], dtype=dtypes.float32)
y_gen = random_ops.random_uniform([m, n], dtype=dtypes.float32)
c_gen = random_ops.random_uniform([m], dtype=dtypes.float32) <= 0.5
x = resource_variable_ops.ResourceVariable(x_gen)
y = resource_variable_ops.ResourceVariable(y_gen)
c = resource_variable_ops.ResourceVariable(c_gen)
op = array_ops.where(c, x, y)
with session.Session(config=benchmark.benchmark_config()) as sess:
x.initializer.run()
y.initializer.run()
c.initializer.run()
r = self.run_op_benchmark(sess, op, min_iters=100, name=name)
# approximate size of output: m*n*2 floats for each axis.
gb_processed = m * n * 8 / 1.0e9
throughput = gb_processed / r["wall_time"]
print("Benchmark: %s \t wall_time: %0.03g s \t "
"Throughput: %0.03g GB/s" % (name, r["wall_time"], throughput))
sys.stdout.flush()
if __name__ == "__main__":
test.main()
|
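The _testWhere/_testRandom helpers above rebuild the expected output of the where op with numpy: np.where on a condition yields one index array per axis, while the op returns one row per True element in row-major order. The vstack().T conversion between the two layouts, in isolation:

```python
import numpy as np

x = np.asarray([[True, False],
                [True, False]])

# np.where returns a tuple of per-axis index arrays.
rows, cols = np.where(x)  # (array([0, 1]), array([0, 0]))

# Stacking and transposing yields the [num_true, ndim] matrix that the
# where op produces: one row of coordinates per True element.
truth = np.vstack(np.where(x)).T
print(truth)  # [[0 0]
              #  [1 0]]
```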
jamespcole/home-assistant
|
homeassistant/components/trafikverket_weatherstation/sensor.py
|
Python
|
apache-2.0
| 5,210
| 0
|
"""
Weather information for air and road temperature, provided by Trafikverket.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.trafikverket_weatherstation/
"""
import asyncio
from datetime import timedelta
import logging
import aiohttp
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION, CONF_API_KEY, CONF_MONITORED_CONDITIONS,
CONF_NAME, DEVICE_CLASS_HUMIDITY, DEVICE_CLASS_TEMPERATURE, TEMP_CELSIUS)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
REQUIRE
|
MENTS = ['pytrafikverket==0.1.5.9']
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by Trafikverket"
ATTR_MEASURE_TIME = 'measure_time'
ATTR_ACTIVE = 'active'
CONF_STATION = 'station'
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=10)
SCAN_INTERVAL
|
= timedelta(seconds=300)
SENSOR_TYPES = {
'air_temp': [
'Air temperature', TEMP_CELSIUS,
'air_temp', 'mdi:thermometer', DEVICE_CLASS_TEMPERATURE],
'road_temp': [
'Road temperature', TEMP_CELSIUS,
'road_temp', 'mdi:thermometer', DEVICE_CLASS_TEMPERATURE],
'precipitation': [
'Precipitation type', None,
'precipitationtype', 'mdi:weather-snowy-rainy', None],
'wind_direction': [
'Wind direction', '°',
'winddirection', 'mdi:flag-triangle', None],
'wind_direction_text': [
'Wind direction text', None,
'winddirectiontext', 'mdi:flag-triangle', None],
'wind_speed': [
'Wind speed', 'm/s',
'windforce', 'mdi:weather-windy', None],
'humidity': [
'Humidity', '%',
'humidity', 'mdi:water-percent', DEVICE_CLASS_HUMIDITY],
'precipitation_amount': [
'Precipitation amount', 'mm',
'precipitation_amount', 'mdi:cup-water', None],
'precipitation_amountname': [
'Precipitation name', None,
'precipitation_amountname', 'mdi:weather-pouring', None],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_STATION): cv.string,
vol.Required(CONF_MONITORED_CONDITIONS, default=[]):
[vol.In(SENSOR_TYPES)],
})
async def async_setup_platform(
hass, config, async_add_entities, discovery_info=None):
"""Set up the Trafikverket sensor platform."""
from pytrafikverket.trafikverket_weather import TrafikverketWeather
sensor_name = config[CONF_NAME]
sensor_api = config[CONF_API_KEY]
sensor_station = config[CONF_STATION]
web_session = async_get_clientsession(hass)
weather_api = TrafikverketWeather(web_session, sensor_api)
dev = []
for condition in config[CONF_MONITORED_CONDITIONS]:
dev.append(TrafikverketWeatherStation(
weather_api, sensor_name, condition, sensor_station))
if dev:
async_add_entities(dev, True)
class TrafikverketWeatherStation(Entity):
"""Representation of a Trafikverket sensor."""
def __init__(self, weather_api, name, sensor_type, sensor_station):
"""Initialize the sensor."""
self._client = name
self._name = SENSOR_TYPES[sensor_type][0]
self._type = sensor_type
self._state = None
self._unit = SENSOR_TYPES[sensor_type][1]
self._station = sensor_station
self._weather_api = weather_api
self._icon = SENSOR_TYPES[sensor_type][3]
self._device_class = SENSOR_TYPES[sensor_type][4]
self._weather = None
@property
def name(self):
"""Return the name of the sensor."""
return '{} {}'.format(self._client, self._name)
@property
def icon(self):
"""Icon to use in the frontend."""
return self._icon
@property
def device_state_attributes(self):
"""Return the state attributes of Trafikverket Weatherstation."""
return {
ATTR_ATTRIBUTION: ATTRIBUTION,
ATTR_ACTIVE: self._weather.active,
ATTR_MEASURE_TIME: self._weather.measure_time,
}
@property
def device_class(self):
"""Return the device class of the sensor."""
return self._device_class
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit
@Throttle(MIN_TIME_BETWEEN_UPDATES)
async def async_update(self):
"""Get the latest data from Trafikverket and updates the states."""
try:
self._weather = await self._weather_api.async_get_weather(
self._station)
self._state = getattr(
self._weather,
SENSOR_TYPES[self._type][2])
except (asyncio.TimeoutError,
aiohttp.ClientError, ValueError) as error:
_LOGGER.error("Could not fetch weather data: %s", error)
|
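In async_update above, the third element of each SENSOR_TYPES entry names the attribute to read off the API response via getattr, so one update method serves every sensor type. A sketch of that dispatch with the entries trimmed to three fields and a hypothetical response object standing in for pytrafikverket's:

```python
# [display name, unit, attribute on the API response object]
SENSOR_TYPES = {
    'air_temp': ['Air temperature', '°C', 'air_temp'],
    'wind_speed': ['Wind speed', 'm/s', 'windforce'],
}


class FakeWeatherResponse(object):
    """Hypothetical stand-in for the pytrafikverket weather object."""
    air_temp = 2.5
    windforce = 4.0


weather = FakeWeatherResponse()
for sensor_type, (name, unit, attr) in SENSOR_TYPES.items():
    # Same lookup as the sensor's async_update: getattr by configured name.
    print('{}: {} {}'.format(name, getattr(weather, attr), unit))
```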
openmotics/gateway
|
tools/api-tester.py
|
Python
|
agpl-3.0
| 3,464
| 0.001443
|
import requests
import ujson as json
BASE_URL = 'http://localhost:8088/'  # set to the correct IP address and port
USERNAME = '<Your username here>'
PASSWD = '<Your password here>'
TOKEN = '' # will be filled in by the login request
VERBOSE = 1 # verbose 0 => No output, 1 => minimal output, 2 => full output
def create_url(endpoint):
url = '{}{}'.format(BASE_URL, endpoint)
return url
def pretty_print_dict(data, prefix=''):
result = ''
if data is None:
return '\n'
for k, v in data.items():
result += '{}{}: {}\n'.format(prefix, k, v)
return result
def pretty_print_output(output):
try:
json_dict = json.loads(output)
json_str = json.dumps(json_dict, indent=4)
return json_str
except Exception:
return output
def api_call(endpoint, method='get', headers=None, params=None, body=None, authenticated=True, verbose=None):
if verbose is None:
verbose = VERBOSE
if headers is None:
headers = {}
if TOKEN != '' and authenticated:
headers.update({'Authorization': 'Bearer {}'.format(TOKEN)})
method = method.lower()
url = create_url(endpoint)
if params is None:
params = {}
if verbose == 1:
print('Perform request: {}'.format(url))
elif verbose == 2:
headers_str = pretty_print_dict(headers, prefix=' ')
params_str = pretty_print_dict(params, prefix=' ')
print('Perform request:\n url: {},\n method: {},\n headers:\n{} params:\n{} body: {}\n'
.format(url, method, headers_str, params_str, body))
if method == 'get':
response = requests.get(url=url, headers=headers, params=params)
elif method == 'post':
response = requests.post(url=url, headers=headers, params=params, data=body)
resp_body = '\n'.join([str(x.decode()) for x in response.iter_lines()])
if verbose == 1:
print(' => Response body: {}'.format(resp_body))
print('--------------------------------------------')
elif verbose == 2:
headers = pretty_print_dict(response.headers, prefix=' ')
body = pretty_print_output(resp_body)
body_indent = ''
for line in body.splitlines():
body
|
_indent += ' {}\n'.format(line)
print('Response:\n code: {},\n headers:\n{} body:\n{}'
.format(response.status_code, headers, body_indent))
print('--------------------------------------------')
return response
def login(verbose=None):
|
global TOKEN
if verbose is None:
verbose = VERBOSE
params = {'username': USERNAME, 'password': PASSWD}
resp = api_call('login', params=params, authenticated=False, verbose=verbose)
resp_json = resp.json()
if 'token' in resp_json:
token = resp.json()['token']
TOKEN = token
if verbose > 0:
print(' => logged in and received token: {}'.format(token))
print('--------------------------------------------')
else:
raise RuntimeError('Could not log in to the gateway')
return token
def main():
# do requests here
# Example requests to the get_version endpoint
login(verbose=1)
api_call('get_version', authenticated=False)
api_call('get_version', authenticated=True)
api_call('get_version', verbose=2, authenticated=False)
api_call('get_version', verbose=2, authenticated=True)
if __name__ == '__main__':
main()
|
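The login/api_call pair above implements a plain bearer-token flow: log in once, stash the token in a module-level global, and attach an Authorization header to every later call. The same flow fits naturally on a requests.Session, sketched below under the assumption that the gateway from the script is reachable with valid credentials:

```python
import requests

BASE_URL = 'http://localhost:8088/'  # same placeholder as in the script

session = requests.Session()
resp = session.get(BASE_URL + 'login',
                   params={'username': 'user', 'password': 'pass'})
token = resp.json()['token']

# The session now sends the bearer token on every subsequent request,
# replacing the global TOKEN and per-call header update above.
session.headers.update({'Authorization': 'Bearer {}'.format(token)})
print(session.get(BASE_URL + 'get_version').json())
```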
anhstudios/swganh
|
data/scripts/templates/object/draft_schematic/space/weapon/shared_wpn_heavy_disruptor.py
|
Python
|
mit
| 461
| 0.047722
|
#### NOTICE: THIS F
|
ILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Intangible()
result.template = "object/draft_schematic/space/weapon/shared_wpn_heavy_disruptor.iff"
result.attribute_template_id = -1
result.stfName("string_id_table","")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return resu
|
lt
|
bkjones/loghetti
|
test/apachelogs_test.py
|
Python
|
bsd-3-clause
| 930
| 0.005376
|
''' Simple test for apachelogs '''
import unittest
from apachelogs import ApacheLogFile
class apachelogs_test(unittest.TestCase):
def test_foo(self):
log = ApacheLogFile('test.log')
line = iter(log).next()
self.assertEquals(line.ip, '1
|
27.0.0.1')
self.assertEquals(line.ident, '-')
self.assertEquals(line.http_user, 'frank')
self.assertEquals(line.time, '5/Oct/2000:13:55:36 -0700')
self.assertEquals(line.request_line, 'GET /apache_pb.gif?foo=bar&baz=zip HTTP/1.0')
self.assertEquals(line.
|
http_response_code, '200')
self.assertEquals(line.http_response_size, '2326')
self.assertEquals(line.referrer, 'http://www.example.com/start.html')
self.assertEquals(line.user_agent, 'Mozilla/4.08 [en] (Win98; I ;Nav)')
log.close()
def setUp(self):
pass
if __name__ == '__main__':
unittest.main()
|
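The test above checks the fields loghetti extracts from one line of the Apache combined log format. For reference, a regex that recovers the same fields from such a line; this is a sketch of the format, not loghetti's actual implementation:

```python
import re

LINE = ('127.0.0.1 - frank [5/Oct/2000:13:55:36 -0700] '
        '"GET /apache_pb.gif?foo=bar&baz=zip HTTP/1.0" 200 2326 '
        '"http://www.example.com/start.html" '
        '"Mozilla/4.08 [en] (Win98; I ;Nav)"')

COMBINED = re.compile(
    r'(?P<ip>\S+) (?P<ident>\S+) (?P<http_user>\S+) '
    r'\[(?P<time>[^\]]+)\] "(?P<request_line>[^"]*)" '
    r'(?P<http_response_code>\d{3}) (?P<http_response_size>\S+) '
    r'"(?P<referrer>[^"]*)" "(?P<user_agent>[^"]*)"')

match = COMBINED.match(LINE)
print(match.group('ip'))                  # 127.0.0.1
print(match.group('http_response_code'))  # 200
```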
FabianN/autopkg_recipies
|
AdobeReader/AdobeReaderUpdatesURLProvider.py
|
Python
|
mit
| 5,706
| 0.000526
|
#!/usr/bin/python
#
# Copyright 2014: wycomco GmbH (choules@wycomco.de)
# 2015: modifications by Tim Sutton
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/lice
|
nses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for t
|
he specific language governing permissions and
# limitations under the License.
"""See docstring for AdobeReaderURLProvider class"""
# Disabling warnings for env members and imports that only affect recipe-
# specific processors.
#pylint: disable=e1101
import urllib2
import plistlib
from autopkglib import Processor, ProcessorError
__all__ = ["AdobeReaderUpdatesURLProvider"]
MAJOR_VERSION_DEFAULT = "11"
CHECK_OS_VERSION_DEFAULT = "10.8"
MAJOR_VERSION_MATCH_STR = "adobe/reader/mac/%s"
AR_UPDATER_DOWNLOAD_URL = (
"http://download.adobe.com/"
"pub/adobe/reader/mac/%s.x/%s/misc/AdbeRdrUpd%s.dmg")
AR_UPDATER_DOWNLOAD_URL2 = "http://ardownload.adobe.com"
AR_UPDATER_BASE_URL = "https://armmf.adobe.com/arm-manifests/mac"
AR_URL_TEMPLATE = "/%s/current_version_url_template.txt"
AR_MANIFEST_TEMPLATE = "/%s/manifest_url_template.txt"
AR_MAJREV_IDENTIFIER = "{MAJREV}"
OSX_MAJREV_IDENTIFIER = "{OS_VER_MAJ}"
OSX_MINREV_IDENTIFIER = "{OS_VER_MIN}"
AR_PROD_IDENTIFIER = '{PROD}'
AR_PROD_ARCH_IDENTIFIER = '{PROD_ARCH}'
AR_PROD = 'com_adobe_Reader'
AR_PROD_ARCH = 'univ'
class AdobeReaderUpdatesURLProvider(Processor):
"""Provides URL to the latest Adobe Reader release."""
description = __doc__
input_variables = {
"major_version": {
"required": False,
"description": ("Major version. Examples: '10', '11'. Defaults to "
"%s" % MAJOR_VERSION_DEFAULT)
},
"os_version": {
"required": False,
"default": CHECK_OS_VERSION_DEFAULT,
"description": ("Version of OS X to check. Default: %s" %
CHECK_OS_VERSION_DEFAULT)
}
}
output_variables = {
"url": {
"description": "URL to the latest Adobe Reader release.",
},
"version": {
"description": "Version for this update.",
},
}
def get_reader_updater_pkg_url(self, major_version):
'''Returns download URL for the Adobe Reader updater pkg (via the update manifest)'''
request = urllib2.Request(
AR_UPDATER_BASE_URL + AR_MANIFEST_TEMPLATE % major_version)
try:
url_handle = urllib2.urlopen(request)
version_string = url_handle.read()
url_handle.close()
except BaseException as err:
raise ProcessorError("Can't open manifest template: %s" % (err))
os_maj, os_min = self.env["os_version"].split(".")
version_string = version_string.replace(
AR_MAJREV_IDENTIFIER, major_version)
version_string = version_string.replace(OSX_MAJREV_IDENTIFIER, os_maj)
version_string = version_string.replace(OSX_MINREV_IDENTIFIER, os_min)
version_string = version_string.replace(AR_PROD_IDENTIFIER, AR_PROD)
version_string = version_string.replace(AR_PROD_ARCH_IDENTIFIER, AR_PROD_ARCH)
request = urllib2.Request(
AR_UPDATER_BASE_URL + version_string)
try:
url_handle = urllib2.urlopen(request)
plist = plistlib.readPlistFromString(url_handle.read())
url_handle.close()
except BaseException as err:
raise ProcessorError("Can't get or read manifest: %s" % (err))
url = AR_UPDATER_DOWNLOAD_URL2 + plist['PatchURL']
return url
def get_reader_updater_dmg_url(self, major_version):
'''Returns download URL for Adobe Reader Updater DMG'''
request = urllib2.Request(
AR_UPDATER_BASE_URL + AR_URL_TEMPLATE % major_version)
try:
url_handle = urllib2.urlopen(request)
version_string = url_handle.read()
url_handle.close()
except BaseException as err:
raise ProcessorError("Can't open URL template: %s" % (err))
os_maj, os_min = self.env["os_version"].split(".")
version_string = version_string.replace(
AR_MAJREV_IDENTIFIER, major_version)
version_string = version_string.replace(OSX_MAJREV_IDENTIFIER, os_maj)
version_string = version_string.replace(OSX_MINREV_IDENTIFIER, os_min)
request = urllib2.Request(
AR_UPDATER_BASE_URL + version_string)
try:
url_handle = urllib2.urlopen(request)
version = url_handle.read()
url_handle.close()
except BaseException as err:
raise ProcessorError("Can't get version string: %s" % (err))
versioncode = version.replace('.', '')
url = AR_UPDATER_DOWNLOAD_URL % (major_version, version, versioncode)
return (url, version)
def main(self):
major_version = self.env.get("major_version", MAJOR_VERSION_DEFAULT)
(url, version) = self.get_reader_updater_dmg_url(major_version)
# only need the version, getting the URL from the manifest now
url = self.get_reader_updater_pkg_url(major_version)
self.env["url"] = url
self.env["version"] = version
self.output("Found URL %s" % self.env["url"])
if __name__ == "__main__":
PROCESSOR = AdobeReaderUpdatesURLProvider()
PROCESSOR.execute_shell()
|
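Both updater methods above fetch a template string and splice in the major version, OS version, and product identifiers through chained str.replace calls. That substitution step in isolation; the template text here is a hypothetical example in the same style, not Adobe's real manifest:

```python
template = "/{MAJREV}/mac/{OS_VER_MAJ}.{OS_VER_MIN}/{PROD}_{PROD_ARCH}.plist"

replacements = {
    "{MAJREV}": "11",
    "{OS_VER_MAJ}": "10",
    "{OS_VER_MIN}": "8",
    "{PROD}": "com_adobe_Reader",
    "{PROD_ARCH}": "univ",
}

path = template
for placeholder, value in replacements.items():
    path = path.replace(placeholder, value)

print(path)  # /11/mac/10.8/com_adobe_Reader_univ.plist
```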
tfroehlich82/EventGhost
|
plugins/Mouse/__init__.py
|
Python
|
gpl-2.0
| 21,487
| 0.004328
|
# -*- coding: utf-8 -*-
#
# This file is part of EventGhost.
# Copyright © 2005-2020 EventGhost Project <http://www.eventghost.net/>
#
# EventGhost is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 2 of the License, or (at your option)
# any later version.
#
# EventGhost is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along
# with EventGhost. If not, see <http://www.gnu.org/licenses/>.
import wx
from math import cos, pi, radians, sin
from Queue import Queue
from sys import maxint
from threading import Thread
from time import clock, sleep
from win32api import EnumDisplayMonitors, GetSystemMetrics, mouse_event as mouse_event2
from win32con import MOUSEEVENTF_ABSOLUTE, MOUSEEVENTF_MOVE
# Local imports
import eg
from eg import HasActiveHandler
from eg.cFunctions import SetMouseCallback
from eg.WinApi.Dynamic import GetCursorPos, mouse_event, POINT, SetCursorPos
from eg.WinApi.Utils import GetMonitorDimensions
ICON = """iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAABmJLR0QA/wD/AP+gvaeT
AAAACXBIWXMAAAsTAAALEwEAmpwYAAAAB3RJTUUH1QQIDRgEM71mAAAAADV0RVh0Q29tbWVudAAoYy
kgMjAwNCBKYWt1YiBTdGVpbmVyCgpDcmVhdGVkIHdpdGggVGhlIEdJTVCQ2YtvAAACHElEQVQ4y42Q
zUtUURjGf/fcyatz73Wiklwo2R/QplXQ/AURlLYJcrJNQrvQahYFI0wQ7lu0azNtYlAj2rUJRFciUf
kRgUwOM6Y5jePXfNzznhZ+NOpIvpvD+5zn/M7DY3Fo0ul0JzBQLpdvG2M8wHi++6r7Zs+Tet/Yu9Hr
W5tb/Yqjc2m7vB3zfPd7LBbzPd/tK/5Zu5ZKpZZSb1LZ0bGRG7u+F2E3PG0dfp1MJl+2tvq9xeLaJv
AxkUj01aW7UKtV3xvYam525nq6b92znieHEkqpIWwLpRSV7YBoNEoun2VhIUOTY6ODAAmkqJT68PRZ
orf+w1AoFBq63//A2LZthcNhhoeH0VrjNLVgYTHw8DGlUonC6u/IyEj6DnAAoAAq1ar1c3FxX8zlcl
QqlX97Po/XGrEa9MWREuPxOPl8nmw2Szwe538Tql9WVlZoa2tjcHDwgHZiwGqhwGqhgO/7dHZ0MDM7
e7IEG6V1zp05uy/WghrLv5YPaBul9eMBnufuRLXAwsIYQYsgRhCt0SK0n2/nuBKnxBi00YhotA7Qoh
ERRAsiBiOy559qBJjVWmMrmyAQtNboYBcmgojQdMrZ8083Anyan5/D8zxaWpqxlEKLoPVOfNd1iZyO
MDPzDeBHow7efv3yuc9xnGhX10U8z8MAGMPOYchkFlhaygG8bgSoVavVu5MT448mJ8YvA1cadJUBrg
Jrhy/+AqGrAMOnH86mAAAAAElFTkSuQmCC"""
eg.RegisterPlugin(
name = "Mouse",
author = (
"Bitmonster",
"Sem;colon",
),
version = "1.1.1",
description = (
"Actions to control the mouse cursor and emulation of mouse events."
),
kind = "core",
guid = "{6B1751BF-F94E-4260-AB7E-64C0693FD959}",
icon = ICON,
url = "http://www.eventghost.net/forum/viewtopic.php?f=9&t=5481",
)
class Mouse(eg.PluginBase):
def __init__(self):
self.AddEvents()
self.AddAction(LeftButton)
self.AddAction(LeftDoubleClick)
self.AddAction(ToggleLeftButton)
self.AddAction(MiddleButton)
self.AddAction(MoveAbsolute)
self.AddAction(MoveRelative)
self.AddAction(RightButton)
self.AddAction(RightDoubleClick)
self.AddAction(GoDirection)
self.AddAction(MouseWheel)
@eg.LogIt
def __close__(self):
pass
def __start__(self):
self.thread = MouseThread()
self.leftMouseButtonDown = False
self.lastMouseEvent = None
self.mouseButtonWasBlocked = [False, False, False, False, False]
SetMouseCallback(self.MouseCallBack)
@eg.LogIt
def __stop__(self):
SetMouseCallback(None)
self.thread.receiveQueue.put([-1])
def MouseCallBack(self, buttonName, buttonNum, param):
if param:
if self.lastMouseEvent:
self.lastMouseEvent.SetShouldEnd()
shouldBlock = HasActiveHandler("Mouse." + buttonName)
self.mouseButtonWasBlocked[buttonNum] = shouldBlock
self.lastMouseEvent = self.TriggerEnduringEvent(buttonName)
return shouldBlock
else:
if self.lastMouseEvent:
self.lastMouseEvent.SetShouldEnd()
return self.mouseButtonWasBlocked[buttonNum]
return False
class MouseThread(Thread):
currentAngle = 0
newAngle = 0
acceleration = 0
speed = 0
maxTicks = 5
yRemainder = 0
xRemainder = 0
leftButtonDown = False
lastTime = 0
initSpeed = 0.06
maxSpeed = 7.0
useAlternateMethod = False
def __init__(self):
Thread.__init__(self, name="MouseThread")
self.receiveQueue = Queue(2048)
self.start()
@eg.LogItWithReturn
def run(self):
stop = False
point = POINT()
while True:
self.lastTime = clock()
if not self.receiveQueue.empty():
data = self.receiveQueue.get()
if data[0] == -1:
break
elif data[0] == -2:
stop = True
else:
self.newAngle = radians(data[0])
self.initSpeed = data[1]
self.maxSpeed = data[2]
self.acceleration = data[3]
self.useAlternateMethod = data[4]
if stop:
self.acceleration = 0
self.speed = 0
stop = False
continue
if self.acceleration == 0:
sleep(0.05)
continue
ticks = 10
if self.speed == 0:
self.currentAngle = self.newAngle
self.speed = self.initSpeed
else:
diff = self.newAngle - self.currentAngle
if diff > pi:
diff = diff - 2 * pi
elif diff < -1 * pi:
diff = diff + 2 * pi
self.currentAngle = self.currentAngle + (diff / 20)
self.speed = self.speed + (self.speed * self.acceleration * ticks)
if self.speed > self.maxSpeed:
self.speed = self.maxSpeed
elif self.speed <= 0:
self.speed = 0
factor = self.speed * (ticks / 10)
xCurrent = sin(self.currentAngle) * factor + self.xRemainder
yCurrent = -1 * cos(self.currentAngle) * factor + self.yRemainder
x = int(xCurrent)
y = int(yCurrent)
self.xRemainder = xCurrent - x
self.yRemainder = yCurrent - y
try:
if self.useAlternateMethod:
mouse_event2(MOUSEEVENTF_MOVE, x, y)
else:
GetCursorPos(point)
SetCursorPos(point.x + x, point.y + y)
except Exception:  # GetCursorPos/SetCursorPos can fail transiently
pass
if self.speed == 0:
self.acceleration = 0
waitTicks = 0.01 - (clock() - self.lastTime)
if waitTicks < 0:
waitTicks = 0.0
sleep(waitTicks)
class GoDirection(eg.ActionBase):
name = "Start Movement"
description = "Starts cursor movement in the specified direction."
class text:
label = u"Start cursor movement in direction %.2f\u00B0"
text1 = "Start moving cursor in direction"
text2 = "degrees. (0-360)"
text3 = "Initial mouse speed:"
text4 = "Maximum mouse speed:"
text5 = "Acceleration factor:"
label_AM = "Use alternate method"
def __call__(self, direction=0, initSpeed = 60, maxSpeed = 7000, accelerationFactor = 3, useAlternateMethod=False):
def UpFunc():
self.plugin.thread.receiveQueue.put([-2])
self.plugin.thread.re
|
ceiveQueue.put([float(direction), float(initSpeed) / 1000, float(maxSpeed) / 1000, float(accelerationFactor) / 1000, useAlternateMethod])
eg.event.AddUpFunc(UpFunc)
def Configure(self, direction=0, initSpeed = 60, maxSpeed = 7000, accelerationFactor = 3, useAlternateMet
|
hod=False):
text = self.text
panel = eg.ConfigPanel()
direction = float(direction)
valueCtrl = panel.SpinNumCtrl(float(direction), min=0, max=360)
panel.AddLine(text.text1, valueCtrl, text.text2)
initSpeedLabel = wx.StaticText(panel, -1, t
|
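MouseThread above carries the fractional part of every sub-pixel move in xRemainder/yRemainder, so slow or shallow-angle motion is not rounded away tick after tick. The carry technique on its own:

```python
from math import cos, radians, sin


def steps(angle_deg, speed, n):
    """Yield integer (dx, dy) mouse steps, carrying sub-pixel remainders."""
    angle = radians(angle_deg)
    x_rem = y_rem = 0.0
    for _ in range(n):
        x_cur = sin(angle) * speed + x_rem
        y_cur = -cos(angle) * speed + y_rem
        dx, dy = int(x_cur), int(y_cur)
        # Carry the truncated fraction into the next tick.
        x_rem, y_rem = x_cur - dx, y_cur - dy
        yield dx, dy


# At 0.3 px/tick toward 30 degrees, each tick alone rounds to zero pixels,
# but the carried remainder releases a pixel every few ticks instead of never.
print(list(steps(30, 0.3, 10)))
```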
iLoop2/ResInsight
|
ThirdParty/Ert/devel/python/python/ert/enkf/enkf_main.py
|
Python
|
gpl-3.0
| 13,314
| 0.007736
|
# Copyright (C) 2012 Statoil ASA, Norway.
#
# The file 'ecl_kw.py' is part of ERT - Ensemble based Reservoir Tool.
#
# ERT is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ERT is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU General Public License at <http://www.gnu.org/licenses/gpl.html>
# for more details.
from ert.cwrap import BaseCClass, CWrapper
from ert.enkf import AnalysisConfig, EclConfig, EnkfObs, EnKFState, LocalConfig, ModelConfig, EnsembleConfig, PlotConfig, SiteConfig, ENKF_LIB, EnkfSimulationRunner, EnkfFsManager, ErtWorkflowList, PostSimulationHook
from ert.enkf.enums import EnkfInitModeEnum
from ert.util import SubstitutionList, Log
class EnKFMain(BaseCClass):
def __init__(self, model_config, strict=True):
c_ptr = EnKFMain.cNamespace().bootstrap(model_config, strict, False)
super(EnKFMain, self).__init__(c_ptr)
self.__simulation_runner = EnkfSimulationRunner(self)
self.__fs_manager = EnkfFsManager(self)
@classmethod
def createCReference(cls, c_pointer, parent=None):
obj = super(EnKFMain, cls).createCReference(c_pointer, parent)
obj.__simulation_runner = EnkfSimulationRunner(obj)
obj.__fs_manager = EnkfFsManager(obj)
return obj
@staticmethod
def createNewConfig(config_file, storage_path, case_name, dbase_type, num_realizations):
EnKFMain.cNamespace().create_new_config(config_file, storage_path, case_name, dbase_type, num_realizations)
def getRealisation(self , iens):
""" @rtype: EnKFState """
if 0 <= iens < self.getEnsembleSize():
return EnKFMain.cNamespace().iget_state(self, iens).setParent(self)
else:
raise IndexError("iens value:%d invalid Valid range: [0,%d)" % (iens , len(self)))
def set_eclbase(self, eclbase):
EnKFMain.cNamespace().set_eclbase(self, eclbase)
def umount(self):
self.__fs_manager.umount()
def free(self):
self.umount()
EnKFMain.cNamespace().free(self)
def getEnsembleSize(self):
""" @rtype: int """
return EnKFMain.cNamespace().get_ensemble_size(self)
def resizeEnsemble(self, value):
EnKFMain.cNamespace().resize_ensemble(self, value)
def ensembleConfig(self):
""" @rtype: EnsembleConfig """
return EnKFMain.cNamespace().get_ens_config(self).setParent(self)
def analysisConfig(self):
""" @rtype: AnalysisConfig """
return EnKFMain.cNamespace().get_analysis_config(self).setParent(self)
def getModelConfig(self):
""" @rtype: ModelConfig """
return EnKFMain.cNamespace().get_model_config(self).setParent(self)
def logh(self):
""" @rtype: Log """
return EnKFMain.cNamespace().get_logh(self).setParent(self)
def local_config(self):
""" @rtype: LocalConfig """
return EnKFMain.cNamespace().get_local_config(self).setParent(self)
def siteConfig(self):
""" @rtype: SiteConfig """
return EnKFMain.cNamespace().get_site_config(self).setParent(self)
def eclConfig(self):
""" @rtype: EclConfig """
return EnKFMain.cNamespace().get_ecl_config(self).setParent(self)
def plotConfig(self):
""" @rtype: PlotConfig """
return EnKFMain.cNamespace().get_plot_config(self).setParent(self)
def set_datafile(self, datafile):
EnKFMain.cNamespace().set_datafile(self, datafile)
def get_schedule_prediction_file(self):
schedule_prediction_file = EnKFMain.cNamespace().get_schedule_prediction_file(self)
return schedule_prediction_file
def set_schedule_prediction_file(self, file):
EnKFMain.cNamespace().set_schedule_prediction_file(self, file)
def getDataKW(self):
""" @rtype: SubstitutionList """
return EnKFMain.cNamespace().get_data_kw(self)
def clearDataKW(self):
EnKFMain.cNamespace().clear_data_kw(self)
def addDataKW(self, key, value):
EnKFMain.cNamespace().add_data_kw(self, key, value)
def getMountPoint(self):
return EnKFMain.cNamespace().get_mount_point(self)
def del_node(self, key):
EnKFMain.cNamespace().del_node(self, key)
def getObservations(self):
""" @rtype: EnkfObs """
return EnKFMain.cNamespace().get_obs(self).setParent(self)
def load_obs(self, obs_config_file):
EnKFMain.cNamespace().load_obs(self, obs_config_file)
def reload_obs(self):
EnKFMain.cNamespace().reload_obs(self)
def get_pre_clear_runpath(self):
pre_clear = EnKFMain.cNamespace().get_pre_clear_runpath(self)
return pre_clear
def set_pre_clear_runpath(self, value):
EnKFMain.cNamespace().set_pre_clear_runpath(self, value)
def iget_keep_runpath(self, iens):
ikeep = EnKFMain.cNamespace().iget_keep_runpath(self, iens)
return ikeep
def iset_keep_runpath(self, iens, keep_runpath):
EnKFMain.cNamespace().iset_keep_runpath(self, iens, keep_runpath)
def get_templates(self):
return EnKFMain.cNamespace().get_templates(self).setParent(self)
def get_site_config_file(self):
site_conf_file = EnKFMain.cNamespace().get_site_config_file(self)
return site_conf_file
def getUserConfigFile(self):
""" @rtype: str """
config_file = EnKFMain.cNamespace().get_user_config_file(self)
return config_file
def getHistoryLength(self):
return EnKFMain.cNamespace().get_history_length(self)
def getMemberRunningState(self, ensemble_member):
""" @rtype: EnKFState """
|
return EnKFMain.cNamespace().iget_state(self, ensemble_member).setParent(self)
def get_observations(self, user_key, obs_count, obs_x, obs_y, obs_std):
EnKFMain.cNamespace().get_observations(self, user_key, obs_count, obs_x, obs_y, obs_std)
def get_observation_count(self, user_key):
return EnKFMain.cNamespace().get_observation_count(self, user_key)
def getEnkfSimulationRunner(self)
|
:
""" @rtype: EnkfSimulationRunner """
return self.__simulation_runner
def getEnkfFsManager(self):
""" @rtype: EnkfFsManager """
return self.__fs_manager
def getWorkflowList(self):
""" @rtype: ErtWorkflowList """
return EnKFMain.cNamespace().get_workflow_list(self).setParent(self)
def getPostSimulationHook(self):
""" @rtype: PostSimulationHook """
return EnKFMain.cNamespace().get_qc_module(self)
def exportField(self, keyword, path, iactive, file_type, report_step, state, enkfFs):
"""
@type keyword: str
@type path: str
@type iactive: BoolVector
@type file_type: EnkfFieldFileFormatEnum
@type report_step: int
@type state: EnkfStateType
@type enkfFs: EnkfFs
"""
assert isinstance(keyword, str)
return EnKFMain.cNamespace().export_field_with_fs(self, keyword, path, iactive, file_type, report_step, state, enkfFs)
def loadFromForwardModel(self, realization, iteration, fs):
EnKFMain.cNamespace().load_from_forward_model(self, iteration, realization, fs)
def submitSimulation(self , run_arg):
EnKFMain.cNamespace().submit_simulation( self , run_arg)
def getRunContextENSEMBLE_EXPERIMENT(self , fs , iactive , init_mode = EnkfInitModeEnum.INIT_CONDITIONAL , iteration = 0):
return EnKFMain.cNamespace().alloc_run_context_ENSEMBLE_EXPERIMENT( self , fs , iactive , init_mode , iteration )
##################################################################
cwrapper = CWrapper(ENKF_LIB)
cwrapper.registerType("enkf_main", EnKFMain)
cwrapper.registerType("enkf_main_ref", EnKFMain.createCReference)
EnKFMain.cNamespace().bootstrap = cwrapper.prototype("c_void_p e
|
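EnKFMain above is a thin facade: every method forwards to a C function that CWrapper registered on the class namespace, and the row is cut off mid-prototype. The general shape of that pattern — load a shared library, declare a function's signature, call it from a wrapper method — is standard ctypes, sketched here against libc on a POSIX system rather than ENKF_LIB:

```python
import ctypes
import ctypes.util

# Stand-in for ENKF_LIB: locate and load a shared library by name.
libc = ctypes.CDLL(ctypes.util.find_library("c"))

# Declaring argtypes/restype plays the role of
# cwrapper.prototype("c_void_p enkf_main_bootstrap(...)").
libc.strlen.argtypes = [ctypes.c_char_p]
libc.strlen.restype = ctypes.c_size_t


class CStringFacade(object):
    """Thin wrapper whose methods forward to the C library."""

    def __init__(self, value):
        self._value = value.encode()

    def length(self):
        return libc.strlen(self._value)


print(CStringFacade("enkf").length())  # 4
```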
akirakoyasu/ansible-modules-core
|
network/basics/uri.py
|
Python
|
gpl-3.0
| 19,263
| 0.006489
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Romeo Theriault <romeot () hawaii.edu>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# see examples/playbooks/uri.yml
import cgi
import shutil
import tempfile
import base64
import datetime
try:
import json
except ImportError:
import simplejson as json
DOCUMENTATION = '''
---
module: uri
short_description: Interacts with webservices
description:
- Interacts with HTTP and HTTPS web services and supports Digest, Basic and WSSE
HTTP authentication mechanisms.
version_added: "1.1"
options:
url:
description:
- HTTP or HTTPS URL in the form (http|https)://host.domain[:port]/path
required: true
default: null
aliases: []
dest:
description:
- path of where to download the file to (if desired). If I(dest) is a directory, the basename of the file on the remote server will be used.
required: false
default: null
user:
description:
- username for the module to use for Digest, Basic or WSSE authentication.
required: false
default: null
password:
description:
- password for the module to use for Digest, Basic or WSSE authentication.
req
|
uired: false
default: null
body:
description:
- The body of the http request/response to the web service.
required: false
default: null
body_format:
description:
- The serialization format of the body. When set to json, encodes the body argument and automatically sets the Content-Type header accordingly.
required: false
choices: [ "raw", "json" ]
default: raw
method:
description:
|
- The HTTP method of the request or response.
required: false
choices: [ "GET", "POST", "PUT", "HEAD", "DELETE", "OPTIONS", "PATCH", "TRACE", "CONNECT", "REFRESH" ]
default: "GET"
return_content:
description:
- Whether or not to return the body of the request as a "content" key in the dictionary result. If the reported Content-type is "application/json", then the JSON is additionally loaded into a key called C(json) in the dictionary results.
required: false
choices: [ "yes", "no" ]
default: "no"
force_basic_auth:
description:
- httplib2, the library used by the uri module, only sends authentication information when a webservice
responds to an initial request with a 401 status. Since some basic auth services do not properly
send a 401, logins will fail. This option forces the sending of the Basic authentication header
upon the initial request.
required: false
choices: [ "yes", "no" ]
default: "no"
follow_redirects:
description:
- Whether or not the URI module should follow redirects. C(all) will follow all redirects.
C(safe) will follow only "safe" redirects, where "safe" means that the client is only
doing a GET or HEAD on the URI to which it is being redirected. C(none) will not follow
any redirects. Note that C(yes) and C(no) choices are accepted for backwards compatibility,
where C(yes) is the equivalent of C(all) and C(no) is the equivalent of C(safe). C(yes) and C(no)
are deprecated and will be removed in some future version of Ansible.
required: false
choices: [ "all", "safe", "none" ]
default: "safe"
creates:
description:
- a filename; if it already exists, this step will not be run.
required: false
removes:
description:
- a filename; if it does not exist, this step will not be run.
required: false
status_code:
description:
- A valid, numeric, HTTP status code that signifies success of the request. Can also be comma separated list of status codes.
required: false
default: 200
timeout:
description:
- The socket level timeout in seconds
required: false
default: 30
HEADER_:
description:
- Any parameter starting with "HEADER_" is sent with your request as a header.
For example, HEADER_Content-Type="application/json" would send the header
"Content-Type" along with your request with a value of "application/json".
required: false
default: null
others:
description:
- all arguments accepted by the M(file) module also work here
required: false
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be
set to C(no) on personally controlled sites using self-signed
certificates. Prior to 1.9.2 the code defaulted to C(no).
required: false
default: 'yes'
choices: ['yes', 'no']
version_added: '1.9.2'
# informational: requirements for nodes
requirements: [ urlparse, httplib2 ]
author: "Romeo Theriault (@romeotheriault)"
'''
EXAMPLES = '''
# Check that you can connect (GET) to a page and it returns a status 200
- uri: url=http://www.example.com
# Check that a page returns a status 200 and fail if the word AWESOME is not in the page contents.
- action: uri url=http://www.example.com return_content=yes
register: webpage
- action: fail
when: "'illustrative' not in webpage.content"
# Create a JIRA issue
- uri:
url: https://your.jira.example.com/rest/api/2/issue/
method: POST
user: your_username
password: your_pass
body: "{{ lookup('file','issue.json') }}"
force_basic_auth: yes
status_code: 201
body_format: json
# Login to a form based webpage, then use the returned cookie to
# access the app in later tasks
- uri:
url: https://your.form.based.auth.example.com/index.php
method: POST
body: "name=your_username&password=your_password&enter=Sign%20in"
status_code: 302
HEADER_Content-Type: "application/x-www-form-urlencoded"
register: login
- uri:
url: https://your.form.based.auth.example.com/dashboard.php
method: GET
return_content: yes
HEADER_Cookie: "{{login.set_cookie}}"
# Queue build of a project in Jenkins:
- uri:
url: "http://{{ jenkins.host }}/job/{{ jenkins.job }}/build?token={{ jenkins.token }}"
method: GET
user: "{{ jenkins.user }}"
password: "{{ jenkins.password }}"
force_basic_auth: yes
status_code: 201
'''
HAS_HTTPLIB2 = True
try:
import httplib2
except ImportError:
HAS_HTTPLIB2 = False
HAS_URLPARSE = True
try:
import urlparse
import socket
except ImportError:
HAS_URLPARSE = False
def write_file(module, url, dest, content):
# create a tempfile with some test content
fd, tmpsrc = tempfile.mkstemp()
f = open(tmpsrc, 'wb')
try:
f.write(content)
except Exception, err:
os.remove(tmpsrc)
module.fail_json(msg="failed to create temporary content file: %s" % str(err))
f.close()
checksum_src = None
checksum_dest = None
# raise an error if there is no tmpsrc file
if not os.path.exists(tmpsrc):
os.remove(tmpsrc)
module.fail_json(msg="Source %s does not exist" % (tmpsrc))
if not os.access(tmpsrc, os.R_OK):
os.remove(tmpsrc)
module.fail_json( msg="Source %s not readable" % (tmpsrc))
checksum_src = module.sha1(tmpsrc)
# check if there is no dest file
if os.path.exists(dest):
# raise an error if copy has no permission on dest
if not os.access(dest, os.W_OK):
os.remove(tmpsrc)
module.fail_json( msg="Destination %s not writable" % (dest))
if not os.access(dest, os.R_OK):
os.remove(tmpsrc)
module.fail_json( msg="Destination %s not readable" % (dest))
checksum_de
|
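write_file above stages the response body in a temporary file, checksums it, and checks permissions before moving it over dest. A compact standalone version of that stage-then-replace idiom, without the Ansible module plumbing (module.fail_json, module.sha1):

```python
import hashlib
import os
import tempfile


def write_file(dest, content):
    """Stage content in a tempfile, then atomically replace dest."""
    # Creating the tempfile next to dest keeps the final rename on one
    # filesystem, which makes os.replace atomic on POSIX.
    fd, tmpsrc = tempfile.mkstemp(dir=os.path.dirname(os.path.abspath(dest)))
    try:
        with os.fdopen(fd, 'wb') as f:
            f.write(content)
        if os.path.exists(dest):
            with open(dest, 'rb') as f:
                if hashlib.sha1(f.read()).hexdigest() == \
                        hashlib.sha1(content).hexdigest():
                    os.remove(tmpsrc)  # identical file: nothing to do
                    return False
        os.replace(tmpsrc, dest)
        return True
    except BaseException:
        if os.path.exists(tmpsrc):
            os.remove(tmpsrc)
        raise


write_file('example.txt', b'hello')
```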
Nat-Lab/pac.py
|
lib/confParser.py
|
Python
|
mit
| 3,119
| 0
|
import string
from pyparsing import (
Literal, White, Word, alphanums, CharsNotIn, Forward, Group, SkipTo,
Optional, OneOrMore, ZeroOrMore, pythonStyleComment)
class Parser(object):
left_bracket = Literal("{").suppress()
right_bracket = Literal("}").suppress()
semicolon = Literal(";").suppress()
space = White().suppress()
key = Word(alphanums + "_/")
value = CharsNotIn("{};")
value2 = CharsNotIn(";")
location = CharsNotIn("{};," + string.whitespace)
ifword = Literal("if")
setword = Literal("set")
modifier = Literal("=") | Literal("~*") | Literal("~") | Literal("^~")
assignment = (key + Optional(space + value) + semicolon)
setblock = (setword + OneOrMore(space + value2) + semicolon)
block = Forward()
ifblock = Forward()
subblock = Forward()
ifblock << (
ifword
+ SkipTo('{')
+ left_bracket
+ subblock
+ right_bracket)
subblock << ZeroOrMore(
Group(assignment) | block | ifblock | setblock
)
block << Group(
Group(key + Optional(space + modifier) + Optional(space + location))
+ left_bracket
+ Group(subblock)
+ right_bracket
)
script = OneOrMore(Group(assignment) | block).ignore(pythonStyleComment)
def __init__(self, source):
self.source = source
def parse(self):
return self.script.parseString(self.source)
def as_list(self):
        return self.parse().asList()
class Dumper(object):
def __init__(self, blocks, indentation=4):
self.blocks = blocks
self.indentation = indentation
def __iter__(self, blocks=None, current_indent=0, spacer=' '):
blocks = blocks or self.blocks
for key, values in blocks:
if current_indent:
yield spacer
indentation = spacer * current_indent
if isinstance(key, list):
yield indentation + spacer.join(key) + ' {'
for parameter in values:
if isinstance(parameter[0], list):
dumped = self.__iter__(
[parameter],
current_indent + self.indentation)
for line in dumped:
yield line
else:
dumped = spacer.join(parameter) + ';'
yield spacer * (
current_indent + self.indentation) + dumped
yield indentation + '}'
else:
yield spacer * current_indent + key + spacer + values + ';'
def as_string(self):
return '\n'.join(self)
def to_file(self, out):
for line in self:
out.write(line+"\n")
out.close()
return out
def loads(source):
return Parser(source).as_list()
def load(_file):
return loads(_file.read())
def dumps(blocks, indentation=4):
return Dumper(blocks, indentation).as_string()
def dump(blocks, _file, indentation=4):
return Dumper(blocks, indentation).to_file(_file)
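if __name__ == '__main__':
    # Round-trip demo (added sketch, not part of the original module): parse
    # a small nginx-style snippet with `loads` and print it back via `dumps`.
    sample = 'server { listen 80; location / { root /var/www; } }'
    print(dumps(loads(sample)))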
|
Jumpscale/jumpscale6_core
|
apps/osis/logic/test_complextype/user/test_complextype_user_osismodelbase.py
|
Python
|
bsd-2-clause
| 5,069
| 0.00868
|
from JumpScale import j
class test_complextype_user_osismodelbase(j.code.classGetJSRootModelBase()):
"""
group of users
"""
def __init__(self):
pass
self._P_id=0
self._P_organization=""
self._P_name=""
self._P_emails=list()
self._P_groups=list()
self._P_guid=""
self._P__meta=list()
self._P__meta=["osismodel","test_complextype","user",1] #@todo version not implemented now, just already foreseen
@property
def id(self):
return self._P_id
@id.setter
def id(self, value):
if not isinstance(value, int) and value is not None:
if isinstance(value, basestring) and j.basetype.integer.checkString(value):
value = j.basetype.integer.fromString(value)
else:
msg="property id input error, needs to be int, specfile: /opt/jumpscale/apps/osis/logic/test_complextype/model.spec, name model: user, value was:" + str(value)
raise TypeError(msg)
self._P_id=value
@id.deleter
def id(self):
del self._P_id
@property
def organization(self):
return self._P_organization
@organization.setter
def organization(self, value):
if not isinstance(value, str) and value is not None:
if isinstance(value, basestring) and j.basetype.string.checkString(value):
value = j.basetype.string.fromString(value)
else:
msg="property organization input error, needs to be str, specfile: /opt/jumpscale/apps/osis/logic/test_complextype/model.spec, name model: user, value was:" + str(value)
raise TypeError(msg)
self._P_organization=value
@organization.deleter
def organization(self):
del self._P_organization
@property
def name(self):
return self._P_name
@name.setter
def name(self, value):
if not isinstance(value, str) and value is not None:
if isinstance(value, basestring) and j.basetype.string.checkString(value):
value = j.basetype.string.fromString(value)
else:
msg="property name input error, needs to be str, specfile: /opt/jumpscale/apps/osis/logic/test_complextype/model.spec, name model: user, value was:" + str(value)
raise TypeError(msg)
self._P_name=value
@name.deleter
def name(self):
del self._P_name
@property
def emails(self):
return self._P_emails
@emails.setter
def emails(self, value):
if not isinstance(value, list) and value is not None:
if isinstance(value, basestring) and j.basetype.list.checkString(value):
value = j.basetype.list.fromString(value)
else:
msg="property emails input error, needs to be list, specfile: /opt/jumpscale/apps/osis/logic/test_complextype/model.spec, name model: user, value was:" + str(value)
raise TypeError(msg)
self._P_emails=value
@emails.deleter
def emails(self):
del self._P_emails
@property
def groups(self):
return self._P_groups
@groups.setter
def groups(self, value):
if not isinstance(value, list) and value is not None:
if isinstance(value, basestring) and j.basetype.list.checkString(value):
value = j.basetype.list.fromString(value)
else:
msg="property groups input error, needs to be list, specfile: /opt/jumpscale/apps/osis/logic/test_complextype/model.spec, name model: user, value was:" + str(value)
raise TypeError(msg)
self._P_groups=value
@groups.deleter
def groups(self):
del self._P_groups
@property
def guid(self):
return self._P_guid
@guid.setter
def guid(self, value):
if not isinstance(value, str) and value is not None:
            if isinstance(value, basestring) and j.basetype.string.checkString(value):
value = j.basetype.string.fromString(value)
else:
msg="p
|
roperty guid input error, needs to be str, specfile: /opt/jumpscale/apps/osis/logic/test_complextype/model.spec, name model: user, value was:" + str(value)
raise TypeError(msg)
self._P_guid=value
@guid.deleter
def guid(self):
del self._P_guid
@property
def _meta(self):
return self._P__meta
@_meta.setter
def _meta(self, value):
if not isinstance(value, list) and value is not None:
if isinstance(value, basestring) and j.basetype.list.checkString(value):
value = j.basetype.list.fromString(value)
else:
msg="property _meta input error, needs to be list, specfile: /opt/jumpscale/apps/osis/logic/test_complextype/model.spec, name model: user, value was:" + str(value)
raise TypeError(msg)
self._P__meta=value
@_meta.deleter
def _meta(self):
del self._P__meta
|
sunlaiqi/fundiy
|
src/shop/utils.py
|
Python
|
mit
| 2,973
| 0.006805
|
"""
This file includes commonly used utilities for this app.
"""
from datetime import datetime
today = datetime.now()
year = today.year
month = today.month
day = today.day
# The following are image upload helper functions. The first two are used for product uploads (front and back).
# The last two are used for design product uploads (front and back).
def front_image(instance, filename):
# file will be uploaded to MEDIA_ROOT/product_imgs/owner_<id>/product_<id>/Y/m/d/front/<filename>
    return 'product_imgs/owner_{0}/product_{1}/{2}/{3}/{4}/front/{5}'.format(instance.owner.id, instance.slug, year, month, day, filename)
def back_image(instance, filename):
    # file will be uploaded to MEDIA_ROOT/product_imgs/owner_<id>/product_<id>/Y/m/d/back/<filename>
return 'product_imgs/owner_{0}/product_{1}/{2}/{3}/{4}/back/{5}'.format(instance.owner.id, instance.slug, year, month, day, filename)
'''
def front_design_image(instance, filename):
# file will be uploaded to MEDIA_ROOT/product_imgs/designer_<id>/design_product_<id>/Y/m/d/front/<filename>
return 'product_imgs/designer_{0}/design_product_{1}/{2}/{3}/{4}/front/{5}'.format(instance.designer.id, instance.id, year, month, day, filename)
def back_design_image(instance, filename):
# file will be uploaded to MEDIA_ROOT/product_imgs/designer_<id>/design_product_<id>/Y/m/d/back/<filename>
return 'product_imgs/designer_{0}/design_product_{1}/{2}/{3}/{4}/back/{5}'.format(instance.designer.id, instance.id, year, month, day, filename)
'''
def fill_category_tree(model, deep=0, parent_id=0, tree=None):
    '''
    NAME::
        fill_category_tree
    DESCRIPTION::
        Walks a product category model that has a parent field and builds a
        tree-shaped choice structure from it.
    PARAMETERS::
        :param model: the model to traverse; it must have a parent attribute
        :param deep: indentation level; the parent/child hierarchy is shown
            by prefixing each name with this many dashes (---)
        :param parent_id: the parent category to start from; 0 means start
            from the top level
        :param tree: the tree tuple to fill in
    RETURN::
        No return value is strictly needed, but the tree is returned anyway
        in case a caller finds it convenient.
    USAGE::
        Call it like this:
        choices = [()]
        fill_category_tree(Category, tree=choices)
        A list [] is used instead of a tuple () because only a list is
        passed by reference and can accumulate the results.
    '''
    if tree is None:
        tree = [()]
    if parent_id == 0:
        ts = model.objects.filter(parent = None)
        # tree[0] += ((None, 'Select a product type'),)
        for t in ts:
            tmp = [()]
            fill_category_tree(model, 4, t.id, tmp)
            tree[0] += ((t.id, '-'*deep + t.name,),)
            for tt in tmp[0]:
                tree[0] += (tt,)
    else:
        ts = model.objects.filter(parent_id = parent_id)
        for t in ts:
            tree[0] += ((t.id, '-'*deep + t.name,),)
            fill_category_tree(model, deep + 4, t.id, tree)
    return tree
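# Call pattern illustration (added sketch; `Category` stands in for any
# Django model with a `parent` foreign key, as the docstring assumes):
#
#     choices = [()]
#     fill_category_tree(Category, tree=choices)
#     # choices[0] is now a tuple of (id, '---name') pairs, ready to be
#     # used as the `choices` of a form field.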
|
terhorstd/nest-simulator
|
pynest/nest/tests/test_sp/test_disconnect_multiple.py
|
Python
|
gpl-2.0
| 10,751
| 0
|
# -*- coding: utf-8 -*-
#
# test_disconnect_multiple.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
import nest
import unittest
__author__ = 'naveau'
class TestDisconnect(unittest.TestCase):
def setUp(self):
nest.ResetKernel()
nest.hl_api.set_verbosity('M_ERROR')
self.exclude_synapse_model = [
'stdp_dopamine_synapse',
            'stdp_dopamine_synapse_lbl',
'stdp_dopamine_synapse_hpc',
'stdp_dopamine_synapse_hpc_lbl',
'gap_junction',
'gap_junction_lbl',
'diffusion_connection',
'diffusion_connection_lbl',
'rate_connection_instantaneous',
'rate_connection_instantaneous_lbl',
'rate_connection_delayed',
            'rate_connection_delayed_lbl',
'clopath_synapse',
'clopath_synapse_lbl'
]
def test_multiple_synapse_deletion_all_to_all(self):
for syn_model in nest.Models('synapses'):
if syn_model not in self.exclude_synapse_model:
nest.ResetKernel()
nest.CopyModel('static_synapse', 'my_static_synapse')
nest.SetDefaults(syn_model, {'delay': 0.5})
syn_dict = {
'model': syn_model,
'pre_synaptic_element': 'SE1',
'post_synaptic_element': 'SE2'
}
nest.SetKernelStatus({
'min_delay': 0.1,
'max_delay': 1.0,
'structural_plasticity_synapses': {'syn1': syn_dict}
})
neurons = nest.Create('iaf_psc_alpha', 10, {
'synaptic_elements': {
'SE1': {'z': 0.0, 'growth_rate': 0.0},
'SE2': {'z': 0.0, 'growth_rate': 0.0}
}
})
nest.Connect(neurons, neurons, "all_to_all", syn_dict)
# Test if the connected synaptic elements before the simulation
# are correct
status = nest.GetStatus(neurons, 'synaptic_elements')
for st_neuron in status:
self.assertEqual(10, st_neuron['SE1']['z_connected'])
self.assertEqual(10, st_neuron['SE2']['z_connected'])
srcId = range(0, 5)
targId = range(5, 10)
conns = nest.GetConnections(srcId, targId, syn_model)
assert conns
conndictionary = {'rule': 'all_to_all'}
syndictionary = {'model': syn_model}
nest.Disconnect(
[neurons[i] for i in srcId],
[neurons[i] for i in targId],
conndictionary,
syndictionary
)
status = nest.GetStatus(neurons, 'synaptic_elements')
for st_neuron in status[0:5]:
self.assertEqual(5, st_neuron['SE1']['z_connected'])
self.assertEqual(10, st_neuron['SE2']['z_connected'])
for st_neuron in status[5:10]:
self.assertEqual(10, st_neuron['SE1']['z_connected'])
self.assertEqual(5, st_neuron['SE2']['z_connected'])
def test_multiple_synapse_deletion_one_to_one(self):
for syn_model in nest.Models('synapses'):
if syn_model not in self.exclude_synapse_model:
nest.ResetKernel()
nest.CopyModel('static_synapse', 'my_static_synapse')
nest.SetDefaults(syn_model, {'delay': 0.5})
syn_dict = {
'model': syn_model,
'pre_synaptic_element': 'SE1',
'post_synaptic_element': 'SE2'
}
nest.SetKernelStatus({
'min_delay': 0.1,
'max_delay': 1.0,
'structural_plasticity_synapses': {'syn1': syn_dict}
})
neurons = nest.Create('iaf_psc_alpha', 10, {
'synaptic_elements': {
'SE1': {'z': 0.0, 'growth_rate': 0.0},
'SE2': {'z': 0.0, 'growth_rate': 0.0}
}
})
nest.Connect(neurons, neurons, "all_to_all", syn_dict)
# Test if the connected synaptic elements before the simulation
# are correct
status = nest.GetStatus(neurons, 'synaptic_elements')
for st_neuron in status:
self.assertEqual(10, st_neuron['SE1']['z_connected'])
self.assertEqual(10, st_neuron['SE2']['z_connected'])
srcId = range(0, 5)
targId = range(5, 10)
conns = nest.GetConnections(srcId, targId, syn_model)
assert conns
conndictionary = {'rule': 'one_to_one'}
syndictionary = {'model': syn_model}
nest.Disconnect(
[neurons[i] for i in srcId],
[neurons[i] for i in targId],
conndictionary,
syndictionary
)
status = nest.GetStatus(neurons, 'synaptic_elements')
for st_neuron in status[0:5]:
self.assertEqual(9, st_neuron['SE1']['z_connected'])
self.assertEqual(10, st_neuron['SE2']['z_connected'])
for st_neuron in status[5:10]:
self.assertEqual(10, st_neuron['SE1']['z_connected'])
self.assertEqual(9, st_neuron['SE2']['z_connected'])
def test_multiple_synapse_deletion_one_to_one_no_sp(self):
for syn_model in nest.Models('synapses'):
if syn_model not in self.exclude_synapse_model:
nest.ResetKernel()
nest.CopyModel('static_synapse', 'my_static_synapse')
neurons = nest.Create('iaf_psc_alpha', 10)
syn_dict = {'model': syn_model}
nest.Connect(neurons, neurons, "all_to_all", syn_dict)
srcId = range(0, 5)
targId = range(5, 10)
conns = nest.GetConnections(srcId, targId, syn_model)
assert len(conns) == 20
conndictionary = {'rule': 'one_to_one'}
syndictionary = {'model': syn_model}
nest.Disconnect(
[neurons[i] for i in srcId],
[neurons[i] for i in targId],
conndictionary,
syndictionary
)
conns = nest.GetConnections(srcId, targId, syn_model)
assert len(conns) == 16
def test_single_synapse_deletion_sp(self):
for syn_model in nest.Models('synapses'):
if syn_model not in self.exclude_synapse_model:
nest.ResetKernel()
nest.CopyModel('static_synapse', 'my_static_synapse')
syn_dict = {
'model': syn_model,
'pre_synaptic_element': 'SE1',
'post_synaptic_element': 'SE2'
}
# nest.SetKernelStatus(
# {'structural_plasticity_synapses': {'syn1': syn_dict}}
# )
neurons = nest.Create('iaf_psc_alpha', 2, {
'synaptic_elements': {
'SE1': {'z': 0.0, 'growth_rate': 0.0},
|
Koppermann/mod-mul-mersenne
|
mod_mul_mersenne/classes.py
|
Python
|
mit
| 2,433
| 0.000822
|
"""File holds the three classes Bit, DigitProduct, and PartialProduct."""
class Bit:
"""Class Bit represents a single bit of a digit-product."""
def __init__(self, identifier, absolute, relative):
self.identifier = identifier
self.absolute = absolute
self.relative = relative
def shift(self, x_bits):
"""Shift bit in its absolute position by x_bits."""
self.absolute %= x_bits
def print_info(self):
"""Print class info."""
print("identifier =", self.identifier)
print("absolute =", self.absolute)
print("relative =", self.relative)
class DigitProduct():
"""Class DigitProduct represents a DSP multiplier i.e. digit-product."""
def __init__(self, identifier, lsb, msb):
self.identifier = identifier
self.lsb = lsb
        self.msb = msb
def slice_block(self):
"""Slice digit-product in single bits."""
bit_list = []
for i in range(0, self.msb-self.lsb+1):
bit_list.append(Bit(self.identifier, self.lsb+i, i))
return bit_list
def print_info(self):
"""Print class info."""
print("identifier =", self.identifier)
print(self.msb, "downto", self.lsb)
class PartialProduct:
"""Class PartialProduct represents a partial-product that can hold an
undefined amount of class Bit instances."""
def __init__(self, exp_prime):
self.bit_list = []
self.exp_prime = exp_prime
def add_bit(self, new_bit):
"""Add bit to current partial-product."""
for current_bit in self.bit_list:
if current_bit.absolute == new_bit.absolute:
return False
self.bit_list.append(new_bit)
return True
def print_info(self):
"""Print class info of all bits contained in this partial-product."""
for current_bit in self.bit_list:
current_bit.print_info()
def print_line(self, line_number):
"""Print partial-product indicating whether bit positions are taken."""
print("PP%#02d"% line_number, end=" ")
for i in range(0, self.exp_prime):
success = 0
for current_bit in self.bit_list:
if current_bit.absolute == i:
success = 1
if success == 1:
print("o", end="")
else:
print("x", end="")
print("")
|
openpli-arm/enigma2-arm
|
RecordTimer.py
|
Python
|
gpl-2.0
| 26,731
| 0.033781
|
from enigma import eEPGCache, getBestPlayableServiceReference, \
eServiceReference, iRecordableService, quitMainloop
from Components.config import config
from Components.UsageConfig import defaultMoviePath
from Components.TimerSanityCheck import TimerSanityCheck
from Screens.MessageBox import MessageBox
import Screens.Standby
from Tools import Directories, Notifications, ASCIItranslit, Trashcan
from Tools.XMLTools import stringToXML
import timer
import xml.etree.cElementTree
import NavigationInstance
from ServiceReference import ServiceReference
from time import localtime, strftime, ctime, time
from bisect import insort
# ok, for descriptions etc we have:
# service reference (to get the service name)
# name (title)
# description (description)
# event data (ONLY for time adjustments etc.)
# parses an event, and gives out a (begin, end, name, description, eit)-tuple.
# begin and end will be corrected
def parseEvent(ev, description = True):
if description:
name = ev.getEventName()
description = ev.getShortDescription()
if description == "":
description = ev.getExtendedDescription()
else:
name = ""
description = ""
begin = ev.getBeginTime()
end = begin + ev.getDuration()
eit = ev.getEventId()
begin -= config.recording.margin_before.value * 60
end += config.recording.margin_after.value * 60
return (begin, end, name, description, eit)
class AFTEREVENT:
NONE = 0
STANDBY = 1
DEEPSTANDBY = 2
AUTO = 3
# please do not translate log messages
class RecordTimerEntry(timer.TimerEntry, object):
######### the following static methods and members are only in use when the box is in (soft) standby
receiveRecordEvents = False
@staticmethod
def shutdown():
quitMainloop(1)
@staticmethod
def staticGotRecordEvent(recservice, event):
if event == iRecordableService.evEnd:
print "RecordTimer.staticGotRecordEvent(iRecordableService.evEnd)"
recordings = NavigationInstance.instance.getRecordings()
if not recordings: # no more recordings exist
rec_time = NavigationInstance.instance.RecordTimer.getNextRecordingTime()
if rec_time > 0 and (rec_time - time()) < 360:
print "another recording starts in", rec_time - time(), "seconds... do not shutdown yet"
else:
print "no starting records in the next 360 seconds... immediate shutdown"
RecordTimerEntry.shutdown() # immediate shutdown
elif event == iRecordableService.evStart:
print "RecordTimer.staticGotRecordEvent(iRecordableService.evStart)"
@staticmethod
def stopTryQuitMainloop():
print "RecordTimer.stopTryQuitMainloop"
NavigationInstance.instance.record_event.remove(RecordTimerEntry.staticGotRecordEvent)
RecordTimerEntry.receiveRecordEvents = False
@staticmethod
def TryQuitMainloop(default_yes = True):
if not RecordTimerEntry.receiveRecordEvents:
print "RecordTimer.TryQuitMainloop"
NavigationInstance.instance.record_event.append(RecordTimerEntry.staticGotRecordEvent)
RecordTimerEntry.receiveRecordEvents = True
# send fake event.. to check if another recordings are running or
# other timers start in a few seconds
RecordTimerEntry.staticGotRecordEvent(None, iRecordableService.evEnd)
# send normal notification for the case the user leave the standby now..
Notifications.AddNotification(Screens.Standby.TryQuitMainloop, 1, onSessionOpenCallback=RecordTimerEntry.stopTryQuitMainloop, default_yes = default_yes)
#################################################################
def __init__(self, serviceref, begin, end, name, description, eit, disabled = False, justplay = False, afterEvent = AFTEREVENT.AUTO, checkOldTimers = False, dirname = None, tags = None):
timer.TimerEntry.__init__(self, int(begin), int(end))
if checkOldTimers == True:
if self.begin < time() - 1209600:
self.begin = int(time())
if self.end < self.begin:
self.end = self.begin
assert isinstance(serviceref, ServiceReference)
if serviceref.isRecordable():
self.service_ref = serviceref
else:
self.service_ref = ServiceReference(None)
self.eit = eit
self.dontSave = False
self.name = name
self.description = description
self.disabled = disabled
self.timer = None
self.__record_service = None
self.start_prepare = 0
self.justplay = justplay
self.afterEvent = afterEvent
self.dirname = dirname
self.dirnameHadToFallback = False
self.autoincrease = False
self.autoincreasetime = 3600 * 24 # 1 day
self.tags = tags or []
self.log_entries = []
self.resetState()
def log(self, code, msg):
self.log_entries.append((int(time()), code, msg))
print "[TIMER]", msg
def calculateFilename(self):
service_name = self.service_ref.getServiceName()
begin_date = strftime("%Y%m%d %H%M", localtime(self.begin))
begin_shortdate = strftime("%Y%m%d", localtime(self.begin))
print "begin_date: ", begin_date
print "service_name: ", service_name
print "name:", self.name
print "description: ", self.description
filename = begin_date + " - " + service_name
if self.name:
if config.usage.setup_level.index >= 2: # expert+
if config.recording.filename_composition.value == "short":
filename = begin_shortdate + " - " + self.name
elif config.recording.filename_composition.value == "long":
filename += " - " + self.name + " - " + self.description
else:
filename += " - " + self.name # standard
else:
filename += " - " + self.name
if config.recording.ascii_filenames.value:
filename = ASCIItranslit.legacyEncode(filename)
if not self.dirname or not Directories.fileExists(self.dirname, 'w'):
if self.dirname:
self.dirnameHadToFallback = True
dirname = defaultMoviePath()
else:
dirname = self.dirname
self.Filename = Directories.getRecordingFilename(filename, dirname)
self.log(0, "Filename calculated as: '%s'" % self.Filename)
#begin_date + " - " + service_name + description)
def tryPrepare(self):
if self.justplay:
return True
else:
self.calculateFilename()
rec_ref = self.service_ref and self.service_ref.ref
if rec_ref and rec_ref.flags & eServiceReference.isGroup:
rec_ref = getBestPlayableServiceReference(rec_ref, eServiceReference())
if not rec_ref:
self.log(1, "'get best playable service for group... record' failed")
return False
self.record_service = rec_ref and NavigationInstance.instance.recordService(rec_ref)
if not self.record_service:
self.log(1, "'record service' failed")
return False
if self.repeated:
epgcache = eEPGCache.getInstance()
queryTime=self.begin+(self.end-self.begin)/2
evt = epgcache.lookupEventTime(rec_ref, queryTime)
if evt:
self.description = evt.getShortDescription()
if self.description == "":
                        self.description = evt.getExtendedDescription()
event_id = evt.getEventId()
else:
event_id = -1
else:
event_id = self.eit
if event_id is None:
event_id = -1
prep_res=self.record_service.prepare(self.Filename + ".ts", self.begin, self.end, event_id, self.name.replace("\n", ""), self.description.replace("\n", ""), ' '.join(self.tags))
if prep_res:
if prep_res == -255:
self.log(4, "failed to write meta information")
else:
self.log(2, "'prepare' failed: error %d" % prep_res)
                # we must calc our start time before the stopRecordService call because in Screens/Standby.py TryQuitMainloop tries to get
# the next start time in evEnd event handler...
self.do_backoff()
self.start_prepare = time() + self.backoff
NavigationInstance.instance.stopRecordService(self.record_service)
self.record_service = None
return False
return True
def do_backoff(self):
if self.backoff == 0:
self.backoff = 5
else:
self.backoff *= 2
if self.backoff > 100:
self.backoff = 100
self.log(10, "backoff: retry in %d seconds" % self.backoff)
def activate(self):
next_state = self.state + 1
self.log(5, "activating state %d" % next_state)
if next_state == self.StatePrepared:
if self.tryPrepare():
self.log(6, "prepare ok, waiting for begin")
# create file to "reserve" the filename
# because another recording at the sam
|
1iyiwei/pyml
|
code/ch13/theano_test.py
|
Python
|
mit
| 29
| 0
|
import theano
theano.test()
|
audebert/alot
|
alot/foreign/urwidtrees/example4.filesystem.py
|
Python
|
gpl-3.0
| 4,462
| 0.002465
|
#!/usr/bin/python
# Copyright (C) 2013 Patrick Totzke <patricktotzke@gmail.com>
# This file is released under the GNU GPL, version 3 or a later revision.
import urwid
import os
from example1 import palette # example data
from widgets import TreeBox
from tree import Tree
from decoration import CollapsibleArrowTree
# define selectable urwid.Text widgets to display paths
class FocusableText(urwid.WidgetWrap):
"""Widget to display paths lines"""
def __init__(self, txt):
t = urwid.Text(txt)
w = urwid.AttrMap(t, 'body', 'focus')
urwid.WidgetWrap.__init__(self, w)
def selectable(self):
return True
    def keypress(self, size, key):
return key
# define Tree that can walk your filesystem
class DirectoryTree(Tree):
"""
A custom Tree representing our filesystem structure.
This implementation is rather inefficient: basically every position-lookup
will call `os.listdir`.. This makes navigation in the tree quite slow.
In real life you'd want to do some caching.
As positions we use absolute path strings.
"""
# determine dir separator and form of root node
pathsep = os.path.sep
drive, _ = os.path.splitdrive(pathsep)
    # define root node. This is part of the Tree API!
root = drive + pathsep
def __getitem__(self, pos):
return FocusableText(pos)
# generic helper
def _list_dir(self, path):
"""returns absolute paths for all entries in a directory"""
try:
elements = [os.path.join(
path, x) for x in os.listdir(path) if os.path.isdir(path)]
elements.sort()
except OSError:
elements = None
return elements
def _get_siblings(self, pos):
"""lists the parent directory of pos """
parent = self.parent_position(pos)
siblings = [pos]
if parent is not None:
siblings = self._list_dir(parent)
return siblings
# Tree API
def parent_position(self, pos):
parent = None
if pos != '/':
parent = os.path.split(pos)[0]
return parent
def first_child_position(self, pos):
candidate = None
if os.path.isdir(pos):
children = self._list_dir(pos)
if children:
candidate = children[0]
return candidate
def last_child_position(self, pos):
candidate = None
if os.path.isdir(pos):
children = self._list_dir(pos)
if children:
candidate = children[-1]
return candidate
def next_sibling_position(self, pos):
candidate = None
siblings = self._get_siblings(pos)
myindex = siblings.index(pos)
if myindex + 1 < len(siblings): # pos is not the last entry
candidate = siblings[myindex + 1]
return candidate
def prev_sibling_position(self, pos):
candidate = None
siblings = self._get_siblings(pos)
myindex = siblings.index(pos)
if myindex > 0: # pos is not the first entry
candidate = siblings[myindex - 1]
return candidate
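    # Caching sketch for the inefficiency noted in the class docstring
    # (added illustration, not part of the original example): memoize
    # directory listings so repeated position lookups stay off the disk.
    # The cache is class-level and goes stale if the filesystem changes.
    _dir_cache = {}

    def _list_dir_cached(self, path):
        """Hypothetical cached variant of _list_dir."""
        if path not in self._dir_cache:
            self._dir_cache[path] = self._list_dir(path)
        return self._dir_cache[path]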
def unhandled_input(k):
#exit on q
if k in ['q', 'Q']: raise urwid.ExitMainLoop()
if __name__ == "__main__":
cwd = os.getcwd() # get current working directory
dtree = DirectoryTree() # get a directory walker
# Use CollapsibleArrowTree for decoration.
# define initial collapse:
as_deep_as_cwd = lambda pos: dtree.depth(pos) >= dtree.depth(cwd)
# We hide the usual arrow tip and use a customized collapse-icon.
decorated_tree = CollapsibleArrowTree(dtree,
is_collapsed=as_deep_as_cwd,
arrow_tip_char=None,
icon_frame_left_char=None,
icon_frame_right_char=None,
icon_collapsed_char=u'\u25B6',
icon_expanded_char=u'\u25B7',)
# stick it into a TreeBox and use 'body' color attribute for gaps
tb = TreeBox(decorated_tree, focus=cwd)
root_widget = urwid.AttrMap(tb, 'body')
#add a text footer
footer = urwid.AttrMap(urwid.Text('Q to quit'), 'focus')
#enclose all in a frame
urwid.MainLoop(urwid.Frame(root_widget, footer=footer), palette, unhandled_input = unhandled_input).run() # go
|
CLVsol/clvsol_odoo_addons
|
clv_external_sync/models/external_sync_template.py
|
Python
|
agpl-3.0
| 2,540
| 0.001181
|
# -*- coding: utf-8 -*-
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import fields, models
class ExternalSyncTemplate(models.Model):
    _description = 'External Sync Template'
    _name = 'clv.external_sync.template'
_order = 'name'
name = fields.Char(
string='Name',
required=True,
help='External Sync Template Name'
)
external_host_id = fields.Many2one(
comodel_name='clv.external_sync.host',
string='External Host'
)
external_max_task = fields.Integer(
string='Max Task Registers'
)
external_disable_identification = fields.Boolean(
string='Disable Identification'
)
external_disable_check_missing = fields.Boolean(
string='Disable Check Missing'
)
external_disable_inclusion = fields.Boolean(
string='Disable Inclusion'
)
external_disable_sync = fields.Boolean(
string='Disable Sync'
)
external_last_update_start = fields.Datetime(
string="Last Update (Start)"
)
external_last_update_end = fields.Datetime(
string="Last Update (End)"
)
enable_sequence_code_sync = fields.Boolean(
string='Enable Sequence Code Sync'
)
notes = fields.Text(string='Notes')
date_inclusion = fields.Datetime(
string='Inclusion Date',
default=fields.Datetime.now)
model = fields.Char(
string='Model',
required=True,
help="Model name of the object on which the synchronization method to be called is located, e.g. 'res.partner'"
)
method = fields.Char(
string='Method',
required=True,
help="Name of the method to be called when the synchronization job is processed."
)
sequence_code = fields.Char(
string='Sequence Code',
required=False,
help="Code of the Sequence to be synchronized when the synchronization job is processed."
)
external_model = fields.Char(
string='External Model',
required=True,
help="External model name, e.g. 'res.partner'"
)
external_sequence_code = fields.Char(
string='External Sequence Code',
required=False,
help="External Sequence Code, e.g. 'clv_address.code."
)
active = fields.Boolean(string='Active', default=1)
_sql_constraints = [
('name_uniq',
'UNIQUE (name)',
u'Error! The Name must be unique!'),
]
|
cyli/volatility
|
volatility/plugins/mac/socket_filters.py
|
Python
|
gpl-2.0
| 3,566
| 0.00673
|
# Volatility
# Copyright (C) 2007-2013 Volatility Foundation
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
"""
@author: Andrew Case
@license: GNU General Public License 2.0
@contact: atcuno@gmail.com
@organization:
"""
import volatility.obj as obj
import volatility.plugins.mac.common as common
import volatility.plugins.mac.lsmod as lsmod
from volatility.renderers import TreeGrid
from volatility.renderers.basic import Address
class mac_socket_filters(lsmod.mac_lsmod):
""" Reports socket filters """
def calculate(self):
        common.set_plugin_members(self)
# get the symbols need to check for if rootkit or not
(kernel_symbol_addresses, kmods) = common.get_kernel_addrs(self)
members = ["sf_unregistered", "sf_attach", "sf_detach", "sf_notify", "sf_getpeername", "sf_getsockname"]
members = members + ["sf_data_in", "sf_data_out", "sf_connect_in", "sf_
|
connect_out", "sf_bind", "sf_setoption"]
members = members + ["sf_getoption", "sf_listen", "sf_ioctl"]
sock_filter_head_addr = self.addr_space.profile.get_symbol("_sock_filter_head")
sock_filter_list = obj.Object("socket_filter_list", offset = sock_filter_head_addr, vm = self.addr_space)
cur = sock_filter_list.tqh_first
while cur:
filter = cur.sf_filter
filter_name = self.addr_space.read(filter.sf_name, 256)
            idx = filter_name.find("\x00")
if idx != -1:
filter_name = filter_name[:idx]
filter_socket = cur.sf_entry_head.sfe_socket.obj_offset
for member in members:
ptr = filter.m(member)
if not ptr:
continue
(good, module) = common.is_known_address_name(ptr.v(), kernel_symbol_addresses, kmods)
yield good, filter, filter_name, filter_socket, member, ptr, module
cur = cur.sf_global_next.tqe_next
def unified_output(self, data):
return TreeGrid([("Offset (V)", Address),
("Filter Name", str),
("Filter Member", str),
("Socket (V)", Address),
("Handler", Address),
("Module", str),
("Status", str),
], self.generator(data))
def generator(self, data):
for (good, filter, filter_name, filter_socket, member, ptr, module) in data:
if good == 0:
status = "UNKNOWN"
else:
status = "OK"
yield(0, [
Address(filter.obj_offset),
str(filter_name),
str(member),
Address(filter_socket),
Address(ptr),
str(module),
str(status),
])
|
benosment/recipes
|
functional_tests/test_edit_recipe.py
|
Python
|
mit
| 8,273
| 0.00411
|
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from .base import FunctionalTest
class RecipeEditTest(FunctionalTest):
def test_can_add_a_recipe(self):
# Ben goes to the recipe website homepage
self.browser.get(self.server_url)
# He notices the page title mention cookbook
self.assertIn('cookbook', self.browser.title)
# He is invited to enter his name to create his own cookbook or
# view other user's cookbook's
# Ben wants to create his own right now, so he enters his name
# and then clicks the 'get started button'
# TODO -- duplication here. consider refactoring if there is a third instance
username_input = self.browser.find_element_by_id('id_username')
username_input.send_keys('ben')
username_input.send_keys(Keys.ENTER)
# Ben goes to a unique URL which includes his name
ben_url = self.browser.current_url
self.assertRegex(ben_url, '/users/ben.+')
# He is invited to click on a link to add a new recipe
add_recipe_button = self.browser.find_element_by_id('id_add_recipe_button')
self.assertIn('Add recipe', add_recipe_button.text)
# He clicks on the link and new page appears
add_recipe_button.click()
        # When he adds a new recipe, he is taken to a new URL
self.assertRegex(self.browser.current_url, '/users/.*/add_recipe')
# He sees a form with a textbox for name, ingredients, directions and servings
# along with a 'cancel' and 'add' button
header_text = self.browser.find_element_by_tag_name('h1').text
self.assertIn('Add Recipe', header_text)
name_textbox = self.browser.find_element_by_id('id_title')
self.assertEqual(name_textbox.get_attribute('placeholder'),
'Enter the title of the recipe')
ingredients_textbox = self.browser.find_element_by_id('id_ingredients')
directions_textbox = self.browser.find_element_by_id('id_directions')
servings_textbox = self.browser.find_element_by_id('id_servings')
add_button = self.browser.find_element_by_id('id_add_button')
# He types in Grilled Halibut with Mango-Avocado Salsa into the textbox for name
name_textbox.send_keys('Grilled Halibut with Mango-Avocado Salsa')
# He types in ingredients:
ingredients_textbox.send_keys('1 medium ripe avocado, peeled and cut into 1/2" dice')
ingredients_textbox.send_keys(Keys.ENTER)
ingredients_textbox.send_keys('1 medium ripe mango, peeled and cut into 1/2" dice')
ingredients_textbox.send_keys(Keys.ENTER)
ingredients_textbox.send_keys('1 cup cherry tomatoes, quartered')
ingredients_textbox.send_keys(Keys.ENTER)
ingredients_textbox.send_keys('4 large fresh basil leaves, thinly sliced')
ingredients_textbox.send_keys(Keys.ENTER)
ingredients_textbox.send_keys('3 tablespoons extra-virgin olive oil, divided, plus more for brushing')
ingredients_textbox.send_keys(Keys.ENTER)
ingredients_textbox.send_keys('3 tablespoons fresh lime juice, divided')
ingredients_textbox.send_keys(Keys.ENTER)
ingredients_textbox.send_keys('Kosher salt and freshly ground black pepper')
ingredients_textbox.send_keys(Keys.ENTER)
ingredients_textbox.send_keys('4 6-ounce halibut or mahi-mahi fillets')
ingredients_textbox.send_keys(Keys.ENTER)
ingredients_textbox.send_keys('4 lime wedges')
# He then types in the following for directions:
directions_textbox.send_keys('Prepare a grill to medium-high heat. Gently combine the avocado, mango, '
'tomatoes, basil, 1 tablespoon oil, and 1 tablespoon lime juice in a large mixing '
'bowl. Season salsa to taste with salt and pepper and set aside at room '
'temperature, gently tossing occasionally.')
directions_textbox.send_keys(Keys.ENTER)
directions_textbox.send_keys('Place fish fillets in a 13x9x2" glass baking dish. Drizzle remaining 2 '
'tablespoon oil and 2 tablespoon lime juice over. Season fish with salt and '
'pepper. Let marinate at room temperature for 10 minutes, turning fish '
'occasionally.')
directions_textbox.send_keys(Keys.ENTER)
directions_textbox.send_keys('Brush grill rack with oil. Grill fish until just opaque in center, about 5 '
'minutes per side. Transfer to plates. Spoon mango-avocado salsa over fish. '
'Squeeze a lime wedge over each and serve.')
# He then types in the servings
servings_textbox.send_keys('7')
# Finally, he clicks the add button
add_button.click()
# He is returned to the main page
# He sees that the recipe appears in the list of recipes
self.check_for_row_in_list_table('Grilled Halibut with Mango-Avocado Salsa')
# Ben then clicks on a recipe to get the full info
recipe_link = self.browser.find_element_by_link_text('Grilled Halibut with Mango-Avocado Salsa')
recipe_link.click()
# He is taken to a new page which has the title in the url
self.assertRegex(self.browser.current_url, '/users/(\S+)/recipe/grilled-halibut-with-mango-avocado-salsa')
# The new page lists all of the ingredients and directions
page_text = self.browser.find_element_by_tag_name('body').text
self.assertIn('1 medium ripe avocado, peeled and cut into 1/2" dice', page_text)
self.assertIn('Prepare a grill to medium-high heat. Gently combine the avocado, mango, ', page_text)
# He then remembers that the servings are for 8 people and a chili pepper is needed. He clicks
# on the edit button to start editing
edit_button = self.browser.find_element_by_id('id_edit_button')
self.assertIn('Edit', edit_button.text)
edit_button.click()
# The edit page shows the same text as before
page_text = self.browser.find_element_by_tag_name('body').text
self.assertIn('1 medium ripe avocado, peeled and cut into 1/2" dice', page_text)
self.assertIn('Prepare a grill to medium-high heat. Gently combine the avocado, mango, ', page_text)
# He changes the number of servings from 7 to 8
servings_textbox = self.browser.find_element_by_id('id_servings')
servings_textbox.send_keys(Keys.BACKSPACE)
servings_textbox.send_keys('8')
# He adds chili pepper to the list of ingredients
ingredients_textbox = self.browser.find_element_by_id('id_ingredients')
ingredients_textbox.send_keys(Keys.ENTER)
ingredients_textbox.send_keys('1 chili pepper')
# He adds a note for next time
notes_textbox = self.browser.find_element_by_id('id_notes')
notes_textbox.send_keys("Wasn't that spicy, added a pepper")
# He then clicks the save button
save_button = self.browser.find_element_by_id('id_save_button')
self.assertIn('Save', save_button.text)
save_button.click()
# He is returned to the recipe page
self.assertRegex(self.browser.current_url, '/users/(\S+)/recipe/grilled-halibut-with-mango-avocado-salsa')
# He can see his changes reflected on the page
page_text = self.browser.find_element_by_tag_name('body').text
self.assertIn('8', page_text)
self.assertNotIn('7', page_text)
self.assertIn('1 chili pepper', page_text)
self.assertIn('added a pepper', page_text)
#self.fail('Finish the test')
# He changes his mind and cancels
# cancel_button = self.browser.find_element_by_name('id_cancel_button')
#cancel_button.click()
# He is returned to the main page
# The number of recipes is still 1
# table = self.browser.find_element_by_id('id_recipe_table')
|
jamestwebber/scipy
|
scipy/signal/wavelets.py
|
Python
|
bsd-3-clause
| 13,701
| 0.000292
|
from __future__ import division, print_function, absolute_import
import numpy as np
from numpy.dual import eig
from scipy.special import comb
from scipy.signal import convolve
__all__ = ['daub', 'qmf', 'cascade', 'morlet', 'ricker', 'morlet2', 'cwt']
def daub(p):
"""
The coefficients for the FIR low-pass filter producing Daubechies wavelets.
p>=1 gives the order of the zero at f=1/2.
There are 2p filter coefficients.
Parameters
----------
p : int
Order of the zero at f=1/2, can have values from 1 to 34.
Returns
-------
daub : ndarray
        The 2p filter coefficients.
"""
sqrt = np.sqrt
if p < 1:
raise ValueError("p must be at least 1.")
if p == 1:
c = 1 / sqrt(2)
return np.array([c, c])
elif p == 2:
f = sqrt(2) / 8
        c = sqrt(3)
return f * np.array([1 + c, 3 + c, 3 - c, 1 - c])
elif p == 3:
tmp = 12 * sqrt(10)
z1 = 1.5 + sqrt(15 + tmp) / 6 - 1j * (sqrt(15) + sqrt(tmp - 15)) / 6
z1c = np.conj(z1)
f = sqrt(2) / 8
d0 = np.real((1 - z1) * (1 - z1c))
a0 = np.real(z1 * z1c)
a1 = 2 * np.real(z1)
return f / d0 * np.array([a0, 3 * a0 - a1, 3 * a0 - 3 * a1 + 1,
a0 - 3 * a1 + 3, 3 - a1, 1])
elif p < 35:
# construct polynomial and factor it
if p < 35:
P = [comb(p - 1 + k, k, exact=1) for k in range(p)][::-1]
yj = np.roots(P)
else: # try different polynomial --- needs work
P = [comb(p - 1 + k, k, exact=1) / 4.0**k
for k in range(p)][::-1]
yj = np.roots(P) / 4
# for each root, compute two z roots, select the one with |z|>1
# Build up final polynomial
c = np.poly1d([1, 1])**p
q = np.poly1d([1])
for k in range(p - 1):
yval = yj[k]
part = 2 * sqrt(yval * (yval - 1))
const = 1 - 2 * yval
z1 = const + part
if (abs(z1)) < 1:
z1 = const - part
q = q * [1, -z1]
q = c * np.real(q)
# Normalize result
q = q / np.sum(q) * sqrt(2)
return q.c[::-1]
else:
raise ValueError("Polynomial factorization does not work "
"well for p too large.")
def qmf(hk):
"""
Return high-pass qmf filter from low-pass
Parameters
----------
hk : array_like
        Coefficients of low-pass filter.
"""
N = len(hk) - 1
asgn = [{0: 1, 1: -1}[k % 2] for k in range(N + 1)]
return hk[::-1] * np.array(asgn)
def cascade(hk, J=7):
"""
Return (x, phi, psi) at dyadic points ``K/2**J`` from filter coefficients.
Parameters
----------
hk : array_like
Coefficients of low-pass filter.
J : int, optional
Values will be computed at grid points ``K/2**J``. Default is 7.
Returns
-------
x : ndarray
The dyadic points ``K/2**J`` for ``K=0...N * (2**J)-1`` where
``len(hk) = len(gk) = N+1``.
phi : ndarray
The scaling function ``phi(x)`` at `x`:
``phi(x) = sum(hk * phi(2x-k))``, where k is from 0 to N.
psi : ndarray, optional
The wavelet function ``psi(x)`` at `x`:
``phi(x) = sum(gk * phi(2x-k))``, where k is from 0 to N.
`psi` is only returned if `gk` is not None.
Notes
-----
The algorithm uses the vector cascade algorithm described by Strang and
Nguyen in "Wavelets and Filter Banks". It builds a dictionary of values
and slices for quick reuse. Then inserts vectors into final vector at the
end.
"""
N = len(hk) - 1
if (J > 30 - np.log2(N + 1)):
raise ValueError("Too many levels.")
if (J < 1):
raise ValueError("Too few levels.")
# construct matrices needed
nn, kk = np.ogrid[:N, :N]
s2 = np.sqrt(2)
# append a zero so that take works
thk = np.r_[hk, 0]
gk = qmf(hk)
tgk = np.r_[gk, 0]
indx1 = np.clip(2 * nn - kk, -1, N + 1)
indx2 = np.clip(2 * nn - kk + 1, -1, N + 1)
m = np.zeros((2, 2, N, N), 'd')
m[0, 0] = np.take(thk, indx1, 0)
m[0, 1] = np.take(thk, indx2, 0)
m[1, 0] = np.take(tgk, indx1, 0)
m[1, 1] = np.take(tgk, indx2, 0)
m *= s2
# construct the grid of points
x = np.arange(0, N * (1 << J), dtype=float) / (1 << J)
phi = 0 * x
psi = 0 * x
# find phi0, and phi1
lam, v = eig(m[0, 0])
ind = np.argmin(np.absolute(lam - 1))
# a dictionary with a binary representation of the
# evaluation points x < 1 -- i.e. position is 0.xxxx
v = np.real(v[:, ind])
# need scaling function to integrate to 1 so find
# eigenvector normalized to sum(v,axis=0)=1
sm = np.sum(v)
if sm < 0: # need scaling function to integrate to 1
v = -v
sm = -sm
bitdic = {'0': v / sm}
bitdic['1'] = np.dot(m[0, 1], bitdic['0'])
step = 1 << J
phi[::step] = bitdic['0']
phi[(1 << (J - 1))::step] = bitdic['1']
psi[::step] = np.dot(m[1, 0], bitdic['0'])
psi[(1 << (J - 1))::step] = np.dot(m[1, 1], bitdic['0'])
# descend down the levels inserting more and more values
# into bitdic -- store the values in the correct location once we
# have computed them -- stored in the dictionary
# for quicker use later.
prevkeys = ['1']
for level in range(2, J + 1):
newkeys = ['%d%s' % (xx, yy) for xx in [0, 1] for yy in prevkeys]
fac = 1 << (J - level)
for key in newkeys:
# convert key to number
num = 0
for pos in range(level):
if key[pos] == '1':
num += (1 << (level - 1 - pos))
pastphi = bitdic[key[1:]]
ii = int(key[0])
temp = np.dot(m[0, ii], pastphi)
bitdic[key] = temp
phi[num * fac::step] = temp
psi[num * fac::step] = np.dot(m[1, ii], pastphi)
prevkeys = newkeys
return x, phi, psi
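# Worked example for the cascade algorithm above (added sketch, not part of
# the original module): sample the Daubechies-2 scaling function on a dyadic
# grid and check it integrates to ~1, matching the normalization chosen above.
def _cascade_demo(J=10):
    x, phi, psi = cascade(daub(2), J=J)
    dx = x[1] - x[0]
    return np.sum(phi) * dx  # expected: close to 1.0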
def morlet(M, w=5.0, s=1.0, complete=True):
"""
Complex Morlet wavelet.
Parameters
----------
M : int
Length of the wavelet.
w : float, optional
Omega0. Default is 5
s : float, optional
Scaling factor, windowed from ``-s*2*pi`` to ``+s*2*pi``. Default is 1.
complete : bool, optional
Whether to use the complete or the standard version.
Returns
-------
morlet : (M,) ndarray
See Also
--------
morlet2 : Implementation of Morlet wavelet, compatible with `cwt`.
scipy.signal.gausspulse
Notes
-----
The standard version::
pi**-0.25 * exp(1j*w*x) * exp(-0.5*(x**2))
This commonly used wavelet is often referred to simply as the
Morlet wavelet. Note that this simplified version can cause
admissibility problems at low values of `w`.
The complete version::
pi**-0.25 * (exp(1j*w*x) - exp(-0.5*(w**2))) * exp(-0.5*(x**2))
This version has a correction
term to improve admissibility. For `w` greater than 5, the
correction term is negligible.
Note that the energy of the return wavelet is not normalised
according to `s`.
The fundamental frequency of this wavelet in Hz is given
by ``f = 2*s*w*r / M`` where `r` is the sampling rate.
Note: This function was created before `cwt` and is not compatible
with it.
"""
x = np.linspace(-s * 2 * np.pi, s * 2 * np.pi, M)
output = np.exp(1j * w * x)
if complete:
output -= np.exp(-0.5 * (w**2))
output *= np.exp(-0.5 * (x**2)) * np.pi**(-0.25)
return output
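# Illustration of the admissibility note above (added sketch, not from the
# original docs): at w = 5 the correction term exp(-0.5*w**2) is ~3.7e-6,
# so the complete and standard Morlet wavelets nearly coincide.
def _morlet_versions_agree(M=256, w=5.0):
    return np.allclose(morlet(M, w=w, complete=True),
                       morlet(M, w=w, complete=False), atol=1e-4)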
def ricker(points, a):
"""
Return a Ricker wavelet, also known as the "Mexican hat wavelet".
It models the function:
``A * (1 - (x/a)**2) * exp(-0.5*(x/a)**2)``,
where ``A = 2/(sqrt(3*a)*(pi**0.25))``.
Parameters
----------
points : int
Number of points in `vector`.
Will be centered around 0.
a : scalar
Width parameter of the wavelet.
Returns
-------
vector : (N,) ndarray
Array of length `points` in shape of ricker curve.
Examples
|
hl475/vispek
|
examples/run_raw_file_io.py
|
Python
|
apache-2.0
| 1,604
| 0.003117
|
# Copyright 2017 The Vispek Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==========================================================================
""" Example code about how to run raw_file_io
python3 -m vispek.examples.run_raw_file_io \
--in_path /Users/huaminli/Downloads/data \
--out_path /Users/huaminli/Desktop/vispek/data
"""
import argparse
from vispek.lib.io.raw_file_io import RawFileIO
def run_file_io(args):
my_file_io = RawFileIO(args.in_path, args.out_path)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
        description='Example code about how to run raw_file_io')
parser.add_argument(
'--in_path', type=str,
help='absolute path to the directories that contains raw csv files')
parser.add_argument(
'--out_path', type=str,
help='absolute path to the directories that contains ' +
'preproceed files')
args = parser.parse_args()
print(args.in_path)
print(args.out_path)
run_file_io(args)
|
openhatch/oh-mainline
|
vendor/packages/scrapy/scrapy/commands/version.py
|
Python
|
agpl-3.0
| 850
| 0.004706
|
import sys
import platform
import twisted
import scrapy
from scrapy.command import ScrapyCommand
class Command(ScrapyCommand):
def syntax(self):
return "[-v]"
def short_desc(self):
        return "Print Scrapy version"
def add_options(self, parser):
ScrapyCommand.add_options(self, parser)
parser.add_option("--verbose", "-v", dest="verbose", action="store_true",
help="also display twisted/python/platform info (useful for bug reports)")
def run(self, args, opts):
if opts.verbose:
print "Scrapy : %s" % scrapy.__version__
print "Twisted : %s" % twisted.version.short()
print "Python : %s" % sys.version.replace("\
|
n", "- ")
print "Platform: %s" % platform.platform()
else:
print "Scrapy %s" % scrapy.__version__
|
tbenst/jupyter_webppl
|
jupyter_webppl/jupyter_webppl.py
|
Python
|
gpl-3.0
| 2,273
| 0.00132
|
# This code can be put in any Python module, it does not require IPython
# itself to be running already. It only creates the magics subclass but
# doesn't instantiate it yet.
# from __future__ import print_function
import json
from IPython.core.magic import (Magics, magics_class, line_magic,
cell_magic, line_cell_magic)
from IPython.display import HTML, display
# The class MUST call this class decorator at creation time
@magics_class
class WebpplMagics(Magics):
def __init__(self, **kwargs):
super(WebpplMagics, self).__init__(**kwargs)
@line_magic
def lmagic(self, line):
"my line magic"
print("Full access to the main IPython object:", self.shell)
print("Variables in the user namespace:", list(self.shell.user_ns.keys()))
return line
@cell_magic
    def webppl(self, line, cell):
"my cell magic"
code = json.dumps(cell)
store = json.dumps(self.shell.user_ns['store'])
h = """
<script>
requirejs.config({
paths: {
webppl: "//cdn.webppl.org/webppl-v0.9.1"
}
});
require(['webppl'], function(webppl) {
window.webppl = webppl;
});
</script>
<script>
const code = JSON.parse('""" + code + """');
const initialStore = JSON.parse('""" + store + """');
var result;
webppl.run(code, function(s,x) {result = x},
{initialStore: initialStore});
        const ret = JSON.stringify(result)
        IPython.kernel.Notebook.execute("result='"+ret+"'")
result
</script>
"""
display(HTML(h))
@line_cell_magic
def lcmagic(self, line, cell=None):
"Magic that works both as %lcmagic and as %%lcmagic"
if cell is None:
print("Called as line magic")
return line
else:
print("Called as cell magic")
return line, cell
def load_ipython_extension(ipython):
ip = ipython
# ip = get_ipython()
ip.register_magics(WebpplMagics)
if __name__ == "__main__":
load_ipython_extension(get_ipython())
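# Usage sketch (added illustration; assumes a running IPython kernel and a
# `store` dict already present in the user namespace, which the %%webppl
# cell magic reads):
#
#     %load_ext jupyter_webppl
#     store = {}
#     %%webppl
#     flip(0.5)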
|
mylxiaoyi/mypyqtgraph-qt5
|
pyqtgraph/imageview/ImageViewTemplate_pyqt.py
|
Python
|
mit
| 8,950
| 0.003017
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ImageViewTemplate.ui'
#
# Created: Thu May 1 15:20:40 2014
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtWidgets.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtWidgets.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtWidgets.QApplication.translate(context, text, disambig)
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName(_fromUtf8("Form"))
Form.resize(726, 588)
self.gridLayout_3 = QtWidgets.QGridLayout(Form)
#self.gridLayout_3.setMargin(0)
self.gridLayout_3.setSpacing(0)
self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3"))
self.splitter = QtWidgets.QSplitter(Form)
self.splitter.setOrientation(QtCore.Qt.Vertical)
self.splitter.setObjectName(_fromUtf8("splitter"))
self.layoutWidget = QtWidgets.QWidget(self.splitter)
self.layoutWidget.setObjectName(_fromUtf8("layoutWidget"))
self.gridLayout = QtWidgets.QGridLayout(self.layoutWidget)
self.gridLayout.setSpacing(0)
#self.gridLayout.setMargin(0)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.graphicsView = GraphicsView(self.layoutWidget)
self.graphicsView.setObjectName(_fromUtf8("graphicsView"))
self.gridLayout.addWidget(self.graphicsView, 0, 0, 2, 1)
self.histogram = HistogramLUTWidget(self.layoutWidget)
self.histogram.setObjectName(_fromUtf8("histogram"))
self.gridLayout.addWidget(self.histogram, 0, 1, 1, 2)
self.roiBtn = QtWidgets.QPushButton(self.layoutWidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(1)
sizePolicy.setHeightForWidth(self.roiBtn.sizePolicy().hasHeightForWidth())
self.roiBtn.setSizePolicy(sizePolicy)
self.roiBtn.setCheckable(True)
self.roiBtn.setObjectName(_fromUtf8("roiBtn"))
self.gridLayout.addWidget(self.roiBtn, 1, 1, 1, 1)
self.menuBtn = QtWidgets.QPushButton(self.layoutWidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(1)
sizePolicy.setHeightForWidth(self.menuBtn.sizePolicy().hasHeightForWidth())
self.menuBtn.setSizePolicy(sizePolicy)
self.menuBtn.setObjectName(_fromUtf8("menuBtn"))
self.gridLayout.addWidget(self.menuBtn, 1, 2, 1, 1)
self.roiPlot = PlotWidget(self.splitter)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.roiPlot.sizePolicy().hasHeightForWidth())
self.roiPlot.setSizePolicy(sizePolicy)
self.roiPlot.setMinimumSize(QtCore.QSize(0, 40))
self.roiPlot.setObjectName(_fromUtf8("roiPlot"))
self.gridLayout_3.addWidget(self.splitter, 0, 0, 1, 1)
self.normGroup = QtWidgets.QGroupBox(Form)
self.normGroup.setObjectName(_fromUtf8("normGroup"))
self.gridLayout_2 = QtWidgets.QGridLayout(self.normGroup)
self.gridLayout_2.setContentsMargins(0,0,0,0)
self.gridLayout_2.setSpacing(0)
self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
self.normSubtractRadio = QtWidgets.QRadioButton(self.normGroup)
self.normSubtractRadio.setObjectName(_fromUtf8("normSubtractRadio"))
self.gridLayout_2.addWidget(self.normSubtractRadio, 0, 2, 1, 1)
self.normDivideRadio = QtWidgets.QRadioButton(self.normGroup)
self.normDivideRadio.setChecked(False)
self.normDivideRadio.setObjectName(_fromUtf8("normDivideRadio"))
self.gridLayout_2.addWidget(self.normDivideRadio, 0, 1, 1, 1)
self.label_5 = QtWidgets.QLabel(self.normGroup)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_5.setFont(font)
self.label_5.setObjectName(_fromUtf8("label_5"))
self.gridLayout_2.addWidget(self.label_5, 0, 0, 1, 1)
self.label_3 = QtWidgets.QLabel(self.normGroup)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_3.setFont(font)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.gridLayout_2.addWidget(self.label_3, 1, 0, 1, 1)
self.label_4 = QtWidgets.QLabel(self.normGroup)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_4.setFont(font)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.gridLayout_2.addWidget(self.label_4, 2, 0, 1, 1)
self.normROICheck = QtWidgets.QCheckBox(self.normGroup)
self.normROICheck.setObjectName(_fromUtf8("normROICheck"))
self.gridLayout_2.addWidget(self.normROICheck, 1, 1, 1, 1)
self.normXBlurSpin = QtWidgets.QDoubleSpinBox(self.normGroup)
self.normXBlurSpin.setObjectName(_fromUtf8("normXBlurSpin"))
self.gridLayout_2.addWidget(self.normXBlurSpin, 2, 2, 1, 1)
self.label_8 = QtWidgets.QLabel(self.normGroup)
self.label_8.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.label_8.setObjectName(_fromUtf8("label_8"))
self.gridLayout_2.addWidget(self.label_8, 2, 1, 1, 1)
self.label_9 = QtWidgets.QLabel(self.normGroup)
self.label_9.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.label_9.setObjectName(_fromUtf8("label_9"))
self.gridLayout_2.addWidget(self.label_9, 2, 3, 1, 1)
self.normYBlurSpin = QtWidgets.QDoubleSpinBox(self.normGroup)
self.normYBlurSpin.setObjectName(_fromUtf8("normYBlurSpin"))
self.gridLayout_2.addWidget(self.normYBlurSpin, 2, 4, 1, 1)
self.label_10 = QtWidgets.QLabel(self.normGroup)
self.label_10.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.label_10.setObjectName(_fromUtf8("label_10"))
self.gridLayout_2.addWidget(self.label_10, 2, 5, 1, 1)
self.normOffRadio = QtWidgets.QRadioButton(self.normGroup)
self.normOffRadio.setChecked(True)
self.normOffRadio.setObjectName(_fromUtf8("normOffRadio"))
self.gridLayout_2.addWidget(self.normOffRadio, 0, 3, 1, 1)
self.normTimeRangeCheck = QtWidgets.QCheckBox(self.normGroup)
self.normTimeRangeCheck.setObjectName(_fromUtf8("normTimeRangeCheck"))
self.gridLayout_2.addWidget(self.normTimeRangeCheck, 1, 3, 1, 1)
        self.normFrameCheck = QtWidgets.QCheckBox(self.normGroup)
self.normFrameCheck.setObjectName(_fromUtf8("normFrameCheck"))
self.gridLayout_2.addWidget(self.normFrameCheck, 1, 2, 1, 1)
self.normTBlurSpin = QtWidgets.QDoubleSpinBox(self.normGroup)
self.normTBlurSpin.setObjectName(_fromUtf8("normTBlurSpin"))
        self.gridLayout_2.addWidget(self.normTBlurSpin, 2, 6, 1, 1)
self.gridLayout_3.addWidget(self.normGroup, 1, 0, 1, 1)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
Form.setWindowTitle(_translate("Form", "Form", None))
self.roiBtn.setText(_translate("Form", "ROI", None))
self.menuBtn.setText(_translate("Form", "Menu", None))
self.normGroup.setTitle(_translate("Form", "Normalization", None))
self.normSubtractRadio.setText(_translate("Form", "Subtract", None))
self.normDivideRadio.setText(_translate("Form", "Divide", None))
self.label_5.setText(_translate("Form"
|
flukiluke/eris
|
db.py
|
Python
|
mit
| 686
| 0.010204
|
import sqlite3
def cursor():
    global conn
return conn.cursor()
def commit():
global conn
conn.commit()
def insert(table, data):
global conn
c = conn.cursor()
keys = [*data]
template_list = ','.join(['?'] * len(data))
query = "INSERT INTO {} ({}) VALUES ({})".format(table, ','.join(keys), template_list)
c.execute(query, tuple(data[k] for k in keys))
conn.commit()
def start():
global conn
c = conn.cursor()
c.execute("CREATE TABLE IF NOT EXISTS quotes (content TEXT)")
c.execute("CREATE TABLE IF NOT EXISTS alerts (target TEXT, time INTEGER, message TEXT)")
conn.commit()
conn = sqlite3.connect('persist.db')
start()
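# Example usage (a sketch; the values are hypothetical, but the table and
# column names match the schemas created in start() above):
#
#   insert('quotes', {'content': 'Hello, world'})
#   c = cursor()
#   c.execute("SELECT content FROM quotes")
#   print(c.fetchall())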
|
thusoy/grunt-pylint
|
test/fixtures/test_package/camelcasefunc.py
|
Python
|
mit
| 144
| 0.006944
|
""" This modul
|
e should trigger a linting error for camelcase function name. """
def camelCaseFunc():
""
|
" This function has a bad name. """
|
sahabi/keyman
|
main.py
|
Python
|
mit
| 33
| 0
|
from keyman.interface import app
|
vileopratama/vitech
|
src/openerp/addons/base/tests/test_misc.py
|
Python
|
mit
| 1,108
| 0.000903
|
import unittest
from openerp.tools import misc
class test_countingstream(unittest.TestCase):
def test_empty_stream(self):
s = misc.CountingStream(iter([]))
self.assertEqual(s.index, -1)
self.assertIsNone(next(s, None))
self.assertEqual(s.index, 0)
def test_single(self):
s = misc.CountingStream(xrange(1))
self.assertEqual(s.index, -1)
self.assertEqual(next(s, None), 0)
self.assertIsNone(next(s, None))
self.assertEqual(s.index, 1)
def test_full(self):
s = misc.CountingStream(xrange(42))
for _ in s:
pass
self.assertEqual(s.index, 42)
def test_repeated(self):
""" Once the CountingStream has stopped iterating, the index should not
increase anymore (the internal state should not be allowed to change)
"""
s = misc.CountingStream(iter([]))
self.assertIsNone(next(s, None))
self.assertEqual(s.index, 0)
self.assertIsNone(next(s, None))
self.assertEqual(s.index, 0)
if __name__ == '__main__':
unittest.main()
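# A minimal sketch of the behaviour these tests pin down (not necessarily the
# real openerp.tools.misc implementation): index starts at -1, advances once
# per yielded element, and stops changing after the stream is exhausted.
#
#   class CountingStream(object):
#       def __init__(self, stream, start=-1):
#           self.stream = iter(stream)
#           self.index = start
#           self.stopped = False
#       def __iter__(self):
#           return self
#       def next(self):
#           if self.stopped:
#               raise StopIteration()
#           self.index += 1
#           try:
#               return next(self.stream)
#           except StopIteration:
#               self.stopped = True
#               raise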
|
StSchulze/pymzML
|
tests/ms2_spec_test.py
|
Python
|
mit
| 2,583
| 0.003097
|
import sys
import os
import unittest
sys.path.append(os.path.abspath("."))
import pymzml
from pymzml.spec import PROTON
import pymzml.run as run
import test_file_paths
import numpy as np
class SpectrumMS2Test(unittest.TestCase):
"""
BSA test file
Peptide @
Scan: 2548
RT [min] 28.96722412109367
Selected_precursor [(443.711242675781, 0.0)]
"""
def setUp(self):
"""
"""
# self.paths = [
# os.path.join( DATA_FOLDER, file ) for file in DATA_FILES]
self.paths = test_file_paths.paths
path = self.paths[9]
self.Run = run.Reader(path)
self.spec = self.Run[2548]
def test_scan_time(self):
scan_time = self.spec.scan_time_in_minutes()
self.assertIsNotNone(scan_time)
self.assertIsInstance(scan_time, float)
self.assertEqual(round(scan_time, 4), round(28.96722412109367, 4))
def test_select_precursors(self):
selected_precursor = self.spec.selected_precursors
self.assertIsInstance(selected_precursor[0], dict)
self.assertIsInstance(selected_precursor[0]["mz"], float)
        self.assertIsInstance(selected_precursor[0]["i"], float)
self.assertIsInstance(selected_precursor[0]["charge"], int)
self.assertEqual(
selected_precursor, [{"mz": 443.711242675781, "i": 0.0, "charge
|
": 2}]
)
@unittest.skipIf(pymzml.spec.DECON_DEP is False, "ms_deisotope was not installed")
def test_deconvolute_peaks(self):
charge = 3
test_mz = 430.313
arr = np.array([(test_mz, 100), (test_mz + PROTON / charge, 49)])
spec = self.Run[2548]
spec.set_peaks(arr, "centroided")
decon = spec.peaks("deconvoluted")
self.assertEqual(len(decon), 1)
decon_mz = (test_mz * charge) - charge * PROTON
self.assertEqual(decon[0][0], decon_mz)
        self.assertEqual(decon[0][1], 149)  # 149 since intensities are 100 and 49
self.assertEqual(decon[0][2], 3)
def test_remove_precursor_peak(self):
test_mz = 443.71124268 # precursor peak
self.spec.set_peaks(np.array([(test_mz, 200)]), "centroided")
self.spec.set_peaks(np.array([(test_mz, 200)]), "raw")
assert self.spec.has_peak(test_mz)
self.spec._transformed_mz_with_error = None
new_peaks = self.spec.remove_precursor_peak()
found_peaks = self.spec.has_peak(test_mz)
assert len(found_peaks) == 0
if __name__ == "__main__":
unittest.main(verbosity=3)
|
benoitbryon/xal
|
xal/dir/provider.py
|
Python
|
bsd-3-clause
| 895
| 0
|
# -*- coding: utf-8 -*-
"""Base stuff for providers that handle filesystem directories."""
from xal.provider import ResourceProvider
from xal.dir.resource import Dir
class DirProvider(ResourceProvider):
"""Base class for filesystem directori
|
es."""
def __init__(self, resource_factory=Dir):
super(DirProvider, self).__init__(resource_factory=resource_factory)
@property
def home(self):
raise NotImplementedError()
@property
def sep(self):
if self.xal_session.sys.is_posix:
return '/'
elif self.xal_session.sys.is_windows:
return '\\'
    def join(self, *args):
        modified_args = list(args)  # args is a tuple; copy into a mutable list
        for key, value in enumerate(modified_args):
            modified_args[key] = value.strip(self.sep)
        return self.sep.join(modified_args)
def abspath(self, path):
raise NotImplementedError()
|
Azure/azure-sdk-for-python
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_11_01/operations/_virtual_hubs_operations.py
|
Python
|
mit
| 26,953
| 0.004786
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class VirtualHubsOperations(object):
"""VirtualHubsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_11_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def get(
self,
resource_group_name, # type: str
virtual_hub_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.VirtualHub"
"""Retrieves the details of a VirtualHub.
:param resource_group_name: The resource group name of the VirtualHub.
:type resource_group_name: str
:param virtual_hub_name: The name of the VirtualHub.
:type virtual_hub_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualHub, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_11_01.models.VirtualHub
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualHub"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualHub', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
virtual_hub_name, # type: str
virtual_hub_parameters, # type: "_models.VirtualHub"
**kwargs # type: Any
):
# type: (...) -> "_models.VirtualHub"
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualHub"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(virtual_hub_parameters, 'VirtualHub')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VirtualHub', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualHub', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
virtual_hub_name, # type: str
virtual_hub_parameters, # type: "_models.VirtualHub"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.VirtualHub"]
"""Creates a VirtualHub resource if it doesn't exist else updates the existing VirtualHub.
:param resource_group_name: The resource group na
|
mansonul/events
|
events/contrib/plugins/form_elements/fields/select_multiple_with_max/conf.py
|
Python
|
mit
| 1,127
| 0
|
from django.conf import settings
from . import defaults
__title__ = 'fobi.contrib.plugins.form_elements.fields.' \
'select_multiple_with_max.conf'
__author__ = 'Artur Barseghyan <artur.barseghyan@gmail.com>'
__copyright__ = '2014-2017 Artur Barseghyan'
__license__ = 'GPL 2.0/LGPL 2.1'
__all__ = ('get_setting',)
def get_setting(setting, override=None):
"""Get setting.
Get a setting from
`fobi.contrib.plugins.form_elements.fields.select_multiple_with_max`
conf module, falling back to the default.
If override is not None, it will be used instead of the setting.
:param setting: String with setting name
:param override: Value to use when no setting is available. Defaults
to None.
:return: Setting value.
"""
if override is not None:
return override
if hasattr(
settings,
'FOBI_FORM_ELEMENT_SELECT_MULTIPLE_WITH_MAX_{0}'.format(setting)
):
return getattr(
settings,
'FOBI_FORM_ELEMENT_SELECT_MULTIPLE_WITH_MAX_{0}'.format(setting)
)
else:
return getattr(defaults, setting)
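# Example usage (a sketch; MAX_ITEMS is a hypothetical setting name, looked up
# as FOBI_FORM_ELEMENT_SELECT_MULTIPLE_WITH_MAX_MAX_ITEMS):
#
#   max_items = get_setting('MAX_ITEMS')              # Django setting or default
#   max_items = get_setting('MAX_ITEMS', override=5)  # always returns 5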
|
cycladesnz/chambersAndCreatures
|
src/effects/__init__.py
|
Python
|
gpl-2.0
| 175
| 0.005714
|
import os
files = os.listdir(os.path.join('src','effects'))
for file in files:
    if file[-3:] == '.py' and file[:2] != '__':
        exec('from .%s import *' % (file[:-3]))
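# A more explicit equivalent using importlib (a sketch; it assumes the same
# intent as the exec above, i.e. star-import every public .py module in this
# package -- note that vars(module) also copies underscore-prefixed names):
#
#   import importlib
#   for file in files:
#       if file.endswith('.py') and not file.startswith('__'):
#           module = importlib.import_module('.' + file[:-3], package=__package__)
#           globals().update(vars(module))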
|
Archman/felapps
|
tests/test2.py
|
Python
|
mit
| 8,381
| 0.022193
|
#!/usr/bin/env python
import wx
from wx.lib.agw import floatspin as fs
import numpy as np
class MyFrame(wx.Frame):
def __init__(self, *args, **kws):
super(self.__class__,self).__init__(*args, **kws)
nb1 = MyNB(self)
self.Show()
class MyFrame1(wx.Frame):
def __init__(self, *args, **kws):
super(self.__class__,self).__init__(*args, **kws)
self.curvalue = 2.3
self.minValue = 0.2
self.maxValue = 9.1
self.incValue = 0.1
self.facValue = 10
self.slider_min = self.minValue*self.facValue
self.slider_max = self.maxValue*self.facValue
self.slider_cur = self.curvalue*self.facValue
self.slider_num = int((self.slider_max - self.slider_min)/(self.incValue*self.facValue) + 1)
self.sliderTicRange = np.linspace(self.slider_min, self.slider_max, self.slider_num)
self.sliderValRange = np.linspace(self.minValue, self.maxValue, self.slider_num)
self.iniUI1()
#self.iniUI()
self.Show()
def iniUI1(self):
self.panel = wx.Panel(self)
self.slider = FloatSlider(self.panel, value = 1.0, minValue = 1, maxValue = 100)
vbox = wx.BoxSizer(wx.VERTICAL)
vbox.Add(self.slider, proportion = 0, flag = wx.EXPAND)
self.panel.SetSizer(vbox)
self.Bind(wx.EVT_SLIDER, self.onSlider, self.slider)
def onSlider(self, event):
obj = event.GetEventObject()
print obj.GetValue()
def iniUI(self):
self.panel = wx.Panel(self)
self.panel.SetBackgroundColour((170, 238, 170))
self._slider = wx.Slider(self.panel, value = self.slider_cur,
minValue = self.slider_min, maxValue = self.slider_max,
style = wx.SL_HORIZONTAL)
self._min_label = wx.StaticText(self.panel, label = str(self.minValue))
self._max_label = wx.StaticText(self.panel, label = str(self.maxValue))
self._val_label = wx.StaticText(self.panel, label = str(self.curvalue))
self.hbox_top = wx.BoxSizer(wx.HORIZONTAL)
self.hbox_top.Add(self._val_label, proportion = 0, flag = wx.ALIGN_CENTER)
self.hbox_down = wx.BoxSizer(wx.HORIZONTAL)
self.hbox_down.Add(self._min_label, proportion = 0, flag = wx.EXPAND | wx.ALIGN_CENTRE)
self.hbox_down.Add(self._slider, proportion = 2, flag = wx.EXPAND | wx.ALIGN_CENTRE | wx.LEFT | wx.RIGHT, border = 10)
self.hbox_down.Add(self._max_label, proportion = 0, flag = wx.EXPAND | wx.ALIGN_CENTRE)
self.vbox = wx.BoxSizer(wx.VERTICAL)
self.vbox.Add(self.hbox_top, proportion = 0, flag = wx.ALIGN_CENTER | wx.LEFT | wx.RIGHT, border = 10)
self.vbox.Add(self.hbox_down, proportion = 0, flag = wx.EXPAND | wx.ALIGN_CENTER | wx.LEFT | wx.RIGHT, border = 10)
self.panel.SetSizer(self.vbox)
self.Bind(wx.EVT_SLIDER, self.onFSlider, self._slider)
def onFSlider(self, event):
obj = event.GetEventObject()
ticValue = obj.GetValue() - obj.GetMin()
curVal = self.sliderValRange[ticValue]
print ticValue, curVal
self._val_label.SetLabel(str(curVal))
class MyNB(wx.Notebook):
def __init__(self, parent, *args, **kws):
super(self.__class__, self).__init__(parent=parent, style = wx.NB_TOP, *args, **kws)
# panel #1
self.panel1 = MyPanel(self)
self.panel1.SetBackgroundColour(wx.Colour(0, 0, 255))
self.spinctrl = fs.FloatSpin(self.panel1, value = '0.1', min_val = 0.1, max_val = 0.9, digits = 2, increment = 0.01)#, agwStyle = fs.FS_READONLY)
# panel #2
self.panel2 = MyPanel(self)
self.panel2.SetBackgroundColour(wx.Colour(0, 255, 255))
self.btn2 = wx.Button(self.panel2, label = 'choose color')
# panel #3
self.panel3 = MyPanel(self)
hbox1 = wx.BoxSizer(wx.HORIZONTAL)
hbox2 = wx.BoxSizer(wx.HORIZONTAL)
p1 = FloatSlider(self.panel3, value = 1.0, minValue = 0.1, maxValue = 9.1)
print p1.GetSize()
#p1 = wx.Panel(self.panel3)
#p1 = wx.Button(self.panel3, label = 'btn1')
#p1.SetBackgroundColour('red')
p2 = wx.Panel(self.panel3)
#p2 = wx.Button(self.panel3, label = 'btn2')
p2.SetBackgroundColour('blue')
p3 = wx.Panel(self.panel3)
#p3 = wx.Button(self.panel3, label = 'btn3')
p3.SetBackgroundColour('yellow')
p4 = wx.Panel(self.panel3)
#p4 = wx.Button(self.panel3, label = 'btn4')
p4.SetBackgroundColour('green')
hbox1.Add(p1, proportion = 1, flag = wx.EXPAND)
hbox1.Add(p2, proportion = 1, flag = wx.EXPAND)
hbox2.Add(p3, proportion = 1, flag = wx.EXPAND)
hbox2.Add(p4, proportion = 1, flag = wx.EXPAND)
vbox = wx.BoxSizer(wx.VERTICAL)
vbox.Add(hbox1, proportion = 1, flag = wx.EXPAND)
vbox.Add(hbox2, proportion = 1, flag = wx.EXPAND)
self.panel3.SetSizer(vbox)
# #1 Tab
self.AddPage(self.panel1, 'First Tab')
# #2 Tab
self.AddPage(self.panel2, 'Second Tab')
# #3 Tab
        self.AddPage(self.panel3, 'Third Tab')
# events
self.Bind(wx.EVT_BUTTON, self.onChooseColor, self.btn2)
    def onChooseColor(self, event):
dlg = wx.ColourDialog(self)
dlg.GetColourData().SetChooseFull(True)
if dlg.ShowModal() == wx.ID_OK:
color = dlg.GetColourData().GetColour()
self.panel2.SetBackgroundColour(color)
print color.GetAsString(wx.C2S_HTML_SYNTAX)
dlg.Destroy()
class MyFrame2(wx.Frame):
def __init__(self, *args, **kws):
super(self.__class__,self).__init__(*args, **kws)
slide = FloatSlider(self,0.2,0.1,1.0,0.01)
self.Show()
class MyPanel(wx.Panel):
def __init__(self, parent, *args, **kws):
super(self.__class__, self).__init__(parent=parent, *args, **kws)
"""
class FloatSlider(wx.Slider):
#def __init__(self, parent, *args, **kws):
# super(self.__class__, self).__init__(parent, *args, **kws)
def GetValue(self):
return float(wx.Slider.GetValue(self))/self.GetMax()
"""
class FloatSlider(wx.Slider):
    def __init__(self, parent, id = wx.ID_ANY, value = 0, minValue = 0, maxValue = 10, increment = 0.1,
            size = wx.DefaultSize, style = wx.SL_HORIZONTAL, *args, **kws):
        self._value = value
        self._min = minValue
        self._max = maxValue
        self._inc = increment
        # map the float range onto the integer range the underlying wx.Slider uses
        ival, imin, imax = [round(v/increment) for v in (value, minValue, maxValue)]
        self._islider = super(FloatSlider, self)
        self._islider.__init__(parent = parent, value = ival, minValue = imin, maxValue = imax, id = id, size = size, style = style, *args, **kws)
self.Bind(wx.EVT_SCROLL, self._OnScroll, self._islider)
def _OnScroll(self, event):
ival = self._islider.GetValue()
imin = self._islider.GetMin()
imax = self._islider.GetMax()
if ival == imin:
self._value = self._min
elif ival == imax:
self._value = self._max
else:
self._value = ival * self._inc
event.Skip()
print 'OnScroll: value=%f, ival=%d' % (self._value, ival)
def GetValue(self):
return self._value
def GetMin(self):
return self._min
def GetMax(self):
return self._max
def GetInc(self):
return self._inc
    def SetValue(self, value):
        self._islider.SetValue(round(value/self._inc))
        self._value = value
    def SetMin(self, minval):
        self._islider.SetMin(round(minval/self._inc))
        self._min = minval
    def SetMax(self, maxval):
        self._islider.SetMax(round(maxval/self._inc))
        self._max = maxval
def SetInc(self, inc):
self._islider.SetRange(round(self._min/inc), round(self._max/inc))
self._islider.SetValue(round(self._value/inc))
self._inc = inc
    def SetRange(self, minval, maxval):
        self._islider.SetRange(round(minval/self._inc), round(maxval/self._inc))
        self._min = minval
        self._max = maxval
|
NoahFlowa/CTC_Projects
|
Osterhout_Python/Kirby_Physics.py
|
Python
|
mit
| 1,794
| 0.043478
|
# Programmer: Noah Osterhout
# Date: September 30th 2016 1:40PM EST
# Project: Kirby_Physics.py
#Ask what Problem they will be using
print()
print("This Program will find the mis
|
isng Variables using the three known ones and using PEMDAS")
print()
beetles_mem = input("What Beetles member will you be using? ")
gravity_global = -9.8
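#Each Beetles member selects one constant-acceleration kinematics formula:
#  John:   d    = Vi*t + (1/2)*g*t^2
#  Paul:   d    = (1/2)*(Vf + Vi)*t
#  George: Vf   = Vi + g*t
#  Ringo:  Vf^2 = Vi^2 + 2*g*d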
if beetles_mem == "John":
john_time = int(input("What is the Time in seconds? "))
new_john_time = john_time ** 2
john_Vi = int(input("What is the Initial Velocity
|
? "))
    #Calculate using John Formula: d = Vi*t + (1/2)*g*t^2
    john_formula = john_Vi * john_time + .5 * gravity_global * new_john_time
    print("The Distance would be", john_formula)
elif beetles_mem == "Paul":
paul_Vf = int(input("What is the Final Velocity? "))
paul_Vi = int(input("What is the Intial Velocity? "))
paul_time = int(input("What is the Time in seconds? "))
#Calculate using Paul Formula
paul_formula = .5 * (paul_Vf + paul_Vi) * paul_time
print("The Distance would be", paul_formula)
elif beetles_mem == "George":
george_Vi = int(input("What is the Intial Velocity? "))
george_time = int(input("What is the Time in seconds? "))
#Calculate using George Formula
george_formula = george_Vi + gravity_global * george_time
print("The Final Velocity is", george_formula)
elif beetles_mem == "Ringo":
ringo_Vi = int(input("What is the Initial Velocity? "))
new_ringo_Vi = ringo_Vi ** 2
ringo_dist = int(input("What is the Distance? "))
#Calculate using Ringo Formula
ringo_formula = new_ringo_Vi + 2 * gravity_global * ringo_dist
print("The Final Velocity is", ringo_formula, "EE 2")
elif beetles_mem == "Kirby":
print("Kirby wishes he was a Beetles member")
else: print("ERROR! Unknown Beetles Member!")
|
ntt-pf-lab/backup_keystone
|
keystone/controllers/extensions.py
|
Python
|
apache-2.0
| 752
| 0.00133
|
from webob import Response
from keystone import utils
from keystone.common import template, wsgi
class ExtensionsController(wsgi.Controller):
"""Controller for extensions related me
|
thods"""
def __init__(self, options):
super(ExtensionsController, self).__init__()
self.options = options
@utils.wrap_error
def get_extensions_info(self, req, path):
resp = Response()
if utils.is_xml_response(req):
resp_file = "%s.xml" % path
mime_type = "application/xml"
else:
resp_file = "%s.json" % path
            mime_type = "application/json"
return template.static_file(resp, req, resp_file,
root=utils.get_app_root(), mimetype=mime_type)
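    # For example (a sketch): a request for path "extensions" with an XML
    # Accept header is served "extensions.xml" as application/xml; any other
    # request is served "extensions.json" as application/json.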
|
zhlinh/leetcode
|
0094.Binary Tree Inorder Traversal/test.py
|
Python
|
apache-2.0
| 874
| 0.006865
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from solution import Solution
from solution import TreeNode
def constructOne(s):
if s == '#':
return None
else:
return TreeNode(int(s))
def createTree(tree):
    q = []
    root = constructOne(tree[0])
    q.append(root)
    idx = 1
while q:
tn = q.pop(0)
if not tn:
continue
if idx == len(tree):
break
left = constructOne(tree[idx])
tn.left = left
q.append(left)
idx += 1
if idx == len(tree):
break
right = constructOne(tree[idx])
idx += 1
tn.right = right
q.append(right)
return root
# inpt = createTree(['1', '#', '2', '3'])
inpt = createTree(['1', '2', '3', '#' , '#', '4', '#', '#', '5'])
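# That level-order list (with '#' marking absent children) builds:
#       1
#      / \
#     2   3
#        /
#       4
#        \
#         5
# so the expected inorder traversal is [2, 1, 4, 5, 3].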
sol = Solution()
res = sol.inorderTraversal(inpt)
print(res)
|
davogler/venus
|
opstel/urls/entries.py
|
Python
|
mit
| 1,232
| 0.018669
|
from django.conf.urls.defaults import *
from django.views.generic.dates import YearArchiveView, MonthArchiveView, DayArchiveView, DateDetailView
from tinymce.widgets import TinyMCE
from tinymce.views import preview
from opstel.models import Entry
entry_info_dict = {'queryset':Entry.live.all(), 'date_field': 'pub_date', }
urlpatterns = patterns('',
# Pagination for the equivalent of archive_index generic view.
# The url is of the form http://host/page/4/
# In urls.py for example, ('^blog/page/(?P<page>\d)/$', get_archive_index),
url(r'^$', 'opstel.views.get_archive_index_first', ),
    url(r'^page/(?P<page>\d)/$', 'opstel.views.get_archive_index', ),
#url(r'^preview/$', 'preview', name= "preview"),
url(r'^(?P<year>\d{4})/$', YearArchiveView.as_view(**entry_info_dict), name= 'opstel_entry_archive_year'),
url(r'^(?P<year>\d{4})/(?P<month>\w{3})/$', MonthArchiveView.as_view(**entry_info_dict), name= 'opstel_entry_archive_month'),
    url(r'^(?P<year>\d{4})/(?P<month>\w{3})/(?P<day>\d{2})/$', DayArchiveView.as_view(**entry_info_dict), name= 'opstel_entry_archive_day'),
url(r'^(?P<year>\d{4})/(?P<month>\w{3})/(?P<day>\d{2})/(?P<slug>[-\w]+)/$', DateDetailView.as_view(**entry_info_dict), name= 'opstel_entry_detail'),
)
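# Example paths these patterns resolve (a sketch; the slug is hypothetical):
#   /2012/                  -> opstel_entry_archive_year
#   /2012/jan/              -> opstel_entry_archive_month
#   /2012/jan/05/           -> opstel_entry_archive_day
#   /2012/jan/05/my-entry/  -> opstel_entry_detail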
|
SUSE/azure-sdk-for-python
|
azure-mgmt-resource/azure/mgmt/resource/policy/v2016_12_01/operations/policy_assignments_operations.py
|
Python
|
mit
| 30,831
| 0.002465
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
import uuid
from .. import models
class PolicyAssignmentsOperations(object):
"""PolicyAssignmentsOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
:ivar api_version: The API version to use for the operation. Constant value: "2016-12-01".
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2016-12-01"
self.config = config
def delete(
self, scope, policy_assignment_name, custom_headers=None, raw=False, **operation_config):
"""Deletes a policy assignment.
:param scope: The scope of the policy assignment.
:type scope: str
        :param policy_assignment_name: The name of the policy assignment to
delete.
:type policy_assignment_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
        :param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`PolicyAssignment
<azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = '/{scope}/providers/Microsoft.Authorization/policyassignments/{policyAssignmentName}'
path_format_arguments = {
'scope': self._serialize.url("scope", scope, 'str', skip_quote=True),
'policyAssignmentName': self._serialize.url("policy_assignment_name", policy_assignment_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.delete(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('PolicyAssignment', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def create(
self, scope, policy_assignment_name, parameters, custom_headers=None, raw=False, **operation_config):
"""Creates a policy assignment.
Policy assignments are inherited by child resources. For example, when
you apply a policy to a resource group that policy is assigned to all
resources in the group.
:param scope: The scope of the policy assignment.
:type scope: str
:param policy_assignment_name: The name of the policy assignment.
:type policy_assignment_name: str
:param parameters: Parameters for the policy assignment.
:type parameters: :class:`PolicyAssignment
<azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment>`
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`PolicyAssignment
<azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = '/{scope}/providers/Microsoft.Authorization/policyassignments/{policyAssignmentName}'
path_format_arguments = {
'scope': self._serialize.url("scope", scope, 'str', skip_quote=True),
'policyAssignmentName': self._serialize.url("policy_assignment_name", policy_assignment_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'PolicyAssignment')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [201]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 201:
deserialized = self._deserialize('PolicyAssignment', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get(
self, scope, policy_assignment_name, custom_headers=None, raw=False, **operation_config):
"""Gets a policy assignment.
:param scope: The scope of the policy assignment.
:type scope: str
:param policy_assignment_name: The name of the policy assignment to
get.
:type policy_assignment_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`PolicyAssignment
<azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=
|
lmccalman/spacerace
|
physics/engine.py
|
Python
|
mit
| 10,367
| 0.002026
|
""" Physics test sandbox for the space race game!
Alistair Reid 2015
"""
import matplotlib.pyplot as pl
import matplotlib as mpl
import numpy as np
from numpy.linalg import norm
from time import time, sleep
import os
def integrate(states, props, inp, walls, bounds, dt):
""" Implementing 4th order Runge-Kutta for a time stationary DE.
"""
derivs = lambda y: physics(y, props, inp, walls, bounds)
k1 = derivs(states)
k2 = derivs(states + 0.5*k1*dt)
k3 = derivs(states + 0.5*k2*dt)
k4 = derivs(states + k3*dt)
states += (k1 + 2*k2 + 2*k3 + k4)/6. * dt
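# (The update above is the classic 4th-order Runge-Kutta weighted average:
#  y_{n+1} = y_n + dt*(k1 + 2*k2 + 2*k3 + k4)/6, with k2 and k3 evaluated at
#  half-step predictions and k4 at the full-step prediction.)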
def physics(states, props, inp, walls, bounds):
# Unpack state, input and property vectors
P = states[:, :2]
Th = states[:, 2:3]
V = states[:, 3:5]
W = states[:, 5:6]
m = props[:, 0:1]
I = props[:, 1:2]
rad = props[:, 2:3]
cd_a = props[:, 3:4] # coeff drag * area
f = inp[:, :1] * np.hstack((np.cos(Th), np.sin(Th)))
trq = inp[:, 1:2]
n = P.shape[0]
# Physics model parameters (hand tuned to feel right)
rho = 0.1 # Air density (or absorb into cd_a?)
k_elastic = 4000. # spring normal force
spin_drag_ratio = 1.8 # spin drag to forward drag
eps = 1e-5 # avoid divide by zero warnings
mu = 0.05 # coefficient of friction (tangent force/normal force)
mu_wall = 0.01 # wall friction param
sigmoid = lambda x: -1 + 2./(1. + np.exp(-x))
# Compute drag
f -= cd_a * rho * V * norm(V, axis=1)[:, np.newaxis]
trq -= spin_drag_ratio*cd_a * rho * W * np.abs(W) * rad**2
# Inter-ship collisions
checks = shortlist_collisions(P, 1.) # Apply test spatial hashing
for i, j in checks:
dP = P[j] - P[i]
dist = norm(dP) + eps
diameter = rad[i] + rad[j]
if dist < diameter:
# Direct collision: linear spring normal force
f_magnitude = (diameter-dist)*k_elastic
f_norm = f_magnitude * dP
f[i] -= f_norm
f[j] += f_norm
# Spin effects (ask Al to draw a free body diagram)
perp = np.array([-dP[1], dP[0]])/dist # surface perpendicular
v_rel = rad[i]*W[i] + rad[j]*W[j] + np.dot(V[i] - V[j], perp)
fric = f_magnitude * mu * sigmoid(v_rel)
f_fric = fric * perp
f[i] += f_fric
f[j] -= f_fric
trq[i] -= fric * rad[i]
trq[j] -= fric * rad[j]
# Wall collisions --> single body collisions
wall_info = linear_interpolate(walls, bounds, P)
for i in range(n):
dist = wall_info[i][0] - rad[i]
if dist < 0:
normal = wall_info[i][1:3]
# Linear spring normal force
f_norm_mag = -dist*k_elastic
f[i] += f_norm_mag * normal
# surface tangential force
perp = [-normal[1], normal[0]] # points left 90 degrees
v_rel = W[i] * rad[i] - np.dot(V[i], perp)
fric = f_norm_mag * mu_wall * sigmoid(v_rel)
f[i] += fric*perp
trq[i] -= fric * rad[i]
# Compose the gradient vector
return np.hstack((V, W, f/m, trq/I))
def shortlist_collisions(P, r):
    # Use spatial hashing to shortlist possible collisions
    n = P.shape[0]
    all_cells = dict()  # potential collisions
    checks = set()
    grid = r * 2. + 1e-5  # base off diameter
    offsets = r*np.array([[1, 1], [1, -1], [-1, 1], [-1, -1]])
    for my_id in range(n):
        # Deduplicate bins: a body smaller than a grid cell hits the same
        # cell from several corners, which would otherwise register a
        # spurious (my_id, my_id) self-collision below.
        bins = {tuple(m) for m in np.floor((P[my_id] + offsets)/grid)}
        for bin in bins:
            if bin in all_cells:
                for friend in all_cells[bin]:
                    checks.add((my_id, friend))
                all_cells[bin].append(my_id)
            else:
                all_cells[bin] = [my_id]
    return checks
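# Example (a sketch): with r = 0.5 each grid cell is one diameter wide, so the
# two nearby points below share a cell and get short-listed, while the distant
# third point is never pair-tested:
#
#   P = np.array([[0.0, 0.0], [0.5, 0.0], [10.0, 10.0]])
#   shortlist_collisions(P, 0.5)   # -> {(1, 0)}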
def main():
resources = os.getcwd()[:-8]+'/mapbuilder/testmap_%s.npy'
wnx = np.load(resources % 'wnormx')
wny = np.load(resources % 'wnormy')
norm = np.sqrt(wnx**2 + wny**2) + 1e-5
wnx /= norm
wny /= norm
wdist = np.load(resources % 'walldist')
mapscale = 10
walls = np.dstack((wdist/mapscale, wnx, wny))
map_img = np.load(resources % 'occupancy') # 'walldist')
all_shape = np.array(map_img.shape).astype(float) / mapscale
bounds = [0, all_shape[1], 0, all_shape[0]]
# map_img = 0.25*(map_img[::2, ::2] + map_img[1::2,::2] + \
# map_img[::2, 1::2] + map_img[1::2, 1::2])
spawn = np.array([25, 25])/2. # x, y
spawn_size = 6/2.
n = 30
masses = 1. + 2*np.random.random(n)
masses[0] = 1.
Is = 0.25*masses
radius = np.ones(n)
cda = np.ones(n)
properties = np.vstack((masses, Is, radius, cda)).T
colours = ['r', 'b', 'g', 'c', 'm', 'y']
colours = (colours * np.ceil(n/len(colours)))[:n]
colours[0] = 'k'
# x, y, th, vx, vy, w
x0 = 2*(np.random.random(n) - 0.5) * spawn_size + spawn[0]
y0 = 2*(np.random.random(n) - 0.5) * spawn_size + spawn[1]
th0 = np.random.random(n) * np.pi * 2
vx0 = np.random.random(n) * 2 - 1
vy0 = np.random.random(n) * 2 - 1
w0 = np.random.random(n) * 2 - 1
states0 = np.vstack((x0, y0, th0, vx0, vy0, w0)).T
# Set up our spaceships
fig = pl.figure()
ax = pl.subplot(111)
# Draw the backdrop:
mapview = pl.imshow(-map_img, extent=bounds, cmap=pl.cm.gray, origin='lower')
cx = np.linspace(bounds[0], bounds[1], map_img.shape[1])
cy = np.linspace(bounds[2], bounds[3], map_img.shape[0])
cX, cY = np.meshgrid(cx, cy)
pl.contour(cX, cY, map_img, 1)
pl.show(block=False)
fig.canvas.draw()
background = [fig.canvas.copy_from_bbox(ax.bbox)]
sprites = []
for s, col, r in zip(states0, colours, radius):
vis = draw_outline(ax, s, col, r)
sprites.append(vis)
ax.set_xlim(bounds[0:2])
ax.set_ylim(bounds[2:4])
ax.set_aspect('equal')
dt = 0.02
start_time = time()
t = 0.
states = states0
event_count = 0
frame_rate = 30.
frame_time = 1./frame_rate
next_draw = frame_time
keys = set()
def press(event):
keys.add(event.key)
def unpress(event):
keys.remove(event.key)
def redo_background(event):
for s in sprites:
s.set_visible(False)
fig.canvas.draw()
background[0] = fig.canvas.copy_from_bbox(ax.bbox)
for s in sprites:
s.set_visible(True)
        # event.width, event.height accessible
fig.canvas.mpl_connect('key_press_event', press)
fig.canvas.mpl_connect('key_release_event', unpress)
fig.canvas.mpl_connect('resize_event', redo_background)
print('Press Q to exit')
while 'q' not in keys:
# Advance the game state
while t < next_draw:
inputs = np.zeros((n, 2))
inputs[:, 1] = 3.0 # try to turn
            inputs[:, 0] = 100 # some forward thrust!
# give the user control of ship 0
if 'right' in keys:
inputs[0, 1] = -10.
elif 'left' in keys:
inputs[0, 1] = 10.
else:
inputs[0, 1] = 0.
if 'up' in keys:
inputs[0, 0] = 100
else:
inputs[0, 0] = 0
t += dt
integrate(states, properties, inputs, walls, bounds, dt)
# Draw at the desired framerate
this_time = time() - start_time
if this_time > next_draw:
next_draw += frame_time
# blit the background
fig.canvas.restore_region(background[0])
for state, col, r, sprite in zip(states, colours, radius, sprites):
draw_outline(ax, state, col, r, handle=sprite)
fig.canvas.blit(ax.bbox)
event_count += 1
fig.canvas.flush_events()
else:
sleep((next_draw - this_time)*0.25)
def draw_outline(ax, state, c, radius, handle=None, n=15):
x, y, th, vx, vy, w = state
# m, I, radius, c = props
# base_theta = np.linspace(np.pi-2, np.pi+2, n-1)
# base_theta[0] = 0
# base_theta[-1] = 0
ba
|
NicholasAsimov/courses
|
6.00.1x/final/p4-1.py
|
Python
|
mit
| 459
| 0.004357
|
def getSublists(L, n):
    sublists = []
for i in range(len(L)):
next_sublist = L[i:i+n]
if len(next_sublist) == n:
sublists.append(next_sublist)
return sublists
# Test Cases
L = [10, 4, 6, 8, 3, 4, 5, 7, 7, 2]
print getSublists(L, 4) == [[10, 4, 6, 8], [4, 6, 8, 3], [6, 8, 3, 4], [8, 3, 4, 5], [3, 4, 5, 7], [4, 5, 7, 7], [5, 7, 7, 2]]
L = [1, 1, 1, 1, 4]
print getSublists(L, 2) == [[1, 1], [1, 1], [1, 1], [1, 4]]
|
ShadauxCat/csbuild
|
UnitTests/Android/unit_test_android.py
|
Python
|
mit
| 622
| 0.016077
|
import csbuild
#csbuild.SetActiveToolchain("android")
@csbuild.project("AndroidTest_Basic", "AndroidTest_Basic")
def AndroidTest_Basic():
	csbuild.Toolchain("android").SetCcCommand("gcc")
csbuild.Toolchain("android").SetCxxCommand("g++")
csbuild.Toolchain("android").SetPackageName("csbuild.UnitTest.AndroidBasic")
csbuild.Toolchain("android").SetActivityName("CSBUnitTestAndroidBasic")
csbuild.DisablePrecompile()
csbuild.SetOutput("AndroidTest_Basic", csbuild.ProjectType.Application)
csbuild.Toolchain("android").AddLibraries("android", "m", "log", "dl", "c")
csbuild.SetSupportedToolchains("msvc", "android")
|
kobejean/tensorflow
|
tensorflow/python/autograph/converters/asserts_test.py
|
Python
|
apache-2.0
| 1,319
| 0.002274
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for asserts module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gast
from tensorflow.python.autograph.converters import asserts
from tensorflow.python.autograph.core import converter_testing
from tensorflow.python.platform import test
class AssertsTest(converter_testing.TestCase):
  def test_transform(self):
def test_fn(a):
assert a > 0
node, ctx = self.prepare(test_fn, {})
node = asserts.transform(node, ctx)
self.assertTrue(isinstance(node.body[0].value, gast.Call))
if __name__ == '__main__':
test.main()
|
masschallenge/impact-api
|
web/impact/impact/schema.py
|
Python
|
mit
| 482
| 0
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from rest_framework import response, schemas
from rest_framework.decorators import (
api_view,
renderer_classes,
)
from drf_yasg.renderers import (
OpenAPIRenderer,
SwaggerUIRenderer,
)
@api_view()
@renderer_classes([OpenAPIRenderer, SwaggerUIRenderer])
def schema_view(request):
generator = schemas.SchemaGenerator(title='Impact API')
return response.Response(generator.get_schema(request=request))
|
captainhungrykaboom/MTAT.TK.006
|
6. märts - 12. märts ülesanded/harjutus ülesanne 6.py
|
Python
|
mit
| 62
| 0.016129
|
a = "1"
b = 1
print("A
|
rvud on " + 5 * a + " ja " + str(
|
5 * b))
|
mauroalberti/gsf
|
pygsf/utils/qt_utils/filesystem.py
|
Python
|
gpl-3.0
| 1,060
| 0.003774
|
from builtins import str
from qgis.PyQt.QtCore import QFileInfo
from qgis.PyQt.QtWidgets import QFileDialog
def update_directory_key(settings, settings_dir_key, fileName):
"""
modified from module RASTERCALC by Barry Rowlingson
"""
path = QFileInfo(fileName).absolutePath()
settings.setValue(settings_dir_key,
str(path))
def new_file_path(parent, show_msg, path, filter_text):
output_filename, __ = QFileDialog.getSaveFileName(
parent,
show_msg,
path,
filter_text
)
if not output_filename:
return ''
else:
return output_filename
def old_file_path(parent, show_msg, filter_extension, filter_text):
    input_filename, __ = QFileDialog.getOpenFileName(parent,
parent.tr(show_msg),
filter_extension,
filter_text)
if not input_filename:
return ''
else:
        return input_filename
|
neharejanjeva/techstitution
|
venv/lib/python2.7/site-packages/pymongo/collection.py
|
Python
|
cc0-1.0
| 104,519
| 0.000287
|
# Copyright 2009-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Collection level utilities for Mongo."""
import warnings
from bson.code import Code
from bson.objectid import ObjectId
from bson.son import SON
from pymongo import (bulk,
common,
helpers,
message,
results)
from pymongo.command_cursor import CommandCursor
from pymongo.cursor import Cursor
from pymongo.errors import InvalidName, OperationFailure
from pymongo.helpers import _check_write_command_response
from pymongo.message import _INSERT, _UPDATE, _DELETE
from pymongo.operations import _WriteOp
from pymongo.read_preferences import ReadPreference
try:
from collections import OrderedDict
ordered_types = (SON, OrderedDict)
except ImportError:
ordered_types = SON
def _gen_index_name(keys):
"""Generate an index name from the set of fields it is over.
"""
return u"_".join([u"%s_%s" % item for item in keys])
class ReturnDocument(object):
"""An enum used with
:meth:`~pymongo.collection.Collection.find_one_and_replace` and
:meth:`~pymongo.collection.Collection.find_one_and_update`.
"""
BEFORE = False
"""Return the original document before it was updated/replaced, or
``None`` if no document matches the query.
"""
AFTER = True
"""Return the updated/replaced or inserted document."""
class Collection(common.BaseObject):
"""A Mongo collection.
"""
def __init__(self, database, name, create=False, codec_options=None,
read_preference=None, write_concern=None, **kwargs):
"""Get / create a Mongo collection.
Raises :class:`TypeError` if `name` is not an instance of
:class:`basestring` (:class:`str` in python 3). Raises
:class:`~pymongo.errors.InvalidName` if `name` is not a valid
collection name. Any additional keyword arguments will be used
as options passed to the create command. See
:meth:`~pymongo.database.Database.create_collection` for valid
options.
If `create` is ``True`` or additional keyword arguments are
present a create command will be sent. Otherwise, a create
command will not be sent and the collection will be created
implicitly on first use.
:Parameters:
- `database`: the database to get a collection from
- `name`: the name of the collection to get
- `create` (optional): if ``True``, force collection
creation even without options being set
- `codec_options` (optional): An instance of
:class:`~bson.codec_options.CodecOptions`. If ``None`` (the
default) database.codec_options is used.
- `read_preference` (optional): The read preference to use. If
``None`` (the default) database.read_preference is used.
- `write_concern` (optional): An instance of
:class:`~pymongo.write_concern.WriteConcern`. If ``None`` (the
default) database.write_concern is used.
- `**kwargs` (optional): additional keyword arguments will
be passed as options for the create collection command
.. versionchanged:: 2.9
Added the codec_options, read_preference, and write_concern options.
.. versionchanged:: 2.2
Removed deprecated argument: options
.. versionadded:: 2.1
uuid_subtype attribute
.. versionchanged:: 1.5
deprecating `options` in favor of kwargs
.. versionadded:: 1.5
the `create` parameter
.. mongodoc:: collections
"""
opts, mode, tags, wc_doc = helpers._get_common_options(
            database, codec_options, read_preference, write_concern)
salms = database.secondary_acceptable_latency_ms
super(Collection, self).__init__(
codec_options=opts,
read_preference=mode,
tag_sets=tags,
secondary_acceptable_latency_ms=salms,
slave_okay=database.slave_okay,
safe=database.safe,
**wc_doc)
if not isinstance(name, basestring):
raise TypeError("name must be an instance "
"of %s" % (basestring.__name__,))
if not name or ".." in name:
raise InvalidName("collection names cannot be empty")
if "$" in name and not (name.startswith("oplog.$main") or
name.startswith("$cmd")):
raise InvalidName("collection names must not "
"contain '$': %r" % name)
if name[0] == "." or name[-1] == ".":
raise InvalidName("collection names must not start "
"or end with '.': %r" % name)
if "\x00" in name:
raise InvalidName("collection names must not contain the "
"null character")
self.__database = database
self.__name = unicode(name)
self.__full_name = u"%s.%s" % (self.__database.name, self.__name)
if create or kwargs:
self.__create(kwargs)
def __create(self, options):
"""Sends a create command with the given options.
"""
if options:
if "size" in options:
options["size"] = float(options["size"])
self.__database.command("create", self.__name,
read_preference=ReadPreference.PRIMARY,
**options)
else:
self.__database.command("create", self.__name,
read_preference=ReadPreference.PRIMARY)
def __getattr__(self, name):
"""Get a sub-collection of this collection by name.
Raises InvalidName if an invalid collection name is used.
:Parameters:
- `name`: the name of the collection to get
"""
return Collection(self.__database, u"%s.%s" % (self.__name, name))
def __getitem__(self, name):
return self.__getattr__(name)
def __repr__(self):
return "Collection(%r, %r)" % (self.__database, self.__name)
def __eq__(self, other):
if isinstance(other, Collection):
us = (self.__database, self.__name)
them = (other.__database, other.__name)
return us == them
return NotImplemented
def __ne__(self, other):
return not self == other
@property
def full_name(self):
"""The full name of this :class:`Collection`.
The full name is of the form `database_name.collection_name`.
.. versionchanged:: 1.3
``full_name`` is now a property rather than a method.
"""
return self.__full_name
@property
def name(self):
"""The name of this :class:`Collection`.
.. versionchanged:: 1.3
``name`` is now a property rather than a method.
"""
return self.__name
@property
def database(self):
"""The :class:`~pymongo.database.Database` that this
:class:`Collection` is a part of.
.. versionchanged:: 1.3
``database`` is now a property rather than a method.
"""
return self.__database
def with_options(
self, codec_options=None, read_preference=None, write_concern=None):
"""Get a clone of this collection changing the specified settings.
>>> from pymongo import ReadPreference
>>> coll1.read_preference == ReadPreference.PRIMARY
True
>>> coll2 = coll1.with_options(r
|
sidhart/antlr4
|
runtime/Python3/src/antlr4/atn/LexerATNSimulator.py
|
Python
|
bsd-3-clause
| 26,465
| 0.007255
|
#
# [The "BSD license"]
# Copyright (c) 2012 Terence Parr
# Copyright (c) 2012 Sam Harwell
# Copyright (c) 2014 Eric Vergnaud
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#/
# When we hit an accept state in either the DFA or the ATN, we
# have to notify the character stream to start buffering characters
# via {@link IntStream#mark} and record the current state. The current sim state
# includes the current index into the input, the current line,
# and current character position in that line. Note that the Lexer is
# tracking the starting line and characterization of the token. These
# variables track the "state" of the simulator when it hits an accept state.
#
# <p>We track these variables separately for the DFA and ATN simulation
# because the DFA simulation often has to fail over to the ATN
# simulation. If the ATN simulation fails, we need the DFA to fall
# back to its previously accepted state, if any. If the ATN succeeds,
# then the ATN does the accept and the DFA simulator that invoked it
# can simply return the predicted token type.</p>
#/
from antlr4.PredictionContext import PredictionContextCache, SingletonPredictionContext, PredictionContext
from antlr4.InputStream import InputStream
from antlr4.Token import Token
from antlr4.atn.ATN import ATN
from antlr4.atn.ATNConfig import LexerATNConfig
from antlr4.atn.ATNSimulator import ATNSimulator
from antlr4.atn.ATNConfigSet import ATNConfigSet, OrderedATNConfigSet
from antlr4.atn.ATNState import RuleStopState, ATNState
from antlr4.atn.LexerActionExecutor import LexerActionExecutor
from antlr4.atn.Transition import Transition
from antlr4.dfa.DFAState import DFAState
from antlr4.error.Errors import LexerNoViableAltException, UnsupportedOperationException
class SimState(object):
def __init__(self):
self.reset()
def reset(self):
self.index = -1
self.line = 0
self.column = -1
self.dfaState = None
# need forward declaration
Lexer = None
LexerATNSimulator = None
class LexerATNSimulator(ATNSimulator):
debug = False
dfa_debug = False
MIN_DFA_EDGE = 0
MAX_DFA_EDGE = 127 # forces unicode to stay in ATN
ERROR = None
match_calls = 0
def __init__(self, recog:Lexer, atn:ATN, decisionToDFA:list, sharedContextCache:PredictionContextCache):
super().__init__(atn, sharedContextCache)
self.decisionToDFA = decisionToDFA
self.recog = recog
# The current token's starting index into the character stream.
# Shared across DFA to ATN simulation in case the ATN fails and the
# DFA did not have a previous accept state. In this case, we use the
# ATN-generated exception object.
self.startIndex = -1
# line number 1..n within the input#/
self.line = 1
# The index of the character relative to the beginning of the line 0..n-1#/
self.column = 0
from antlr4.Lexer import Lexer
self.mode = Lexer.DEFAULT_MODE
# Used during DFA/ATN exec to record the most recent accept configuration info
self.prevAccept = SimState()
def copyState(self, simulator:LexerATNSimulator ):
self.column = simulator.column
self.line = simulator.line
self.mode = simulator.mode
self.startIndex = simulator.startIndex
def match(self, input:InputStream , mode:int):
self.match_calls += 1
self.mode = mode
mark = input.mark()
try:
self.startIndex = input.index
self.prevAccept.reset()
dfa = self.decisionToDFA[mode]
if dfa.s0 is None:
return self.matchATN(input)
else:
return self.execATN(input, dfa.s0)
finally:
input.release(mark)
def reset(self):
self.prevAccept.reset()
self.startIndex = -1
self.line = 1
self.column = 0
self.mode = Lexer.DEFAULT_MODE
def matchATN(self, input:InputStream):
startState = self.atn.modeToStartState[self.mode]
if self.debug:
print("matchATN mode " + str(self.mode) + " start: " + str(startState))
old_mode = self.mode
s0_closure = self.computeStartState(input, startState)
suppressEdge = s0_closure.hasSemanticContext
s0_closure.hasSemanticContext = False
next = self.addDFAState(s0_closure)
if not suppressEdge:
self.decisionToDFA[self.mode].s0 = next
predict = self.execATN(input, next)
if self.debug:
print("DFA after matchATN: " + str(self.decisionToDFA[old_mode].toLexerString()))
return predict
def execATN(self, input:InputStream, ds0:DFAState):
if self.debug:
print("start state closure=" + str(ds0.configs))
if ds0.isAcceptState:
# allow zero-length tokens
self.captureSimState(self.prevAccept, input, ds0)
t = input.LA(1)
s = ds0 # s is current/from DFA state
while True: # while more work
if self.debug:
                print("execATN loop starting closure: %s\n" % s.configs)
# As we move src->trg, src->trg, we keep track of the previous trg to
# avoid looking up the DFA state again, which is expensive.
# If the previous target was already part of the DFA, we might
# be able to avoid doing a reach operation upon t. If s!=null,
# it means that semantic predicates didn't prevent us from
# creating a DFA state. Once we know s!=null, we check to see if
# the DFA state has an edge already for t. If so, we can just reuse
# it's configuration set; there's no point in re-computing it.
# This is kind of like doing DFA simulation within the ATN
# simulation because DFA simulation is really just a way to avoid
# computing reach/closure sets. Technically, once we know that
# we have a previously added DFA state, we could jump over to
# the DFA simulator. But, that would mean popping back and forth
# a lot and making things more complicated algorithmically.
# This optimization makes a lot of sense for loops within DFA.
# A character will take us back to an existing DFA state
# that already has lots of edges out of it. e.g., .* in comments.
# print("Target for:" + str(s) + " and:" + str(t))
target = self.getExistingTargetState(s, t)
# print("Existing:" + str(target))
if target is None:
target = self.computeTargetState(input, s, t)
# print("Computed:" + str(target))
                if target == self.ERROR:
                    break
foer/linuxmuster-client-unity | tests/autopilot/unity/emulators/panel.py | Python | gpl-3.0 | 11,333 | 0.000794 |
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
# Copyright 2012 Canonical
# Author: Marco Trevisan (Treviño)
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
from __future__ import absolute_import
import logging
from time import sleep
from autopilot.input import Mouse
from autopilot.keybindings import KeybindingsHelper
from unity.emulators import UnityIntrospectionObject
logger = logging.getLogger(__name__)
class PanelController(UnityIntrospectionObject):
"""The PanelController class."""
def get_panel_for_monitor(self, monitor_num):
"""Return an instance of panel for the specified monitor, or None."""
panels = self.get_children_by_type(UnityPanel, monitor=monitor_num)
assert(len(panels) == 1)
return panels[0]
def get_active_panel(self):
"""Return the active panel, or None."""
panels = self.get_children_by_type(UnityPanel, active=True)
assert(len(panels) == 1)
return panels[0]
def get_active_indicator(self):
for panel in self.get_panels:
active = panel.get_active_indicator()
if active:
return active
return None
@property
def get_panels(self):
"""Return the available panels, or None."""
return self.get_children_by_type(UnityPanel)
class UnityPanel(UnityIntrospectionObject, KeybindingsHelper):
"""An individual panel for a monitor."""
def __init__(self, *args, **kwargs):
super(UnityPanel, self).__init__(*args, **kwargs)
self._mouse = Mouse.create()
def __get_menu_view(self):
"""Return the menu view."""
menus = self.get_children_by_type(MenuView)
assert(len(menus) == 1)
return menus[0]
def __get_window_buttons(self):
"""Return the window buttons view."""
buttons = self.menus.get_children_by_type(WindowButtons)
assert(len(buttons) == 1)
return buttons[0]
def __get_grab_area(self):
"""Return the panel grab area."""
grab_areas = self.menus.get_children_by_type(GrabArea)
assert(len(grab_areas) == 1)
return grab_areas[0]
def __get_indicators_view(self):
"""Return the menu view."""
indicators = self.get_children_by_type(Indicators)
assert(len(indicators) == 1)
return indicators[0]
def move_mouse_below_the_panel(self):
"""Places the mouse to bottom of this panel."""
(x, y, w, h) = self.geometry
target_x = x + w / 2
target_y = y + h + 10
logger.debug("Moving mouse away from panel.")
self._mouse.move(target_x, target_y)
def move_mouse_over_menus(self):
"""Move the mouse over the menu area for this panel."""
(x, y, w, h) = self.menus.geometry
target_x = x + w / 2
target_y = y + h / 2
# The menu view has bigger geometry than the real layout
menu_entries = self.menus.get_entries()
if len(menu_entries) > 0:
first_x = menu_entries[0].x
last_x = menu_entries[-1].x + menu_entries[-1].width / 2
target_x = first_x + (last_x - first_x) / 2
logger.debug("Moving mouse to center of menu area.")
self._mouse.move(target_x, target_y)
    def move_mouse_over_grab_area(self):
        """Move the mouse over the grab area for this panel."""
(x, y, w, h) = self.grab_area.geometry
target_x = x + w / 2
target_y = y + h / 2
logger.debug("Moving mouse to center of grab area.")
self._mouse.move(target_x, target_y)
def move_mouse_over_window_buttons(self):
"""Move the mouse over the center of the window buttons area for this panel."""
(x, y, w, h) = self.window_buttons.geometry
target_x = x + w / 2
target_y = y + h / 2
logger.debug("Moving mouse to center of the window buttons.")
self._mouse.move(target_x, target_y)
def move_mouse_over_indicators(self):
"""Move the mouse over the center of the indicators area for this panel."""
(x, y, w, h) = self.indicators.geometry
target_x = x + w / 2
target_y = y + h / 2
logger.debug("Moving mouse to center of the indicators area.")
self._mouse.move(target_x, target_y)
def get_indicator_entries(self, visible_only=True, include_hidden_menus=False):
"""Returns a list of entries for this panel including both menus and indicators"""
entries = []
if include_hidden_menus or self.menus_shown:
entries = self.menus.get_entries()
entries += self.indicators.get_ordered_entries(visible_only)
return entries
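    # Illustrative result with menus shown (entry names hypothetical):
    # [<File>, <Edit>, ..., <indicator-sound>, <indicator-clock>] -- menu
    # entries first, then indicator entries in on-screen order.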
def get_active_indicator(self):
"""Returns the indicator entry that is currently active"""
entries = self.get_indicator_entries(False, True)
entries = filter(lambda e: e.active == True, entries)
assert(len(entries) <= 1)
return entries[0] if entries else None
def get_indicator_entry(self, entry_id):
"""Returns the indicator entry for the given ID or None"""
entries = self.get_indicator_entries(False, True)
entries = filter(lambda e: e.entry_id == entry_id, entries)
assert(len(entries) <= 1)
return entries[0] if entries else None
@property
def title(self):
return self.menus.panel_title
@property
def desktop_is_active(self):
return self.menus.desktop_active
@property
def menus_shown(self):
return self.active and self.menus.draw_menus
@property
def window_buttons_shown(self):
return self.menus.draw_window_buttons
@property
def window_buttons(self):
return self.__get_window_buttons()
@property
def menus(self):
return self.__get_menu_view()
@property
def grab_area(self):
return self.__get_grab_area()
@property
def indicators(self):
return self.__get_indicators_view()
@property
def geometry(self):
"""Returns a tuple of (x,y,w,h) for the current panel."""
return (self.x, self.y, self.width, self.height)
class MenuView(UnityIntrospectionObject):
"""The Menu View class."""
def get_entries(self):
"""Return a list of menu entries"""
entries = self.get_children_by_type(IndicatorEntry)
# We need to filter out empty entries, which are seperators - those
# are not valid, visible and working entries
# For instance, gedit adds some of those, breaking our tests
entries = [e for e in entries if (e.label != "")]
return entries
def get_menu_by_label(self, entry_label):
"""Return the first indicator entry found with the given label"""
indicators = self.get_children_by_type(IndicatorEntry, label=entry_label)
return indicators[0] if indicators else None
@property
def geometry(self):
"""Returns a tuple of (x,y,w,h) for the current menu view."""
return (self.x, self.y, self.width, self.height)
class WindowButtons(UnityIntrospectionObject):
"""The window buttons class"""
def get_buttons(self, visible_only=True):
"""Return a list of window buttons"""
if visible_only:
return self.get_children_by_type(WindowButton, visible=True)
else:
return self.get_children_by_type(WindowButton)
def get_button(self, type):
buttons = self.get_children_by_type(WindowButton, type=type)
assert(len(buttons) == 1)
return buttons[0]
@property
def visible(self):
return len(self.get_buttons()) != 0
@property
def close(self):
return self.get_button("Close")
@property
def minimize(self):
return self.get_button("Minimize")
@property
def unmaximize(self):
return self.get_button("Unmaximize")
@property
def maximize(self):
return self.get_button("Maximize")
    @property
    def geometry(self):
        """Returns a tuple of (x,y,w,h) for the current window buttons area."""
        return (self.x, self.y, self.width, self.height)
aqavi-paracha/coinsbazar | qa/pull-tester/pull-tester.py | Python | mit | 8,761 | 0.007191 |
#!/usr/bin/python
import json
from urllib import urlopen
import requests
import getpass
from string import Template
import sys
import os
import subprocess
class RunError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
def run(command, **kwargs):
fail_hard = kwargs.pop("fail_hard", True)
# output to /dev/null by default:
kwargs.setdefault("stdout", open('/dev/null', 'w'))
kwargs.setdefault("stderr", open('/dev/null', 'w'))
command = Template(command).substitute(os.environ)
if "TRACE" in os.environ:
if 'cwd' in kwargs:
print("[cwd=%s] %s"%(kwargs['cwd'], command))
else: print(command)
try:
process = subprocess.Popen(command.split(' '), **kwargs)
process.wait()
except KeyboardInterrupt:
process.terminate()
raise
if process.returncode != 0 and fail_hard:
raise RunError("Failed: "+command)
return process.returncode
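# Usage sketch (illustrative): ${VARS} are expanded from os.environ by
# Template.substitute above, and fail_hard=False returns the exit code
# instead of raising RunError, e.g.:
#   rc = run("ls ${BUILD_DIR}", fail_hard=False)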
def checkout_pull(clone_url, commit, out):
# Init
build_dir=os.environ["BUILD_DIR"]
run("umount ${CHROOT_COPY}/proc", fail_hard=False)
run("rsync --delete -apv ${CHROOT_MASTER}/ ${CHROOT_COPY}")
run("rm -rf ${CHROOT_COPY}${SCRIPTS_DIR}")
run("cp -a ${SCRIPTS_DIR} ${CHROOT_COPY}${SCRIPTS_DIR}")
# Merge onto upstream/master
run("rm -rf ${BUILD_DIR}")
run("mkdir -p ${BUILD_DIR}")
run("git clone ${CLONE_URL} ${BUILD_DIR}")
run("git remote add pull "+clone_url, cwd=build_dir, stdout=out, stderr=out)
run("git fetch pull", cwd=build_dir, stdout=out, stderr=out)
if run("git merge "+ commit, fail_hard=False, cwd=build_dir, stdout=out, stderr=out) != 0:
return False
run("chown -R ${BUILD_USER}:${BUILD_GROUP} ${BUILD_DIR}", stdout=out, stderr=out)
run("mount --bind /proc ${CHROOT_COPY}/proc")
return True
def commentOn(commentUrl, success, inMerge, needTests, linkUrl):
common_message = """
This test script verifies pulls every time they are updated. It, however, dies sometimes and fails to test properly. If you are waiting on a test, please check timestamps to verify that the test.log is moving at http://jenkins.bluematt.me/pull-tester/current/
Contact BlueMatt on freenode if something looks broken."""
# Remove old CoinsBazarPullTester comments (I'm being lazy and not paginating here)
recentcomments = requests.get(commentUrl+"?sort=created&direction=desc",
auth=(os.environ['GITHUB_USER'], os.environ["GITHUB_AUTH_TOKEN"])).json
for comment in recentcomments:
if comment["user"]["login"] == os.environ["GITHUB_USER"] and common_message in comment["body"]:
requests.delete(comment["url"],
auth=(os.environ['GITHUB_USER'], os.environ["GITHUB_AUTH_TOKEN"]))
if success == True:
if needTests:
message = "Automatic sanity-testing: PLEASE ADD TEST-CASES, though technically passed. See " + linkUrl + " for binaries and test log."
else:
message = "Automatic sanity-testing: PASSED, see " + linkUrl + " for binaries and test log."
post_data = { "body" : message + common_message}
elif inMerge:
post_data = { "body" : "Automatic sanity-testing: FAILED MERGE, see " + linkUrl + " for test log." + """
This pull does not merge cleanly onto current master""" + common_message}
else:
post_data = { "body" : "Automatic sanity-testing: FAILED BUILD/TEST, see " + linkUrl + " for binaries and test log." + """
This could happen for one of several reasons:
1. It changes build scripts in a way that made them incompatible with the automated testing scripts (please tweak those patches in qa/pull-tester)
2. It adds/modifies tests which test network rules (thanks for doing that), which conflicts with a patch applied at test time
3. It does not build on either Linux i386 or Win32 (via MinGW cross compile)
4. The test suite fails on either Linux i386 or Win32
5. The block test-cases failed (lookup the first bNN identifier which failed in https://github.com/TheBlueMatt/test-scripts/blob/master/FullBlockTestGenerator.java)
If you believe this to be in error, please ping BlueMatt on freenode or TheBlueMatt here.
""" + common_message}
resp = requests.post(commentUrl, json.dumps(post_data), auth=(os.environ['GITHUB_USER'], os.environ["GITHUB_AUTH_TOKEN"]))
def testpull(number, comment_url, clone_url, commit):
print("Testing pull %d: %s : %s"%(number, clone_url,commit))
dir = os.environ["RESULTS_DIR"] + "/" + commit + "/"
    print(" output to %s"%dir)
if os.path.exists(dir):
os.system("rm -r " + dir)
os.makedirs(dir)
currentdir = os.environ["RESULTS_DIR"] + "/current"
os.system("rm -r "+currentdir)
os.system("ln -s " + dir + " " + currentdir)
out = open(dir + "test.log", 'w+')
resultsurl = os.environ["RESULTS_URL"] + commit
checkedout = checkout_pull(clone_url, commit, out)
if checkedout != True:
print("Failed to test pull - sending comment to: " + comment_url)
commentOn(comment_url, False, True, False, resultsurl)
open(os.environ["TESTED_DB"], "a").write(commit + "\n")
return
run("rm -rf ${CHROOT_COPY}/${OUT_DIR}", fail_hard=False);
run("mkdir -p ${CHROOT_COPY}/${OUT_DIR}", fail_hard=False);
run("chown -R ${BUILD_USER}:${BUILD_GROUP} ${CHROOT_COPY}/${OUT_DIR}", fail_hard=False)
script = os.environ["BUILD_PATH"]+"/qa/pull-tester/pull-tester.sh"
script += " ${BUILD_PATH} ${MINGW_DEPS_DIR} ${SCRIPTS_DIR}/CoinsBazardComparisonTool_jar/CoinsBazardComparisonTool.jar 0 6 ${OUT_DIR}"
returncode = run("chroot ${CHROOT_COPY} sudo -u ${BUILD_USER} -H timeout ${TEST_TIMEOUT} "+script,
fail_hard=False, stdout=out, stderr=out)
run("mv ${CHROOT_COPY}/${OUT_DIR} " + dir)
run("mv ${BUILD_DIR} " + dir)
if returncode == 42:
print("Successfully tested pull (needs tests) - sending comment to: " + comment_url)
        commentOn(comment_url, True, False, True, resultsurl)
elif returncode != 0:
print("Failed to test pull - sending comment to: " + comment_url)
commentOn(comment_url, False, False, False, resultsurl)
else:
print("Successfully tested pull - sending comment to: " + comment_url)
commentOn(comment_url, True, False, False, resultsurl)
open(os.environ["TESTED_DB"], "a").write(commit + "\n")
def environ_default(setting, value):
if not setting in os.environ:
os.environ[setting] = value
if getpass.getuser() != "root":
print("Run me as root!")
sys.exit(1)
if "GITHUB_USER" not in os.environ or "GITHUB_AUTH_TOKEN" not in os.environ:
print("GITHUB_USER and/or GITHUB_AUTH_TOKEN environment variables not set")
sys.exit(1)
environ_default("CLONE_URL", "https://github.com/bitcoin/bitcoin.git")
environ_default("MINGW_DEPS_DIR", "/mnt/w32deps")
environ_default("SCRIPTS_DIR", "/mnt/test-scripts")
environ_default("CHROOT_COPY", "/mnt/chroot-tmp")
environ_default("CHROOT_MASTER", "/mnt/chroot")
environ_default("OUT_DIR", "/mnt/out")
environ_default("BUILD_PATH", "/mnt/bitcoin")
os.environ["BUILD_DIR"] = os.environ["CHROOT_COPY"] + os.environ["BUILD_PATH"]
environ_default("RESULTS_DIR", "/mnt/www/pull-tester")
environ_default("RESULTS_URL", "http://jenkins.bluematt.me/pull-tester/")
environ_default("GITHUB_REPO", "bitcoin/bitcoin")
environ_default("TESTED_DB", "/mnt/commits-tested.txt")
environ_default("BUILD_USER", "matt")
environ_default("BUILD_GROUP", "matt")
environ_default("TEST_TIMEOUT", str(60*60*2))
print("Optional usage: pull-tester.py 2112")
f = open(os.environ["TESTED_DB"])
tested = set( line.rstrip() for line in f.readlines() )
f.close()
if len(sys.argv) > 1:
pull = requests.get("https://api.github.com/repos/"+os.environ["GITHUB_REPO"]+"/pulls/"+sys.argv[1],
auth=(os.environ['GITHUB_USER'], os.environ["GITHUB_AUTH_TOKEN"])).json
testpull(pull["number"], pull["_links"]["comments"]["href"],
pull["head"]["repo"]["clone_url"], pull["head"]["sha"])
else:
for page in range(1,100
tylertian/Openstack | openstack F/nova/nova/volume/nexenta/volume.py | Python | apache-2.0 | 11,026 | 0.001179 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2011 Nexenta Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`nexenta.volume` -- Driver to store volumes on Nexenta Appliance
=====================================================================
.. automodule:: nexenta.volume
.. moduleauthor:: Yuriy Taraday <yorik.sar@gmail.com>
"""
from nova import exception
from nova import flags
from nova.openstack.common import cfg
from nova.openstack.common import log as logging
from nova.volume import driver
from nova.volume import nexenta
from nova.volume.nexenta import jsonrpc
LOG = logging.getLogger(__name__)
FLAGS = flags.FLAGS
nexenta_opts = [
cfg.StrOpt('nexenta_host',
default='',
help='IP address of Nexenta SA'),
cfg.IntOpt('nexenta_rest_port',
default=2000,
help='HTTP port to connect to Nexenta REST API server'),
cfg.StrOpt('nexenta_rest_protocol',
               default='auto',
help='Use http or https for REST connection (default auto)'),
cfg.StrOpt('nexenta_user',
default='admin',
help='User name to connect to Nexenta SA'),
cfg.StrOpt('nexenta_password',
default='nexenta',
help='Password to connect to Nexenta SA'),
cfg.IntOpt('nexenta_iscsi_target_portal_port',
default=3260,
help='Nexenta target portal port'),
cfg.StrOpt('nexenta_volume',
default='nova',
help='pool on SA that will hold all volumes'),
cfg.StrOpt('nexenta_target_prefix',
default='iqn.1986-03.com.sun:02:nova-',
help='IQN prefix for iSCSI targets'),
cfg.StrOpt('nexenta_target_group_prefix',
default='nova/',
help='prefix for iSCSI target groups on SA'),
cfg.StrOpt('nexenta_blocksize',
default='',
help='block size for volumes (blank=default,8KB)'),
cfg.BoolOpt('nexenta_sparse',
default=False,
help='flag to create sparse volumes'),
]
FLAGS.register_opts(nexenta_opts)
class NexentaDriver(driver.ISCSIDriver): # pylint: disable=R0921
"""Executes volume driver commands on Nexenta Appliance."""
def __init__(self):
super(NexentaDriver, self).__init__()
def do_setup(self, context):
protocol = FLAGS.nexenta_rest_protocol
auto = protocol == 'auto'
if auto:
protocol = 'http'
self.nms = jsonrpc.NexentaJSONProxy(
'%s://%s:%s/rest/nms/' % (protocol, FLAGS.nexenta_host,
FLAGS.nexenta_rest_port),
FLAGS.nexenta_user, FLAGS.nexenta_password, auto=auto)
def check_for_setup_error(self):
"""Verify that the volume for our zvols exists.
:raise: :py:exc:`LookupError`
"""
if not self.nms.volume.object_exists(FLAGS.nexenta_volume):
raise LookupError(_("Volume %s does not exist in Nexenta SA"),
FLAGS.nexenta_volume)
@staticmethod
def _get_zvol_name(volume_name):
"""Return zvol name that corresponds given volume name."""
return '%s/%s' % (FLAGS.nexenta_volume, volume_name)
@staticmethod
def _get_target_name(volume_name):
"""Return iSCSI target name to access volume."""
return '%s%s' % (FLAGS.nexenta_target_prefix, volume_name)
@staticmethod
def _get_target_group_name(volume_name):
"""Return Nexenta iSCSI target group name for volume."""
return '%s%s' % (FLAGS.nexenta_target_group_prefix, volume_name)
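    # Illustrative naming with the default flags above for a volume named
    # 'volume-1':
    #   zvol:         nova/volume-1
    #   iSCSI target: iqn.1986-03.com.sun:02:nova-volume-1
    #   target group: nova/volume-1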
def create_volume(self, volume):
"""Create a zvol on appliance.
:param volume: volume reference
"""
self.nms.zvol.create(
self._get_zvol_name(volume['name']),
'%sG' % (volume['size'],),
FLAGS.nexenta_blocksize, FLAGS.nexenta_sparse)
def delete_volume(self, volume):
"""Destroy a zvol on appliance.
:param volume: volume reference
"""
try:
self.nms.zvol.destroy(self._get_zvol_name(volume['name']), '')
except nexenta.NexentaException as exc:
if "zvol has children" in exc.args[1]:
raise exception.VolumeIsBusy
else:
raise
def create_snapshot(self, snapshot):
"""Create snapshot of existing zvol on appliance.
:param snapshot: shapshot reference
"""
self.nms.zvol.create_snapshot(
self._get_zvol_name(snapshot['volume_name']),
snapshot['name'], '')
def create_volume_from_snapshot(self, volume, snapshot):
"""Create new volume from other's snapshot on appliance.
:param volume: reference of volume to be created
:param snapshot: reference of source snapshot
"""
self.nms.zvol.clone(
'%s@%s' % (self._get_zvol_name(snapshot['volume_name']),
snapshot['name']),
self._get_zvol_name(volume['name']))
def delete_snapshot(self, snapshot):
"""Delete volume's snapshot on appliance.
:param snapshot: shapshot reference
"""
try:
self.nms.snapshot.destroy(
'%s@%s' % (self._get_zvol_name(snapshot['volume_name']),
snapshot['name']),
'')
except nexenta.NexentaException as exc:
if "snapshot has dependent clones" in exc.args[1]:
raise exception.SnapshotIsBusy
else:
raise
def local_path(self, volume):
"""Return local path to existing local volume.
We never have local volumes, so it raises NotImplementedError.
:raise: :py:exc:`NotImplementedError`
"""
LOG.error(_("Call to local_path should not happen."
" Verify that use_local_volumes flag is turned off."))
raise NotImplementedError
def _do_export(self, _ctx, volume, ensure=False):
"""Do all steps to get zvol exported as LUN 0 at separate target.
:param volume: reference of volume to be exported
:param ensure: if True, ignore errors caused by already existing
resources
:return: iscsiadm-formatted provider location string
"""
zvol_name = self._get_zvol_name(volume['name'])
target_name = self._get_target_name(volume['name'])
target_group_name = self._get_target_group_name(volume['name'])
try:
self.nms.iscsitarget.create_target({'target_name': target_name})
except nexenta.NexentaException as exc:
if not ensure or 'already configured' not in exc.args[1]:
raise
else:
LOG.info(_('Ignored target creation error "%s"'
' while ensuring export'), exc)
try:
self.nms.stmf.create_targetgroup(target_group_name)
except nexenta.NexentaException as exc:
if not ensure or 'already exists' not in exc.args[1]:
raise
else:
LOG.info(_('Ignored target group creation error "%s"'
' while ensuring export'), exc)
try:
self.nms.stmf.add_targetgroup_member(target_group_name,
target_name)
except nexenta.NexentaException as exc:
if not ensure or 'already exists' not in exc.args[1]:
                raise
sadanandb/pmt | src/pyasm/prod/biz/texture.py | Python | epl-1.0 | 5,425 | 0.00977 |
###########################################################
#
# Copyright (c) 2005, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
__all__ = ['Texture', 'TextureSource', 'ShotTexture']
from pyasm.search import *
from pyasm.biz import Project
class Texture(SObject):
SEARCH_TYPE = "prod/texture"
def get_relation(my, name):
from asset import Asset
relations = {}
relations['asset'] = Asset
relations['texture'] = Texture
return relations[name]
def get_icon_context(my, context=None):
return "publish"
# static functions
def create(cls, asset, code=None, category=None, description=None, sobject_context=None):
sobject = SearchType.create( cls.SEARCH_TYPE )
asset_code = asset.get_code()
#asset_code = asset.get_code()
sobject.set_value("asset_code", asset.get_code())
if sobject_context != None:
sobject.set_value("asset_context", sobject_context)
if code != None:
sobject.set_value("code", code)
if category != None:
            sobject.set_value("category", category)
        if description != None:
sobject.set_value("description", description)
sobject.commit()
return sobject
create = classmethod(create)
def get(cls, texture_code, parent_code, project_code=None, is_multi=False):
'''TODO: use search_type, id for the parent search'''
if not project_code:
project_code = Project.get_project_code()
search = Search( cls.SEARCH_TYPE, project_code )
#search.set_show_retired(True)
if texture_code:
search.add_filter('code', texture_code)
search.add_filter('asset_code', parent_code)
search_type = search.get_search_type()
key = "%s|%s|%s" % (search_type, texture_code, parent_code)
sobj = cls.get_by_search(search, key, is_multi=is_multi)
return sobj
get = classmethod(get)
class TextureSource(Texture):
SEARCH_TYPE = "prod/texture_source"
def create(cls, asset_code, code=None, category=None, description=None, sobject_context=None):
sobject = SearchType.create( cls.SEARCH_TYPE )
sobject.set_value("asset_code", asset_code)
if sobject_context != None:
sobject.set_value("asset_context", sobject_context)
if code != None:
sobject.set_value("code", code)
if category != None:
sobject.set_value("category", category)
if description != None:
sobject.set_value("description", description)
sobject.commit()
return sobject
create = classmethod(create)
class ShotTexture(Texture):
SEARCH_TYPE = "prod/shot_texture"
def get_shot_code(my):
shot_code = ''
search_type = my.get_value('search_type')
search = Search( search_type )
search.add_filter( 'id', my.get_value('search_id') )
parent = search.get_sobject()
if not parent:
return shot_code
if search_type.startswith('prod/shot_instance'):
shot_code = parent.get_value('shot_code')
else:
shot_code = parent.get_value('code')
return shot_code
# static functions
def create(cls, sobject, code=None, category=None, description=None, sobject_context=None):
texture = SearchType.create( cls.SEARCH_TYPE )
texture.set_value("search_type", sobject.get_search_type() )
texture.set_value("search_id", sobject.get_id())
#texture.set_value("shot_code", shot_code)
if sobject_context != None:
texture.set_value("asset_context", sobject_context)
if code != None:
texture.set_value("code", code)
if category != None:
texture.set_value("category", category)
if description != None:
texture.set_value("description", description)
texture.commit()
return texture
create = classmethod(create)
def get(cls, texture_code, parent_code, project_code=None, is_multi=False):
if not project_code:
project_code = Project.get_project_code()
search = Search( cls.SEARCH_TYPE, project_code )
#search.set_show_retired(True)
if texture_code:
search.add_filter('code', texture_code)
# backward compatible with using shot code
if isinstance(parent_code, basestring):
from pyasm.prod.biz import Shot
parent = Shot.get_by_code(parent_code)
else:
parent = parent_code
if not parent:
if is_multi:
return []
else:
return None
search.add_filter('search_type', parent.get_search_type())
search.add_filter('search_id', parent.get_id())
parent_key = SearchKey.get_by_sobject(parent)
search_type = search.get_search_type()
key = "%s|%s|%s" % (search_type, texture_code, parent_key)
sobj = cls.get_by_search(search, key, is_multi=is_multi)
return sobj
get = classmethod(get)
Shekharrajak/django-db-mailer | dbmail/management/commands/dbmail_test_send.py | Python | gpl-2.0 | 1,678 | 0.004172 |
import re
import optparse
from django.core.management.base import BaseCommand
from dbmail.models import MailTemplate
from dbmail.defaults import BACKEND
from dbmail import db_sender
def send_test_msg(pk, email, user=None, **kwargs):
template = MailTemplate.objects.get(pk=pk)
slug = template.slug
var_list = re.findall('\{\{\s?(\w+)\s?\}\}', template.message)
context = {}
for var in var_list:
context[var] = '%s' % var.upper().replace('_', '-')
return db_sender(slug, email, user, context, **kwargs)
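# Illustrative: for a template whose message contains 'Hello {{ user_name }}',
# var_list is ['user_name'] and context becomes {'user_name': 'USER-NAME'},
# so every placeholder renders with a visible dummy value in the test mail.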
class Command(BaseCommand):
    option_list = BaseCommand.option_list + (
optparse.make_option('--email', dest='email', help='Recipients'),
optparse.make_option('--pk', dest='pk', help='DBMail template id'),
optparse.make_option('--without-celery', action='store_true',
default=False, dest='celery',
help='Send direct message'),
optparse.make_option('--provider', dest='provider', help='Provider'),
optparse.make_option(
'--backend', dest='backend', help='Backend', default='mail'),
)
@staticmethod
def get_kwargs(options):
kwargs = {
'use_celery': not options['celery'],
'backend': BACKEND['mail']}
if options['provider']:
kwargs['provider'] = options['provider']
if options['backend']:
kwargs['backend'] = BACKEND[options['backend']]
return kwargs
def handle(self, *args, **options):
send_test_msg(
options['pk'], options['email'], **self.get_kwargs(options)
)
print "Done. Message was sent."
QC-Technologies/HRMS | interview/admin/candidate.py | Python | gpl-3.0 | 1,067 | 0 |
from django.contrib import admin
class CandidateAdmin(admin.ModelAdmin):
fieldsets = [
(None, {
'fields': ['email', 'first_name', 'last_name', 'gender', 'cv']
}),
('Contact Information', {
'classes': ('collapse',),
'fields': ['mobile_phone']
}),
('Address Information', {
'classes': ('collapse',),
'fields': ['address', 'city']
}),
('Additional Information', {
'classes': ('collapse',),
'fields': ['qualification', 'institute', 'experienced']
})
]
def get_fieldsets(self, request, obj=None):
        if obj is None:
self.fieldsets[0][1]['fields'] = ['email', 'first_name',
'last_name', 'gender', 'cv']
else:
self.fieldsets[0][1]['fields'] = ['email', 'first_name',
'last_name', 'gender', 'cv',
'status']
return self.fieldsets
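    # Illustrative effect: the add form (obj is None) hides 'status', while
    # the change form for an existing candidate appends it to the first
    # fieldset.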
Tasignotas/topographica_mirror | platform/ipython/profile_topo/ipython_notebook_config.py | Python | bsd-3-clause | 4,652 | 0.003869 |
# Stripped down configuration file for ipython-notebook in Topographica.
c = get_config()
#------------------------------------------------------------------------------
# NotebookApp configuration
#------------------------------------------------------------------------------
# NotebookApp will inherit config from: BaseIPythonApplication, Application
# The IP address the notebook server will listen on.
# c.NotebookApp.ip = '127.0.0.1'
# The base URL for the notebook server
# c.NotebookApp.base_project_url = '/'
# The port the notebook server will listen on.
# c.NotebookApp.port = 8888
# Whether to prevent editing/execution of notebooks.
# c.NotebookApp.read_only = False
# Whether to open in a browser after starting. The specific browser used is
# platform dependent and determined by the python standard library `webbrowser`
# module, unless it is overridden using the --browser (NotebookApp.browser)
# configuration option.
c.NotebookApp.open_browser = False
# The full path to an SSL/TLS certificate file.
# c.NotebookApp.certfile = u''
# Hashed password to use for web authentication.
#
# To generate, type in a python/IPython shell:
#
# from IPython.lib import passwd; passwd()
#
# The string should be of the form type:salt:hashed-password.
# c.NotebookApp.password = u''
# The full path to a private key file for usage with SSL/TLS.
# c.NotebookApp.keyfile = u''
#------------------------------------------------------------------------------
# IPKernelApp configuration
#------------------------------------------------------------------------------
# IPython: an enhanced interactive Python shell.
# IPKernelApp will inherit config from: KernelApp, BaseIPythonApplication,
# Application, InteractiveShellApp
# Execute the given command string.
# c.IPKernelApp.code_to_run = ''
# lines of code to run at IPython startup.
# c.IPKernelApp.exec_lines = []
# A file to be run
# c.IPKernelApp.file_to_run = ''
# List of files to run at IPython startup.
# c.IPKernelApp.exec_files = []
# dotted module name of an IPython extension to load.
c.IPKernelApp.extra_extension = 'topo.misc.ipython'
#------------------------------------------------------------------------------
# InlineBackend configuration
#------------------------------------------------------------------------------
# An object to store configuration of the inline backend.
# The image format for figures with the inline backend.
# c.InlineBackend.figure_format = 'png'
# Close all figures at the end of each cell.
#
# When True, ensures that each cell starts with no active figures, but it also
# means that one must keep track of references in order to edit or redraw
# figures in subsequent cells. This mode is ideal for the notebook, where
# residual plots from other cells might be surprising.
#
# When False, one must call figure() to create new figures. This means that
# gcf() and getfigs() can reference figures created in other cells, and the
# active figure can continue to be edited with pylab/pyplot methods that
# reference the current active figure. This mode facilitates iterative editing
# of figures, and behaves most consistently with other matplotlib backends, but
# figure barriers between cells must be explicit.
# c.InlineBackend.close_figures = True
# Subset of matplotlib rcParams that should be different for the inline backend.
# c.InlineBackend.rc = {'font.size': 10, 'savefig.dpi': 72, 'figure.figsize': (6.0, 4.0), 'figure.subplot.bottom': 0.125}
#------------------------------------------------------------------------------
# MappingKernelManager configuration
#------------------------------------------------------------------------------
# A KernelManager that handles notebook mapping and HTTP error handling
# MappingKernelManager will inherit config from: MultiKernelManager
# The max raw message size accepted from the browser over a WebSocket
# connection.
# c.MappingKernelManager.max_msg_size = 65536
# Kernel heartbeat interval in seconds.
# c.MappingKernelManager.time_to_dead = 3.0
# Delay (in seconds) before sending first heartbeat.
# c.MappingKernelManager.first_beat = 5.0
#------------------------------------------------------------------------------
# NotebookManager configuration
#------------------------------------------------------------------------------
# Automatically create a Python script when saving the notebook.
#
# For easier use of import, %run and %load across notebooks, a <notebook-
# name>.py script will be created next to any <notebook-name>.ipynb on each
# save. This can also be set with the short `--script` flag.
# c.NotebookManager.save_script = False
common-workflow-language/cwl-upgrader | setup.py | Python | apache-2.0 | 2,104 | 0 |
#!/usr/bin/env python
import os
import sys
from setuptools import setup
SETUP_DIR = os.path.dirname(__file__)
README = os.path.join(SETUP_DIR, "README.rst")
NEEDS_PYTEST = {"pytest", "test", "ptr"}.intersection(sys.argv)
PYTEST_RUNNER = ["pytest-runner", "pytest-cov"] if NEEDS_PYTEST else []
setup(
name="cwl-upgrader",
version="1.2.2",
description="Common Workflow Language standalone document upgrader",
long_description=open(README).read(),
author="Common Workflow Language contributors",
author_email="common-workflow-language@googlegroups.com",
url="https://github.com/common-workflow-language/cwl-upgrader",
download_url="https://github.com/common-workflow-language/cwl-upgrader",
license="Apache 2.0",
packages=["cwlupgrader", "cwlupgrader.tests"],
include_package_data=True,
package_dir={"cwlupgrader.tests": "tests"},
package_data={"cwlupgrader.tests": ["../testdata/**/*.cwl"]},
install_requires=[
"setuptools",
"ruamel.yaml >= 0.14.12, < 0.17.21",
"schema_salad",
],
entry_points={"console_scripts": ["cwl-upgrader = cwlupgrader.main:main"]},
    python_requires=">=3.6, <4",
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Environment :: Console",
        "Intended Audience :: Developers",
        "Intended Audience :: Healthcare Industry",
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Bio-Informatics",
],
zip_safe=True,
setup_requires=PYTEST_RUNNER,
tests_require=["pytest < 7.1.0"],
test_suite="tests",
)
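# Usage sketch (illustrative; the argument is hypothetical): installing the
# package registers the 'cwl-upgrader' console script declared above, e.g.
#   pip install .
#   cwl-upgrader path/to/workflow.cwl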
pdorrell/emacs-site-lisp | test/test-project/src/subdir_with_files/spaced dir name/hello.py | Python | gpl-2.0 | 45 | 0 |
def hello_again():
    print("hello again")
vorwerkc/pymatgen | pymatgen/command_line/bader_caller.py | Python | mit | 22,902 | 0.002358 |
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
This module implements an interface to the Henkelmann et al.'s excellent
Fortran code for calculating a Bader charge analysis.
This module depends on a compiled bader executable available in the path.
Please download the library at http://theory.cm.utexas.edu/vasp/bader/ and
follow the instructions to compile the executable.
If you use this module, please cite the following:
G. Henkelman, A. Arnaldsson, and H. Jonsson, "A fast and robust algorithm for
Bader decomposition of charge density", Comput. Mater. Sci. 36, 354-360 (2006).
"""
import glob
import os
import shutil
import subprocess
import warnings
import numpy as np
from monty.dev import requires
from monty.io import zopen
from monty.os.path import which
from monty.tempfile import ScratchDir
from pymatgen.io.cube import Cube
from pymatgen.io.vasp.inputs import Potcar
from pymatgen.io.vasp.outputs import Chgcar
__author__ = "shyuepingong"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__status__ = "Beta"
__date__ = "4/5/13"
BADEREXE = which("bader") or which("bader.exe")
class BaderAnalysis:
"""
Bader analysis for Cube files and VASP outputs.
.. attribute: data
Atomic data parsed from bader analysis. Essentially a list of dicts
of the form::
[
{
"atomic_vol": 8.769,
"min_dist": 0.8753,
"charge": 7.4168,
"y": 1.1598,
"x": 0.0079,
"z": 0.8348
},
...
]
.. attribute: vacuum_volume
Vacuum volume of the Bader analysis.
.. attribute: vacuum_charge
Vacuum charge of the Bader analysis.
.. attribute: nelectrons
Number of electrons of the Bader analysis.
.. attribute: chgcar
Chgcar object associated with input CHGCAR file.
.. attribute: atomic_densities
list of charge densities for each atom centered on the atom
excess 0's are removed from the array to reduce the size of the array
the charge densities are dicts with the charge density map,
the shift vector applied to move the data to the center, and the original dimension of the charge density map
charge:
{
"data": charge density array
"shift": shift used to center the atomic charge density
"dim": dimension of the original charge density map
}
"""
@requires(
which("bader") or which("bader.exe"),
"BaderAnalysis requires the executable bader to be in the path."
" Please download the library at http://theory.cm.utexas"
".edu/vasp/bader/ and compile the executable.",
)
def __init__(
self,
chgcar_filename=None,
potcar_filename=None,
chgref_filename=None,
parse_atomic_densities=False,
cube_filename=None,
):
"""
Initializes the Bader caller.
Args:
chgcar_filename (str): The filename of the CHGCAR.
parse_atomic_densities (bool): Optional. turns on atomic partition of the charge density
charge densities are atom centered
"""
if not BADEREXE:
raise RuntimeError(
"BaderAnalysis requires the executable bader to be in the path."
" Please download the library at http://theory.cm.utexas"
".edu/vasp/bader/ and compile the executable."
)
if not (cube_filename or chgcar_filename):
raise ValueError("You must provide a file! Either a cube file or a CHGCAR")
if cube_filename and chgcar_filename:
raise ValueError("You cannot parse a cube and a CHGCAR at the same time!")
self.parse_atomic_densities = parse_atomic_densities
if chgcar_filename:
fpath = os.path.abspath(chgcar_filename)
self.is_vasp = True
self.chgcar = Chgcar.from_file(chgcar_filename)
self.structure = self.chgcar.structure
self.potcar = Potcar.from_file(potcar_filename) if potcar_filename is not None else None
self.natoms = self.chgcar.poscar.natoms
chgrefpath = os.path.abspath(chgref_filename) if chgref_filename else None
self.reference_used = bool(chgref_filename)
# List of nelects for each atom from potcar
potcar_indices = []
for i, v in enumerate(self.natoms):
potcar_indices += [i] * v
self.nelects = (
[self.potcar[potcar_indices[i]].nelectrons for i in range(len(self.structure))] if self.potcar else []
)
else:
fpath = os.path.abspath(cube_filename)
self.is_vasp = False
self.cube = Cube(fpath)
self.structure = self.cube.structure
self.nelects = None
chgrefpath = os.path.abspath(chgref_filename) if chgref_filename else None
self.reference_used = bool(chgref_filename)
tmpfile = "CHGCAR" if chgcar_filename else "CUBE"
with ScratchDir("."):
with zopen(fpath, "rt") as f_in:
with open(tmpfile, "wt") as f_out:
shutil.copyfileobj(f_in, f_out)
args = [BADEREXE, tmpfile]
if chgref_filename:
with zopen(chgrefpath, "rt") as f_in:
with open("CHGCAR_ref", "wt") as f_out:
shutil.copyfileobj(f_in, f_out)
args += ["-ref", "CHGCAR_ref"]
if parse_atomic_densities:
args += ["-p", "all_atom"]
with subprocess.Popen(args, stdout=subprocess.PIPE, stdin=subprocess.PIPE, close_fds=True) as rs:
stdout, stderr = rs.communicate()
if rs.returncode != 0:
raise RuntimeError(
"bader exited with return code %d. Please check your bader installation." % rs.returncode
)
try:
self.version = float(stdout.split()[5])
except ValueError:
self.version = -1 # Unknown
if self.version < 1.0:
warnings.warn(
"Your installed version of Bader is outdated, calculation of vacuum charge may be incorrect."
)
data = []
with open("ACF.dat") as f:
raw = f.readlines()
headers = ("x", "y", "z", "charge", "min_dist", "atomic_vol")
raw.pop(0)
raw.pop(0)
while True:
l = raw.pop(0).strip()
if l.startswith("-"):
break
vals = map(float, l.split()[1:])
data.append(dict(zip(headers, vals)))
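            # ACF.dat layout assumed above (illustrative): two header lines,
            # one whitespace-separated row per atom, a dashed separator,
            # then 'KEY : value' summary lines parsed below.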
for l in raw:
toks = l.strip().split(":")
if toks[0] == "VACUUM CHARGE":
self.vacuum_charge = float(toks[1])
elif toks[0] == "VACUUM VOLUME":
                    self.vacuum_volume = float(toks[1])
                elif toks[0] == "NUMBER OF ELECTRONS":
self.nelectrons = float(toks[1])
self.data = data
if self.parse_atomic_densities:
            # convert the charge density for each atom spit out by Bader into Chgcar objects for easy parsing
atom_chgcars = [
Chgcar.from_file(f"BvAt{str(i).zfill(4)}.dat") for i in range(1, len(self.chgcar.structure) + 1)
]
atomic_densities = []
# For each atom in the structure
for atom, loc, chg in zip(
self.chgcar.structure,
self.chgcar.structure.frac_coords,
atom_chgcars,
):
# Find the index of the atom in the charge density atom
index = np.round(np.multiply(loc, chg.dim))
AndreaMordenti/spotydowny | core/convert.py | Python | mit | 2,314 | 0.001729 |
import subprocess
import os
"""
What are the differences and similarities between ffmpeg, libav, and avconv?
https://stackoverflow.com/questions/9477115
ffmpeg encoders, from higher to lower quality:
libopus > libvorbis >= libfdk_aac > aac > libmp3lame
libfdk_aac, due to licensing, needs to be compiled by the end user;
on macOS, brew install ffmpeg --with-fdk-aac will do just that. Other OS?
https://trac.ffmpeg.org/wiki/Encode/AAC
"""
def song(input_song, output_song, folder, avconv=False, verbose=False):
"""Do the audio format conversion."""
if not input_song == output_song:
print('Converting {0} to {1}'.format(
input_song, output_song.split('.')[-1]))
if avconv:
exit_code = convert_with_avconv(input_song, output_song, folder, verbose)
else:
exit_code = convert_with_ffmpeg(input_song, output_song, folder, verbose)
return exit_code
return 0
def convert_with_avconv(input_song, output_song, folder, verbose):
"""Convert the audio file using avconv."""
if verbose:
level = 'debug'
else:
level = '0'
command = ['avconv',
'-loglevel', level,
'-i', os.path.join(folder, input_song),
'-ab', '192k',
os.path.join(folder, output_song)]
return subprocess.call(command)
def convert_with_ffmpeg(input_song, output_song, folder, verbose):
"""Convert the audio file using FFmpeg."""
ffmpeg_pre = 'ffmpeg -y '
if not verbose:
ffmpeg_pre += '-hide_banner -nostats -v panic '
input_ext = input_song.split('.')[-1]
output_ext = output_song.split('.')[-1]
if input_ext == 'm4a':
        if output_ext == 'mp3':
ffmpeg_params = '-codec:v copy -codec:a libmp3lame -q:a 2 '
elif output_ext == 'webm':
ffmpeg_params = '-c:a libopus -vbr on -b:a 192k -vn '
elif input_ext == 'webm':
if output_ext == 'mp3':
ffmpeg_params = ' -ab 192k -ar 44100 -vn '
elif output_ext == 'm4a':
ffmpeg_params = '-cutoff 20000 -c:a libfdk_aac -b:a 192k -vn '
command = '{0}-i {1} {2}{3}'.format(
ffmpeg_pre, os.path.join(folder, input_song), ffmpeg_params, os.path.join(folder, output_song)).split(' ')
return subprocess.call(command)
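# Usage sketch (illustrative; file names and folder are hypothetical):
# convert a downloaded .m4a to .mp3 with FFmpeg and report the exit code.
if __name__ == '__main__':
    code = song('track.m4a', 'track.mp3', '/tmp/music', avconv=False, verbose=True)
    print('converter exit code: {0}'.format(code))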
winnerineast/Origae-6 | origae/dataset/text/classification/forms.py | Python | gpl-3.0 | 14,708 | 0.001904 |
from __future__ import absolute_import
import os.path
import requests
import wtforms
from wtforms import validators
from ..forms import TextDatasetForm
from origae import utils
from origae.utils.forms import validate_required_iff, validate_greater_than
class TextClassificationDatasetForm(TextDatasetForm):
"""
Defines the form used to create a new TextClassificationDatasetJob
"""
backend = wtforms.SelectField('DB backend',
choices=[
('lmdb', 'LMDB'),
('hdf5', 'HDF5')
],
default='lmdb',
)
def validate_backend(form, field):
if field.data == 'lmdb':
form.compression.data = 'none'
elif field.data == 'tfrecords':
form.compression.data = 'none'
elif field.data == 'hdf5':
form.encoding.data = 'none'
compression = utils.forms.SelectField(
'DB compression',
choices=[
('none', 'None'),
('gzip', 'GZIP'),
],
default='none',
tooltip=('Compressing the dataset may significantly decrease the size '
'of your database files, but it may increase read and write times.'),
)
# Use a SelectField instead of a HiddenField so that the default value
# is used when nothing is provided (through the REST API)
method = wtforms.SelectField(u'Dataset type',
choices=[
('folder', 'Folder'),
('textfile', 'Textfiles'),
],
default='folder',
)
def validate_folder_path(form, field):
if not field.data:
pass
elif utils.is_url(field.data):
# make sure the URL exists
try:
r = requests.get(field.data,
allow_redirects=False,
timeout=utils.HTTP_TIMEOUT)
if r.status_code not in [requests.codes.ok, requests.codes.moved, requests.codes.found]:
raise validators.ValidationError('URL not found')
except Exception as e:
raise validators.ValidationError('Caught %s while checking URL: %s' % (type(e).__name__, e))
else:
return True
else:
# make sure the filesystem path exists
# and make sure the filesystem path is absolute
if not os.path.exists(field.data) or not os.path.isdir(field.data):
raise validators.ValidationError('Folder does not exist')
elif not os.path.isabs(field.data):
raise validators.ValidationError('Filesystem path is not absolute')
else:
return True
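    # Illustrative inputs that pass the validator above: an auto-indexed URL
    # such as 'http://server/images/' (verified with an HTTP request), or an
    # absolute local directory such as '/data/train'.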
#
# Method - folder
#
folder_train = utils.forms.StringField(
u'Training Images',
validators=[
validate_required_iff(method='folder'),
validate_folder_path,
],
tooltip=('Indicate a folder which holds subfolders full of images. '
'Each subfolder should be named according to the desired label for the images that it holds. '
'Can also be a URL for an apache/nginx auto-indexed folder.'),
)
folder_pct_val = utils.forms.IntegerField(
u'% for validation',
default=25,
validators=[
validate_required_iff(method='folder'),
validators.NumberRange(min=0, max=100)
],
tooltip=('You can choose to set apart a certain percentage of images '
'from the training images for the validation set.'),
)
folder_pct_test = utils.forms.IntegerField(
u'% for testing',
default=0,
validators=[
validate_required_iff(method='folder'),
validators.NumberRange(min=0, max=100)
],
tooltip=('You can choose to set apart a certain percentage of images '
'from the training images for the test set.'),
)
folder_train_min_per_class = utils.forms.IntegerField(
u'Minimum samples per class',
default=2,
validators=[
validators.Optional(),
validators.NumberRange(min=1),
],
tooltip=('You can choose to specify a minimum number of samples per class. '
'If a class has fewer samples than the specified amount it will be ignored. '
'Leave blank to ignore this feature.'),
)
folder_train_max_per_class = utils.forms.IntegerField(
u'Maximum samples per class',
validators=[
validators.Optional(),
validators.NumberRange(min=1),
validate_greater_than('folder_train_min_per_class'),
],
tooltip=('You can choose to specify a maximum number of samples per class. '
'If a class has more samples than the specified amount extra samples will be ignored. '
'Leave blank to ignore this feature.'),
)
has_val_folder = wtforms.BooleanField(
'Separate validation images folder',
default=False,
validators=[
validate_required_iff(method='folder')
]
)
folder_val = wtforms.StringField(
u'Validation Images',
validators=[
            validate_required_iff(
                method='folder',
has_val_folder=True),
]
)
folder_val_min_per_class = utils.forms.IntegerField(
u'Minimum samples per class',
default=2,
validators=[
validators.Optional(),
validators.NumberRange(min=1),
],
tooltip=('You can choose to specify a minimum number of samples per class. '
                 'If a class has fewer samples than the specified amount it will be ignored. '
'Leave blank to ignore this feature.'),
)
folder_val_max_per_class = utils.forms.IntegerField(
u'Maximum samples per class',
validators=[
validators.Optional(),
validators.NumberRange(min=1),
validate_greater_than('folder_val_min_per_class'),
],
tooltip=('You can choose to specify a maximum number of samples per class. '
'If a class has more samples than the specified amount extra samples will be ignored. '
'Leave blank to ignore this feature.'),
)
has_test_folder = wtforms.BooleanField(
'Separate test images folder',
default=False,
validators=[
validate_required_iff(method='folder')
]
)
folder_test = wtforms.StringField(
u'Test Images',
validators=[
validate_required_iff(
method='folder',
has_test_folder=True),
validate_folder_path,
]
)
folder_test_min_per_class = utils.forms.IntegerField(
u'Minimum samples per class',
default=2,
validators=[
validators.Optional(),
validators.NumberRange(min=1)
],
tooltip=('You can choose to specify a minimum number of samples per class. '
'If a class has fewer samples than the specified amount it will be ignored. '
'Leave blank to ignore this feature.'),
)
folder_test_max_per_class = utils.forms.IntegerField(
u'Maximum samples per class',
validators=[
validators.Optional(),
validators.NumberRange(min=1),
validate_greater_than('folder_test_min_per_class'),
],
tooltip=('You can choose to specify a maximum number of samples per class. '
'If a class has more samples than the specified amount extra samples will be ignored. '
'Leave blank to ignore this feature.'),
)
#
# Method - textfile
#
textfile_use_local_files = wtforms.BooleanFiel
ragupta-git/ImcSdk | imcsdk/__init__.py | Python | apache-2.0 | 1,616 | 0.001238 |
# Copyright 2016 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import logging
import logging.handlers
log = logging.getLogger('imc')
console = logging.StreamHandler()
formatter = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
console.setFormatter(formatter)
def enable_file_logging(filename="imcsdk.log"):
file_handler = logging.handlers.RotatingFileHandler(
filename, maxBytes=10*1024*1024, backupCount=5)
log.addHandler(file_handler)
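# Usage sketch (illustrative path): mirror SDK logs to a 10 MB rotating file.
#   enable_file_logging("/tmp/imcsdk.log")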
def set_log_level(level=logging.DEBUG):
"""
Allows setting log level
Args:
level: logging level - import logging and pass enums from it(INFO/DEBUG/ERROR/etc..)
Returns:
None
Example:
from imcsdk import set_log_level
import logging
set_log_level(logging.INFO)
"""
log.setLevel(level)
console.setLevel(level)
set_log_level(logging.DEBUG)
log.addHandler(console)
if os.path.exists('/tmp/imcsdk_debug'):
enable_file_logging()
__author__ = 'Cisco Systems'
__email__ = 'ucs-python@cisco.com'
__version__ = '0.9.3.1'
Rajeshkumar90/ansible-modules-extras | source_control/bzr.py | Python | gpl-3.0 | 6,658 | 0.001954 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, André Paramés <git@andreparames.com>
# Based on the Git module by Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = u'''
---
module: bzr
author: "André Paramés (@andreparames)"
version_added: "1.1"
short_description: Deploy software (or files) from bzr branches
description:
- Manage I(bzr) branches to deploy files or software.
options:
name:
required: true
aliases: [ 'parent' ]
description:
- SSH or HTTP protocol address of the parent branch.
dest:
required: true
description:
- Absolute path of where the branch should be cloned to.
version:
required: false
default: "head"
description:
- What version of the branch to clone. This can be the
bzr revno or revid.
force:
required: false
default: "no"
choices: [ 'yes', 'no' ]
description:
- If C(yes), any modified files in the working
tree will be discarded. Before 1.9 the default
value was "yes".
executable:
required: false
default: null
version_added: "1.4"
description:
- Path to bzr executable to use. If not supplied,
the normal mechanism for resolving binary paths will be used.
'''
EXAMPLES = '''
# Example bzr checkout from Ansible Playbooks
- bzr:
name: 'bzr+ssh://foosball.example.org/path/to/branch'
dest: /srv/checkout
version: 22
'''
import re
class Bzr(object):
def __init__(self, module, parent, dest, version, bzr_path):
self.module = module
self.parent = parent
self.dest = dest
        self.version = version
self.bzr_path = bzr_path
def _command(self, args_list, cwd=None, **kwargs):
(rc, out, err) = self.module.run_command([self.bzr_path] + args_list, cwd=cwd, **kwargs)
return (rc, out, err)
def get_version(self):
'''samples the version of the bzr branch'''
cmd = "%s revno" % self.bzr_path
rc, stdout, stderr = self.module.run_command(cmd, cwd=self.dest)
revno = stdout.strip()
return revno
def clone(self):
'''makes a new bzr branch if it does not already exist'''
dest_dirname = os.path.dirname(self.dest)
try:
os.makedirs(dest_dirname)
except:
pass
if self.version.lower() != 'head':
args_list = ["branch", "-r", self.version, self.parent, self.dest]
else:
args_list = ["branch", self.parent, self.dest]
return self._command(args_list, check_rc=True, cwd=dest_dirname)
def has_local_mods(self):
cmd = "%s status -S" % self.bzr_path
rc, stdout, stderr = self.module.run_command(cmd, cwd=self.dest)
lines = stdout.splitlines()
lines = filter(lambda c: not re.search('^\\?\\?.*$', c), lines)
return len(lines) > 0
def reset(self, force):
'''
Resets the index and working tree to head.
Discards any changes to tracked files in the working
tree since that commit.
'''
if not force and self.has_local_mods():
self.module.fail_json(msg="Local modifications exist in branch (force=no).")
return self._command(["revert"], check_rc=True, cwd=self.dest)
def fetch(self):
'''updates branch from remote sources'''
if self.version.lower() != 'head':
(rc, out, err) = self._command(["pull", "-r", self.version], cwd=self.dest)
else:
(rc, out, err) = self._command(["pull"], cwd=self.dest)
if rc != 0:
self.module.fail_json(msg="Failed to pull")
return (rc, out, err)
def switch_version(self):
'''once pulled, switch to a particular revno or revid'''
if self.version.lower() != 'head':
args_list = ["revert", "-r", self.version]
else:
args_list = ["revert"]
return self._command(args_list, check_rc=True, cwd=self.dest)
# ===========================================
def main():
module = AnsibleModule(
argument_spec = dict(
dest=dict(required=True, type='path'),
name=dict(required=True, aliases=['parent']),
version=dict(default='head'),
force=dict(default='no', type='bool'),
executable=dict(default=None),
)
)
dest = module.params['dest']
parent = module.params['name']
version = module.params['version']
force = module.params['force']
bzr_path = module.params['executable'] or module.get_bin_path('bzr', True)
bzrconfig = os.path.join(dest, '.bzr', 'branch', 'branch.conf')
rc, out, err, status = (0, None, None, None)
bzr = Bzr(module, parent, dest, version, bzr_path)
# if there is no bzr configuration, do a branch operation
# else pull and switch the version
before = None
local_mods = False
if not os.path.exists(bzrconfig):
(rc, out, err) = bzr.clone()
else:
# else do a pull
local_mods = bzr.has_local_mods()
before = bzr.get_version()
(rc, out, err) = bzr.reset(force)
if rc != 0:
module.fail_json(msg=err)
(rc, out, err) = bzr.fetch()
if rc != 0:
module.fail_json(msg=err)
# switch to version specified regardless of whether
# we cloned or pulled
(rc, out, err) = bzr.switch_version()
# determine if we changed anything
after = bzr.get_version()
changed = False
if before != after or local_mods:
changed = True
module.exit_json(changed=changed, before=before, after=after)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
SpaceKatt/CSPLN
|
apps/scaffolding/win/web2py/gluon/contrib/mockimaplib.py
|
Python
|
gpl-3.0
| 10,569
| 0.002933
|
# -*- encoding: utf-8 -*-
from imaplib import ParseFlags
# mockimaplib: A very simple mock server module for imap client APIs
# Copyright (C) 2014 Alan Etkin <spametki@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or(at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/lgpl.html>
"""
mockimaplib allows you to test applications connecting to a dummy imap
service. For more details on the api subset implemented,
refer to the imaplib docs.
The client should configure a dictionary that maps imap query strings to sets
of entries stored in a dummy message storage dictionary. The module includes
a small set of default message records (SPAM and MESSAGES), two mailboxes
(Draft and INBOX) and a list of query/resultset entries (RESULTS).
Usage:
>>> import mockimaplib
>>> connection = mockimaplib.IMAP4_SSL(<host>)
>>> connection.login(<user>, <password>)
None
>>> connection.select("INBOX")
("OK", ... <mailbox length>)
# fetch commands specifying single uid or message id
# will try to get messages recorded in SPAM
>>> connection.uid(...)
<search query or fetch result>
# returns a string list of matching message ids
>>> connection.search(<query>)
("OK", ... "1 2 ... n")
"""
MESSAGES = (
'MIME-Version: 1.0\r\nReceived: by 10.140.91.199 with HTTP; Mon, 27 Jan 2014 13:52:30 -0800 (PST)\r\nDate: Mon, 27 Jan 2014 19:52:30 -0200\r\nDelivered-To: nurse@example.com\r\nMessage-ID: <10101010101010010000010101010001010101001010010000001@mail.example.com>\r\nSubject: spam1\r\nFrom: Mr. Gumby <gumby@example.com>\r\nTo: The nurse <nurse@example.com>\r\nContent-Type: text/plain; charset=ISO-8859-1\r\n\r\nNurse!\r\n\r\n\r\n',
'MIME-Version: 1.0\r\nReceived: by 10.140.91.199 with HTTP; Mon, 27 Jan 2014 13:52:47 -0800 (PST)\r\nDate: Mon, 27 Jan 2014 19:52:47 -0200\r\nDelivered-To: nurse@example.com\r\nMessage-ID: <101010101010100100000101010100010101010010100100000010@mail.example.com>\r\nSubject: spam2\r\nFrom: Mr. Gumby <gumby@example.com>\r\nTo: The nurse <nurse@example.com>\r\nContent-Type: text/plain; charset=ISO-8859-1\r\n\r\nNurse, nurse!',
'MIME-Version: 1.0\r\nReceived: by 10.140.91.199 with HTTP; Mon, 27 Jan 2014 13:54:54 -0800 (PST)\r\nDate: Mon, 27 Jan 2014 19:54:54 -0200\r\nDelivered-To: nurse@example.com\r\nMessage-ID: <1010101010101001000001010101000101010100101001000000101@mail.example.com>\r\nSubject: spamalot1\r\nFrom: Mr. Gumby <gumby@example.com>\r\nTo: The nurse <nurse@example.com>\r\nContent-Type: text/plain; charset=ISO-8859-1\r\n\r\nNurse!\r\n\r\n\r\n',
'MIME-Version: 1.0\r\n\r\nReceived: by 10.140.91.199 with HTTP; Mon, 27 Jan 2014 13:54:54 -0800 (PST)\r\nDate: Mon, 27 Jan 2014 19:54:54 -0200\r\nDelivered-To: nurse@example.com\r\nMessage-ID: <101010101010100100000101010100010101010010100100000010101@mail.example.com>\r\nSubject: spamalot2\r\nFrom: Mr. Gumby <gumby@example.com>\r\nTo: The nurse <nurse@example.com>\r\nContent-Type: text/plain; charset=ISO-8859-1\r\n\r\nNurse! ... Nurse! ... Nurse!\r\n\r\n\r\n')
SPAM = {
"INBOX": [
{"uid": "483209",
"headers": MESSAGES[0],
"complete": MESSAGES[0],
"flags": ""},
{"uid": "483211",
"headers": MESSAGES[1],
"complete": MESSAGES[1],
"flags": ""},
{"uid": "483225",
"headers": MESSAGES[2],
"complete": MESSAGES[2],
"flags": ""}],
"Draft":[
{"uid": "483432",
"headers": MESSAGES[3],
"complete": MESSAGES[3],
"flags": ""},]
}
RESULTS = {
# <query string>: [<str uid> | <long id>, ...]
"INBOX": {
"(ALL)": (1, 2, 3),
"(1:3)": (1, 2, 3)},
"Draft": {
"(1:1)": (1,)},
}
class Connection(object):
"""Dummy connection object for the imap client.
By default, uses the module SPAM and RESULT
sets (use Connection.setup for custom values)"""
def login(self, user, password):
pass
def __init__(self):
self._readonly = False
self._mailbox = None
self.setup()
def list(self):
return ('OK', ['(\\HasNoChildren) "/" "%s"' % key for key in self.spam])
    def select(self, tablename, readonly=False):
        """args: mailbox, boolean
        result[1][0] -> int last message id / mailbox length
        result[0] = 'OK'
        """
        self._readonly = readonly
        self._mailbox = tablename
        return ('OK', (len(self.spam[self._mailbox]), None))
def uid(self, command, uid, arg):
""" args:
command: "search" | "fetch"
            uid: None | uid
parts: "(ALL)" | "(RFC822 FLAGS)" | "(RFC822.HEADER FLAGS)"
"search", None, "(ALL)" -> ("OK", ("uid_1 uid_2 ... uid_<mailbox length>", None))
"search", None, "<query>" -> ("OK", ("uid_1 uid_2 ... uid_n", None))
"fetch", uid, parts -> ("OK", (("<id> ...", "<raw message as specified in parts>"), "<flags>")
[0] [1][0][0] [1][0][1] [1][1]
"""
if command == "search":
return self._search(arg)
elif command == "fetch":
return self._fetch(uid, arg)
def _search(self, query):
return ("OK", (" ".join([str(item["uid"]) for item in self._get_messages(query)]), None))
def _fetch(self, value, arg):
try:
message = self.spam[self._mailbox][value - 1]
message_id = value
except TypeError:
for x, item in enumerate(self.spam[self._mailbox]):
if item["uid"] == value:
message = item
message_id = x + 1
break
parts = "headers"
if arg in ("(ALL)", "(RFC822 FLAGS)"):
parts = "complete"
return ("OK", (("%s " % message_id, message[parts]), message["flags"]))
def _get_messages(self, query):
if query.strip().isdigit():
return [self.spam[self._mailbox][int(query.strip()) - 1],]
elif query[1:-1].strip().isdigit():
return [self.spam[self._mailbox][int(query[1:-1].strip()) -1],]
elif query[1:-1].replace("UID", "").strip().isdigit():
for item in self.spam[self._mailbox]:
if item["uid"] == query[1:-1].replace("UID", "").strip():
return [item,]
messages = []
try:
for m in self.results[self._mailbox][query]:
try:
self.spam[self._mailbox][m - 1]["id"] = m
messages.append(self.spam[self._mailbox][m - 1])
except TypeError:
for x, item in enumerate(self.spam[self._mailbox]):
if item["uid"] == m:
item["id"] = x + 1
messages.append(item)
break
except IndexError:
# message removed
pass
return messages
except KeyError:
raise ValueError("The client issued an unexpected query: %s" % query)
def setup(self, spam={}, results={}):
"""adds custom message and query databases or sets
the values to the module defaults.
"""
self.spam = spam
self.results = results
if not spam:
for key in SPAM:
self.spam[key] = []
for d in SPAM[key]:
self.spam[key].append(d.copy())
if not results:
for key in RESULTS:
self.results[key] = RESULTS[key].copy()
    def search(self, first, query):
        # minimal completion sketch (the exact upstream body is an
        # assumption): the message-id counterpart of _search
        return ("OK", (" ".join([str(item.get("id", "")) for item in self._get_messages(query)]), None))
|
PlanTool/plantool
|
wrappingPlanners/Deterministic/LAMA/seq-sat-lama/lama/translate/pddl/f_expression.py
|
Python
|
gpl-2.0
| 5,321
| 0.008081
|
#######################################################################
#
# Author: Gabi Roeger
# Modified by: Silvia Richter (silvia.richter@nicta.com.au)
# (C) Copyright 2008: Gabi Roeger and NICTA
#
# This file is part of LAMA.
#
# LAMA is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the license, or (at your option) any later version.
#
# LAMA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
#
#######################################################################
import string
import conditions
def parse_expression(exp):
if isinstance(exp, list):
functionsymbol = exp[0]
return PrimitiveNumericExpression(functionsymbol,
[conditions.parse_term(arg) for arg in exp[1:]])
elif exp.replace(".","").isdigit():
return NumericConstant(string.atof(exp))
else:
return PrimitiveNumericExpression(exp,[])
def parse_assignment(alist):
assert len(alist) == 3
op = alist[0]
head = parse_expression(alist[1])
exp = parse_expression(alist[2])
if op == "=":
return Assign(head, exp)
elif op == "increase":
return Increase(head, exp)
else:
assert False, "Assignment operator not supported."
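# Illustrative only (the tokens below are assumptions, not from LAMA's
# test data): parsing the PDDL cost effect (increase (total-cost) 5)
# after it has been tokenized into nested lists.
#
#   effect = parse_assignment(["increase", ["total-cost"], "5"])
#   # -> Increase(PNE total-cost(), NumericConstant 5.0)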
class FunctionalExpression(object):
def __init__(self, parts):
self.parts = tuple(parts)
def dump(self, indent=" "):
print "%s%s" % (indent, self._dump())
for part in self.parts:
part.dump(indent + " ")
def _dump(self):
return self.__class__.__name__
def instantiate(self, var_mapping, init_facts):
raise ValueError("Cannot instantiate condition: not normalized")
class NumericConstant(FunctionalExpression):
parts = ()
def __init__(self, value):
self.value = value
def __eq__(self, other):
return (self.__class__ == other.__class__ and self.value == other.value)
def __str__(self):
return "%s %s" % (self.__class__.__name__, self.value)
def _dump(self):
return str(self)
def instantiate(self, var_mapping, init_facts):
return self
class PrimitiveNumericExpression(FunctionalExpression):
parts = ()
def __init__(self, symbol, args):
self.symbol = symbol
self.args = tuple(args)
def __eq__(self, other):
if not (self.__class__ == other.__class__ and self.symbol == other.symbol
and len(self.args) == len(other.args)):
return False
else:
for s,o in zip(self.args, other.args):
if not s == o:
return False
return True
def __str__(self):
return "%s %s(%s)" % ("PNE", self.symbol, ", ".join(map(str, self.args)))
def dump(self, indent=" "):
print "%s%s" % (indent, self._dump())
for arg in self.args:
arg.dump(indent + " ")
def _dump(self):
return str(self)
def instantiate(self, var_mapping, init_facts):
        args = [conditions.ObjectTerm(var_mapping.get(arg.name, arg.name)) for arg in self.args]
        pne = PrimitiveNumericExpression(self.symbol, args)
assert not self.symbol == "total-cost"
# We know this expression is constant. Substitute it by corresponding
# initialization from task.
for fact in init_facts:
if isinstance(fact, FunctionAssignment):
if fact.fluent == pne:
return fact.expression
assert False, "Could not find instantiation for PNE!"
class FunctionAssignment(object):
def __init__(self, fluent, expression):
self.fluent = fluent
self.expression = expression
def __str__(self):
return "%s %s %s" % (self.__class__.__name__, self.fluent, self.expression)
def dump(self, indent=" "):
print "%s%s" % (indent, self._dump())
self.fluent.dump(indent + " ")
self.expression.dump(indent + " ")
def _dump(self):
return self.__class__.__name__
def instantiate(self, var_mapping, init_facts):
if not (isinstance(self.expression, PrimitiveNumericExpression) or
isinstance(self.expression, NumericConstant)):
raise ValueError("Cannot instantiate assignment: not normalized")
# We know that this assignment is a cost effect of an action (for initial state
# assignments, "instantiate" is not called). Hence, we know that the fluent is
# the 0-ary "total-cost" which does not need to be instantiated
assert self.fluent.symbol == "total-cost"
fluent = self.fluent
expression = self.expression.instantiate(var_mapping, init_facts)
return self.__class__(fluent, expression)
class Assign(FunctionAssignment):
def __str__(self):
return "%s := %s" % (self.fluent, self.expression)
class Increase(FunctionAssignment):
pass
|
inbloom/legacy-projects
|
lri-middleware/path_builder/form.py
|
Python
|
apache-2.0
| 1,107
| 0.001807
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import web
from web import form as webform
import httpconfig
class Form(object):
"""Form class"""
def __init__(self, names=[]):
self._form = self.createForm(names)
self.httpConfig = httpconfig.HttpConfig(web.ctx.env["DOCUMENT_ROOT"])
@property
def form(self):
return self._form
def createForm(self, names=[]):
# Text area for sending path data
pathDataArea = webform.Textarea("", rows=30, cols=90, value="", id="pathData", hidden=True)
form = webform.Form(pathDataArea)
return form
|
christophreimer/pytesmo
|
pytesmo/time_series/plotting.py
|
Python
|
bsd-3-clause
| 6,133
| 0.000163
|
# Copyright (c) 2014,Vienna University of Technology,
# Department of Geodesy and Geoinformation
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Vienna University of Technology,
# Department of Geodesy and Geoinformation nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL VIENNA UNIVERSITY OF TECHNOLOGY,
# DEPARTMENT OF GEODESY AND GEOINFORMATION BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
Created on Mar 7, 2014
Plot anomalies around climatology using colors
@author: Christoph Paulik christoph.paulik@geo.tuwien.ac.at
'''
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import pandas as pd
import pytesmo.time_series.anomaly as anom
def plot_clim_anom(df, clim=None, axes=None, markersize=0.75,
mfc='0.3', mec='0.3', clim_color='0.0',
clim_linewidth=0.5, clim_linestyle='-',
pos_anom_color='#799ADA', neg_anom_color='#FD8086',
anom_linewidth=0.2, add_titles=True):
"""
Takes a pandas DataFrame and calculates the climatology and anomaly
and plots them in a nice way for each column
Parameters
----------
df : pandas.DataFrame
clim : pandas.DataFrame, optional
if given these climatologies will be used
if not given then climatologies will be calculated
this DataFrame must have the same number of columns as df
and also the column names.
each climatology must have doy as index.
axes : list of matplotlib.Axes, optional
list of axes on which each column should be plotted
if not given a standard layout is generated
markersize : float, optional
size of the markers for the datapoints
mfc : matplotlib color, optional
markerfacecolor, color of the marker face
mec : matplotlib color, optional
markeredgecolor
clim_color : matplotlib color, optional
color of the climatology
clim_linewidth : float, optional
linewidth of the climatology
clim_linestyle : string, optional
linestyle of the climatology
pos_anom_color : matplotlib color, optional
color of the positive anomaly
neg_anom_color : matplotlib color, optional
color of the negative anomaly
anom_linewidth : float, optional
linewidth of the anomaly lines
add_titles : boolean, optional
if set each subplot will have it's column name as title
Default : True
Returns
-------
Figure : matplotlib.Figure
if no axes were given
axes : list of matploblib.Axes
if no axes were given
"""
if type(df) == pd.Series:
df = pd.DataFrame(df)
nr_columns = len(df.columns)
# make own axis if necessary
if axes is None:
own_axis = True
        gs = gridspec.GridSpec(nr_columns, 1, right=0.8)
fig = plt.figure(num=None, figsize=(6, 2 * nr_columns),
dpi=150, facecolor='w', edgecolor='k')
last_axis = fig.add_subplot(gs[nr_columns - 1])
axes = []
for i, grid in enumerate(gs):
if i < nr_columns - 1:
ax = fig.add_subplot(grid, sharex=last_axis)
axes.append(ax)
                ax.xaxis.set_visible(False)
axes.append(last_axis)
else:
own_axis = False
for i, column in enumerate(df):
Ser = df[column]
ax = axes[i]
if clim is None:
clima = anom.calc_climatology(Ser)
else:
clima = clim[column]
anomaly = anom.calc_anomaly(Ser, climatology=clima, return_clim=True)
anomaly[Ser.name] = Ser
anomaly = anomaly.dropna()
pos_anom = anomaly[Ser.name].values > anomaly['climatology'].values
neg_anom = anomaly[Ser.name].values < anomaly['climatology'].values
ax.plot(anomaly.index, anomaly[Ser.name].values, 'o',
markersize=markersize, mfc=mfc, mec=mec)
ax.plot(anomaly.index, anomaly['climatology'].values,
linestyle=clim_linestyle,
color=clim_color,
linewidth=clim_linewidth)
ax.fill_between(anomaly.index,
anomaly[Ser.name].values,
anomaly['climatology'].values, interpolate=True,
where=pos_anom, color=pos_anom_color,
linewidth=anom_linewidth)
ax.fill_between(anomaly.index,
anomaly[Ser.name].values,
anomaly['climatology'].values, interpolate=True,
where=neg_anom, color=neg_anom_color,
linewidth=anom_linewidth)
if add_titles:
ax.set_title(column)
if own_axis:
return fig, axes
else:
return None, None
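# A hedged usage sketch (the data below is made up, not from pytesmo):
#
#   import numpy as np
#   ser = pd.Series(np.random.rand(730),
#                   index=pd.date_range('2000-01-01', periods=730),
#                   name='soil moisture')
#   fig, axes = plot_clim_anom(ser)
#   plt.show()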
|
mateoqac/unqTip
|
gui/views/boardOption.py
|
Python
|
gpl-3.0
| 3,726
| 0.002952
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'boardOptions.ui'
#
# Created: Fri Oct 4 12:27:03 2013
# by: PyQt4 UI code generator 4.9.1
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
from commons.i18n import *
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_BoardOptions(object):
def setupUi(self, Dialog):
Dialog.setObjectName(_fromUtf8("Dialog"))
Dialog.setWindowModality(QtCore.Qt.WindowModal)
Dialog.resize(450, 300)
Dialog.setMaximumSize(QtCore.QSize(450, 300))
self.buttonBox = QtGui.QDialogButtonBox(Dialog)
self.buttonBox.setGeometry(QtCore.QRect(60, 260, 251, 32))
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
self.buttonBox.setObjectName(_fromUtf8("buttonBox"))
self.widget = QtGui.QWidget(Dialog)
self.widget.setGeometry(QtCore.QRect(50, 30, 350, 191))
self.widget.setObjectName(_fromUtf8("widget"))
self.verticalLayout = QtGui.QVBoxLayout(self.widget)
self.verticalLayout.setMargin(0)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.label = QtGui.QLabel(self.widget)
self.label.setObjectName(_fromUtf8("label"))
self.horizontalLayout.addWidget(self.label)
self.comboBox = QtGui.QComboBox(self.widget)
self.comboBox.setObjectName(_fromUtf8("comboBox"))
self.horizontalLayout.addWidget(self.comboBox)
self.verticalLayout.addLayout(self.horizontalLayout)
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.label_2 = QtGui.QLabel(self.widget)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.horizontalLayout_2.addWidget(self.label_2)
self.comboBox_2 = QtGui.QComboBox(self.widget)
self.comboBox_2.setObjectName(_fromUtf8("comboBox_2"))
self.horizontalLayout_2.addWidget(self.comboBox_2)
self.verticalLayout.addLayout(self.horizontalLayout_2)
self.horizontalLayout_3 = QtGui.QHBoxLayout()
self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
self.label_3 = QtGui.QLabel(self.widget)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.horizontalLayout_3.addWidget(self.label_3)
self.comboBox_3 = QtGui.QComboBox(self.widget)
        self.comboBox_3.setObjectName(_fromUtf8("comboBox_3"))
self.horizontalLayout_3.addWidget(self.comboBox_3)
self.verticalLayout.addLayout(self.horizontalLayout_3)
self.retranslateUi(Dialog)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), Dialog.accept)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("rejected()")), Dialog.reject)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
        Dialog.setWindowTitle(QtGui.QApplication.translate("Dialog", i18n('Options Board'), None, QtGui.QApplication.UnicodeUTF8))
self.label.setText(QtGui.QApplication.translate("Dialog", i18n("Balls"), None, QtGui.QApplication.UnicodeUTF8))
self.label_2.setText(QtGui.QApplication.translate("Dialog", i18n("Size"), None, QtGui.QApplication.UnicodeUTF8))
self.label_3.setText(QtGui.QApplication.translate("Dialog", i18n("Head"), None, QtGui.QApplication.UnicodeUTF8))
|
laputian/dml
|
mlp_test/test_compare_mlp_unit.py
|
Python
|
mit
| 1,003
| 0.004985
|
from compare_mlp import calculate_distance_pairs, load_models, get_filenames, plot_distance_pairs, plot_distances_from_target
import unittest
class DistanceTestCase(unittest.TestCase):
def setUp(self):
self.afiles = load_models(get_filenames("best_model_mlp", "zero_blur_a.pkl"))
self.bfiles = load_models(get_filenames("best_model_mlp", "rand.pkl"))
    def testDistanceBetweenZeroAndRandModels(self):
distances = calculate_distance_pairs(self.afiles, self.bfiles)
plot_distance_pairs(distances)
def testDistanceBetweenZeroModelsAndZeroTarget(self):
plot_distances_from_target(self.afiles[-1], self.afiles)
def testDistanceBetweenRandModelsAndRandTarget(self):
plot_distances_from_target(self.bfiles[-1], self.bfiles)
# def testDistanceBetweenRandModelsAndItself(self):
# distances = calculate_distance_pairs(self.bfiles, self.bfiles)
# plot_distance_pairs(distances)
if __name__ == '__main__':
unittest.main()
|
metabrainz/acousticbrainz-server
|
db/api_key.py
|
Python
|
gpl-2.0
| 2,579
| 0.000775
|
import db
import db.exceptions
import sqlalchemy
import string
import random
KEY_LENGTH = 40
def generate(owner_id):
"""Generate new key for a specified user.
Doesn't check if user exists.
Args:
owner_id: ID of a user that will be associated with a key.
Returns:
Value of the new key.
"""
with db.engine.connect() as connection:
value = _generate_key(KEY_LENGTH)
connection.execute(sqlalchemy.text("""
INSERT INTO api_key (value, owner)
            VALUES (:value, :owner)
"""), {
"value": value,
"owner": owner_id
})
return value
def get_active(owner_id):
"""Get active keys for a user.
Doesn't check if user exists.
Args:
owner_id: ID of a user who owns the key.
Returns:
List of active API keys.
"""
    with db.engine.connect() as connection:
result = connection.execute(sqlalchemy.text("""
SELECT value
FROM api_key
WHERE owner = :owner
"""), {"owner": owner_id})
return [row["value"] for row in result.fetchall()]
def revoke(value):
"""Revoke key with a given value."""
with db.engine.connect() as connection:
connection.execute(sqlalchemy.text("""
UPDATE api_key
SET is_active = FALSE
WHERE value = :value
"""), {"value": value})
def revoke_all(owner_id):
"""Revoke all keys owned by a user."""
with db.engine.connect() as connection:
connection.execute(sqlalchemy.text("""
UPDATE api_key
SET is_active = FALSE
WHERE owner = :owner
"""), {"owner": owner_id})
def is_active(value):
"""Check if key is active.
Args:
value: Value of a key.
Returns:
True if key is active, False if it's not.
Raises:
NoDataFoundException: Specified key was not found.
"""
with db.engine.connect() as connection:
result = connection.execute(sqlalchemy.text("""
SELECT is_active
FROM api_key
WHERE value = :value
"""), {"value": value})
row = result.fetchone()
if not row:
raise db.exceptions.NoDataFoundException("Can't find specified API key.")
return row["is_active"]
def _generate_key(length):
"""Generates random string with a specified length."""
return ''.join(random.SystemRandom().choice(string.ascii_letters + string.digits)
for _ in range(length))
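# Illustrative lifecycle (the owner id 42 is an assumption, not from
# this module):
#
#   key = generate(42)
#   assert is_active(key)
#   revoke(key)
#   assert not is_active(key)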
|
danielballan/scikit-xray
|
skbeam/io/avizo_io.py
|
Python
|
bsd-3-clause
| 10,337
| 0.000097
|
from __future__ import absolute_import, division, print_function
import numpy as np
import os
import logging
def _read_amira(src_file):
"""
Reads all information contained within standard AmiraMesh data sets.
    Separates the header information from the image/volume data.
Parameters
----------
src_file : str
The path and file name pointing to the AmiraMesh file to be loaded.
Returns
-------
am_header : list of strings
This list contains all of the raw information contained in the
AmiraMesh file header. Contains all of the raw header information
am_data : str
        A compiled string containing all of the image array data that was
stored in the source AmiraMesh data file. Contains the raw image data
"""
am_header = []
am_data = []
with open(os.path.normpath(src_file), 'r') as input_file:
while True:
line = input_file.readline()
am_header.append(line)
if (line == '# Data section follows\n'):
input_file.readline()
break
am_data = input_file.read()
return am_header, am_data
def _amira_data_to_numpy(am_data, header_dict, flip_z=True):
"""
Transform output of `_read_amira` to a numpy array of the dtype listed in
the AmiraMesh header dictionary. The standard format for Avizo Binary
files is IEEE binary. Big or little endian-ness is stipulated in the header
information, and is be assessed and taken into account by this function as
well, during the conversion process.
Parameters
----------
am_data : str
String object containing all of the image array data, formatted as IEEE
binary. Current dType options include:
float
short
ushort
byte
header_dict : dict
Metadata dictionary containing all relevant attributes pertaining to
the image array. This metadata dictionary is the output from the
function `_create_md_dict`.
flip_z : bool, optional.
Defaults to True
This option is included because the .am data sets evaluated thus far
have opposite z-axis indexing than numpy arrays. This switch currently
defaults to "True" in order to ensure that z-axis indexing remains
consistent with data processed using Avizo.
Setting this switch to "True" will flip the z-axis during processing,
        and a value of "False" will keep the array as initially assigned during
        the array reshaping step.
Returns
-------
output : ndarray
Numpy ndarray containing the image data converted from the AmiraMesh
file. This data array is ready for further processing using the NSLS-II
function library, or other operations able to operate on numpy arrays.
"""
Zdim = header_dict['array_dimensions']['z_dimension']
Ydim = header_dict['array_dimensions']['y_dimension']
Xdim = header_dict['array_dimensions']['x_dimension']
# Strip out null characters from the string of binary values
# Dictionary of the encoding types for AmiraMesh files
am_format_dict = {'BINARY-LITTLE-ENDIAN': '<',
'BINARY': '>',
'ASCII': 'unknown'}
# Dictionary of the data types encountered so far in AmiraMesh files
am_dtype_dict = {'float': 'f4',
'short': 'h4',
'ushort': 'H4',
'byte': 'b'}
# Had to split out the stripping of new line characters and conversion
# of the original string data based on whether source data is BINARY
# format or ASCII format. These format types require different stripping
# tools and different string conversion tools.
if header_dict['data_format'] == 'BINARY-LITTLE-ENDIAN':
data_strip = am_data.strip('\n')
flt_values = np.fromstring(
data_strip, (am_format_dict[header_dict['data_format']] +
am_dtype_dict[header_dict['data_type']]))
if header_dict['data_format'] == 'ASCII':
data_strip = am_data.translate(None, '\n')
string_list = data_strip.split(" ")
string_list = string_list[0:(len(string_list)-2)]
flt_values = np.array(
string_list).astype(am_dtype_dict[header_dict['data_type']])
    # Resize the 1D array to the correct ndarray dimensions
# Note that resize is in-place whereas reshape is not
flt_values.resize(Zdim, Ydim, Xdim)
output = flt_values
if flip_z:
        # a numpy index may contain only one Ellipsis
        output = flt_values[::-1, ...]
return output
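# For example (illustrative): a 'BINARY-LITTLE-ENDIAN' header declaring
# 'float' data composes the numpy dtype string '<' + 'f4' == '<f4'.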
def _clean_amira_header(header_list):
"""
Strip the string list of all "empty" characters,including new line
characters ('\n') and empty lines. Splits each header line (which
originally is stored as a single string) into individual words, numbers or
characters, using spaces between words as the separating operator. The
output of this function is used to generate the metadata dictionary for
the image data set.
Parameters
----------
header_list : list of strings
This is the header output from the function _read_amira()
Returns
-------
clean_header : list of strings
This header list has been stripped and sorted and is now ready for
populating the metadata dictionary for the image data set.
"""
clean_header = []
for row in header_list:
split_header = filter(None, [word.translate(None, ',"')
for word in row.strip('\n').split()])
clean_header.append(split_header)
return clean_header
def _create_md_dict(clean_header):
"""
    Populates a dictionary with all information pertinent to the image
data set that was originally stored in the AmiraMesh file.
Parameters
----------
clean_header : list of strings
This is the output from the _sort_amira_header function.
"""
# Avizo specific metadata
md_dict = {'software_src': clean_header[0][1],
'data_format': clean_header[0][2],
'data_format_version': clean_header[0][3]}
if md_dict['data_format'] == '3D':
md_dict['data_format'] = clean_header[0][3]
md_dict['data_format_version'] = clean_header[0][4]
for header_line in clean_header:
hl = header_line
if 'define' in hl:
md_dict['array_dimensions'] = {
'x_dimension': int(hl[hl.index('define') + 2]),
'y_dimension': int(hl[hl.index('define') + 3]),
'z_dimension': int(hl[hl.index('define') + 4])}
elif 'Content' in hl:
md_dict['data_type'] = hl[hl.index('Content') + 2]
elif 'CoordType' in hl:
md_dict['coord_type'] = hl[hl.index('CoordType') + 1]
elif 'BoundingBox' in hl:
md_dict['bounding_box'] = {
'x_min': float(hl[hl.index('BoundingBox') + 1]),
'x_max': float(hl[hl.index('BoundingBox') + 2]),
'y_min': float(hl[hl.index('BoundingBox') + 3]),
'y_max': float(hl[hl.index('BoundingBox') + 4]),
'z_min': float(hl[hl.index('BoundingBox') + 5]),
'z_max': float(hl[hl.index('BoundingBox') + 6])}
# Parameter definition for voxel resolution calculations
bbox = [md_dict['bounding_box']['x_min'],
md_dict['bounding_box']['x_max'],
md_dict['bounding_box']['y_min'],
md_dict['bounding_box']['y_max'],
md_dict['bounding_box']['z_min'],
md_dict['bounding_box']['z_max']]
dims = [md_dict['array_dimensions']['x_dimension'],
md_dict['array_dimensions']['y_dimension'],
md_dict['array_dimensions']['z_dimension']]
# Voxel resolution calculation
resolution_list = []
for index in np.arange(len(dims)):
if dims[index] > 1:
resolution_list.append(
                (bbox[(2*index+1)] - bbox[(2*index)]) /
                (dims[index] - 1))  # assumed completion: axis extent per step
|
mattclay/ansible
|
lib/ansible/plugins/lookup/lines.py
|
Python
|
gpl-3.0
| 2,214
| 0.005872
|
# (c) 2012, Daniel Hokka Zakrisson <daniel@hozac.com>
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
name: lines
author: Daniel Hokka Zakrisson (!UNKNOWN) <daniel@hozac.com>
version_added: "0.9"
short_description: read lines from command
description:
- Run one or more commands and split the output into lines, returning them as a list
options:
_terms:
description: command(s) to run
required: True
notes:
- Like all lookups, this runs on the Ansible controller and is unaffected by other keywords such as 'become'.
If you need to use different permissions, you must change the command or run Ansible as another user.
- Alternatively, you can use a shell/command task that runs against localhost and registers the result.
"""
EXAMPLES = """
- name: We could read the file directly, but this shows output from command
ansible.builtin.debug: msg="{{ item }} is an output line from running cat on /etc/motd"
with_lines: cat /etc/motd
- name: More useful example of looping over a command result
ansible.builtin.shell: "/usr/bin/frobnicate {{ item }}"
with_lines:
- "/usr/bin/frobnications_per_host --param {{ inventory_hostname }}"
"""
RETURN = """
_list:
description:
- lines of stdout from command
type: list
elements: str
"""
import subprocess
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
from ansible.module_utils._text import to_text
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
ret = []
for term in terms:
p = subprocess.Popen(term, cwd=self._loader.get_basedir(), shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
(stdout, stderr) = p.communicate()
if p.returncode == 0:
ret.extend([to_text(l) for l in stdout.splitlines()])
else:
raise AnsibleError("lookup_plugin.lines(%s) returned %d" % (term, p.returncode))
return ret
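# Illustrative template usage (not taken from the plugin docs above):
#   {{ lookup('lines', 'cat /etc/motd') }}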
|
zxl200406/minos
|
supervisor/supervisor/medusa/thread/thread_channel.py
|
Python
|
apache-2.0
| 3,713
| 0.01185
|
# -*- Mode: Python -*-
VERSION_STRING = "$Id: thread_channel.py,v 1.3 2002/03/19 22:49:40 amk Exp $"
# This will probably only work on Unix.
# The disadvantage to this technique is that it wastes file
# descriptors (especially when compared to select_trigger.py)
# May be possible to do it on Win32, using TCP localhost sockets.
# [does winsock support 'socketpair'?]
import asyncore_25 as asyncore
import asynchat_25 as asynchat
import fcntl
import FCNTL
import os
import socket
import string
import thread
# this channel slaves off of another one. it starts a thread which
# pumps its output through the 'write' side of the pipe. The 'read'
# side of the pipe will then notify us when data is ready. We push
# this data on the owning data channel's output queue.
class thread_channel (asyncore.file_dispatcher):
buffer_size = 8192
def __init__ (self, channel, function, *args):
self.parent = channel
self.function = function
self.args = args
self.pipe = rfd, wfd = os.pipe()
asyncore.file_dispatcher.__init__ (self, rfd)
def start (self):
rfd, wfd = self.pipe
# The read side of the pipe is set to non-blocking I/O; it is
# 'owned' by medusa.
flags = fcntl.fcntl (rfd, FCNTL.F_GETFL, 0)
fcntl.fcntl (rfd, FCNTL.F_SETFL, flags | FCNTL.O_NDELAY)
# The write side of the pipe is left in blocking mode; it is
# 'owned' by the thread. However, we wrap it up as a file object.
# [who wants to 'write()' to a number?]
of = os.fdopen (wfd, 'w')
        thread.start_new_thread (
            self.function,
# put the output file in front of the other arguments
(of,) + self.args
)
def writable (self):
return 0
def readable (self):
return 1
def handle_read (self):
data = self.recv (self.buffer_size)
self.parent.push (data)
def handle_close (self):
# Depending on your intentions, you may want to close
# the parent channel here.
self.close()
# Yeah, it's bad when the test code is bigger than the library code.
if __name__ == '__main__':
import time
def thread_function (output_file, i, n):
print 'entering thread_function'
while n:
time.sleep (5)
output_file.write ('%2d.%2d %s\r\n' % (i, n, output_file))
output_file.flush()
n = n - 1
output_file.close()
print 'exiting thread_function'
class thread_parent (asynchat.async_chat):
def __init__ (self, conn, addr):
self.addr = addr
asynchat.async_chat.__init__ (self, conn)
self.set_terminator ('\r\n')
self.buffer = ''
self.count = 0
def collect_incoming_data (self, data):
self.buffer = self.buffer + data
def found_terminator (self):
data, self.buffer = self.buffer, ''
n = string.atoi (string.split (data)[0])
tc = thread_channel (self, thread_function, self.count, n)
self.count = self.count + 1
tc.start()
class thread_server (asyncore.dispatcher):
def __init__ (self, family=socket.AF_INET, address=('127.0.0.1', 9003)):
asyncore.dispatcher.__init__ (self)
self.create_socket (family, socket.SOCK_STREAM)
self.set_reuse_addr()
self.bind (address)
self.listen (5)
def handle_accept (self):
conn, addr = self.accept()
tp = thread_parent (conn, addr)
thread_server()
#asyncore.loop(1.0, use_poll=1)
asyncore.loop ()
|
Kaushikpatnaik/LSTMChar2Char
|
train.py
|
Python
|
mit
| 5,431
| 0.019518
|
'''
Training file with functions for
1) Taking in the inputs
2) Defining the model
3) Reading the input and generating batches
4) Defining the loss, learning rate and optimization functions
5) Running multiple epochs on training and testing
'''
import argparse
from read_input import *
from model import *
import tensorflow as tf
import time
def run_epoch(session, model, train_op, data, max_batches, args):
'''
Run the model under given session for max_batches based on args
:param model: model on which the operations take place
:param session: session for tensorflow
:param train_op: training output variable name, pass as tf.no_op() for validation and testing
:param data: train, validation or testing data
:param max_batches: maximum number of batches that can be called
:param args: arguments provided by user in main
:return: perplexity
'''
# to run a session you need the list of tensors/graph nodes and the feed dict
# for us its the cost, final_state, and optimizer
# you feed in the (x,y) pairs, and you also propagate the state across the batches
state = np.zeros((args.batch_size,model.lstm_layer.state_size))
tot_cost = 0.0
start_time = time.time()
iters = 0
for i in range(max_batches):
x, y = data.next()
cur_cost, curr_state, _ = session.run([model.cost,model.final_state,train_op],
feed_dict={model.input_layer: x, model.targets: y, model.initial_state: state})
tot_cost += cur_cost
state = curr_state
iters += args.batch_len
if i % (max_batches//50) == 0:
print 'iteration %.3f perplexity: %.3f speed: %.0f wps' %\
(i, np.exp(tot_cost/iters), iters*args.batch_size/(time.time()-start_time))
return np.exp(tot_cost/iters)
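# Worked example of the perplexity formula above (numbers are made up):
# a summed tot_cost of 693.1 over iters = 1000 characters gives
# np.exp(693.1 / 1000) = np.exp(0.6931) ~= 2.0.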
# TODO: Add model saving and loading
def main():
# parse arguments
parser = argparse.ArgumentParser()
parser.add_argument('--filename', type=str, default='./data/tinyshakespeare/input.txt', help='data location for all data')
parser.add_argument('--split_ratio', type =list, default=[0.9,0.05,0.05], help='split ratio for train, validation and test')
parser.add_argument('--batch_size', type=int, default=1, help='batch size for data')
parser.add_argument('--batch_len', type=int, default=1, help='number of time steps to unroll')
parser.add_argument('--cell', type=str, default='lstm', help='the cell type to use, currently only LSTM')
parser.add_argument('--num_layers', type=int, default=1, help='depth of hidden units in the model')
parser.add_argument('--hidden_units', type=int, default=32, help='number of hidden units in the cell')
parser.add_argument('--num_epochs', type=int, default=50, help='max number of epochs to run the training')
parser.add_argument('--lr_rate', type=float, default=2e-5, help='learning rate')
parser.add_argument('--lr_decay', type=float, default=0.97, help='learning rate decay')
    parser.add_argument('--drop_prob', type=float, default=0, help='dropout probability')
    parser.add_argument('--grad_clip', type=float, default=5.0, help='clip gradients at this value')
    parser.add_argument('--stateful', type=bool, default=True, help='propagate state across batches')
args = parser.parse_args()
# load data
if args.filename[-3:] == 'zip':
data = load_zip_data(args.filename)
elif args.filename[-3:] == 'txt':
data = load_csv_file(args.filename)
else:
raise NotImplementedError("File extension not supported")
train, val ,test = train_test_split(data, args.split_ratio)
batch_train = BatchGenerator(train,args.batch_size,args.batch_len)
batch_train.create_batches()
    max_batches_train = batch_train.epoch_size
# New chars seen in test time will have a problem
args.data_dim = batch_train.vocab_size
batch_val = BatchGenerator(val,args.batch_size,args.batch_len)
batch_val.create_batches()
max_batches_val = batch_val.epoch_size
batch_test = BatchGenerator(test,args.batch_size,args.batch_len)
batch_test.create_batches()
max_batches_test = batch_test.epoch_size
print max_batches_train, max_batches_val, max_batches_test
# Initialize session and graph
with tf.Graph().as_default(), tf.Session() as session:
initializer = tf.random_uniform_initializer(-0.1,0.1)
with tf.variable_scope("model",reuse=None,initializer=initializer):
train_model = Model(args, is_training=True, is_inference=False)
with tf.variable_scope("model",reuse=True,initializer=initializer):
val_model = Model(args, is_training=False, is_inference=False)
test_model = Model(args, is_training=False, is_inference=False)
tf.initialize_all_variables().run()
for i in range(args.num_epochs):
# TODO: Add parameter for max_max_epochs
lr_decay = args.lr_decay ** max(i-10.0,0.0)
train_model.assign_lr(session, args.lr_rate*lr_decay)
# run a complete epoch and return appropriate variables
train_perplexity = run_epoch(session, train_model, train_model.train_op, batch_train, max_batches_train, args)
print 'Epoch %d, Train Perplexity: %.3f' %(i+1, train_perplexity)
val_perplexity = run_epoch(session, val_model, tf.no_op(), batch_val, max_batches_val, args)
print 'Epoch %d, Val Perplexity: %.3f' %(i+1, val_perplexity)
test_perplexity = run_epoch(session, test_model, tf.no_op(), batch_test, max_batches_test, args)
print 'Test Perplexity: %.3f' % test_perplexity
if __name__ == "__main__":
main()
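# Illustrative invocation (all paths and values are assumptions):
#   python train.py --filename ./data/tinyshakespeare/input.txt \
#       --batch_size 32 --batch_len 50 --num_epochs 10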
|
erbridge/NQr
|
src/export.py
|
Python
|
bsd-3-clause
| 834
| 0
|
import sqlite3
def main():
conn = sqlite3.connect("../database")
cursor = conn.cursor()
# I claim this gives the current score. Another formulation is
# select trackid, score, max(scoreid) from scores group by trackid;
# cursor.execute("""select trackid, score from scores
# group by trackid order by scoreid""")
# cursor.execute("""select scores.trackid, score, path from scores, tracks
# where scores.trackid = tracks.trackid
# group by scores.trackid order by scoreid""")
cursor.execute("""
select score, path from tracks
where score is not null and missing is not 1""")
results = cursor.fetchall()
    for result in results:
print(str(result[0]) + "\t" + result[1])
if __name__ == '__main__':
main()
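# Each output line is "<score>\t<path>", e.g. (illustrative):
#   8    /music/artist/track.mp3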
|
bwhitelock/garmon-ng
|
garmon/audi_codes.py
|
Python
|
gpl-3.0
| 34,868
| 0.000488
|
#!/usr/bin/python
# -*- coding: iso-8859-15 -*-
#
# audi_codes.py
#
# Copyright (C) Ben Van Mechelen 2008-2009 <me@benvm.be>
#
# This file is part of Garmon
#
# Garmon is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, write to:
# The Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor
# Boston, MA 02110-1301, USA.
from gettext import gettext as _
DTC_CODES_MANUFACTURER = {
"P1101": _("O2 Sensor Circ.,Bank1-Sensor1Voltage too Low/Air Leak"),
"P1102": _("O2 Sensor Heating Circ.,Bank1-Sensor1 Short to B+"),
"P1103": _("O2 Sensor Heating Circ.,Bank1-Sensor1 Output too Low"),
"P1104": _("Bank1-Sensor2 Voltage too Low/Air Leak"),
"P1105": _("O2 Sensor Heating Circ.,Bank1-Sensor2 Short to B+"),
"P1106": _("O2 Sensor Circ.,Bank2-Sensor1 Voltage too Low/Air Leak"),
"P1107": _("O2 Sensor Heating Circ.,Bank2-Sensor1 Short to B+"),
"P1108": _("O2 Sensor Heating Circ.,Bank2-Sensor1 Output too Low"),
"P1109": _("O2 Sensor Circ.,Bank2-Sensor2 Voltage too Low/Air Leak"),
"P1110": _("O2 Sensor Heating Circ.,Bank2-Sensor2 Short to B+"),
"P1111": _("O2 Control (Bank 1) System too lean"),
"P1112": _("O2 Control (Bank 1) System too rich"),
"P1113": _("Bank1-Sensor1 Internal Resistance too High"),
"P1114": _("Bank1-Sensor2 Internal Resistant too High"),
"P1115": _("O2 Sensor Heater Circ.,Bank1-Sensor1 Short to Ground"),
"P1116": _("O2 Sensor Heater Circ.,Bank1-Sensor1 Open"),
"P1117": _("O2 Sensor Heater Circ.,Bank1-Sensor2 Short to Ground"),
"P1118": _("O2 Sensor Heater Circ.,Bank1-Sensor2 Open"),
"P1119": _("O2 Sensor Heater Circ.,Bank2-Sensor1 Short to Ground"),
"P1120": _("O2 Sensor Heater Circ.,Bank2-Sensor1 Open"),
"P1121": _("O2 Sensor Heater Circ.,Bank2-Sensor2 Short to Ground"),
"P1122": _("O2 Sensor Heater Circ.,Bank2-Sensor2 Open"),
"P1123": _("Long Term Fuel Trim Add.Air.,Bank1 System too Rich"),
"P1124": _("Long Term Fuel Trim Add.Air.,Bank1 System too Lean"),
"P1125": _("Long Term Fuel Trim Add.Air.,Bank2 System too Rich"),
"P1126": _("Long Term Fuel Trim Add.Air.,Bank2 System too Lean"),
"P1127": _("Long Term Fuel Trim mult.,Bank1 System too Rich"),
"P1128": _("Long Term Fuel Trim mult.,Bank1 System too Lean"),
"P1129": _("Long Term Fuel Trim mult.,Bank2 System too Rich"),
"P1130": _("Long Term Fuel Trim mult.,Bank2 System too Lean"),
"P1131": _("Bank2-Sensor1 Internal Rsistance too High"),
"P1132": _("O2 Sensor Heating Circ.,Bank1+2-Sensor1 Short to B+"),
"P1133": _("O2 Sensor Heating Circ.,Bank1+2-Sensor1 Electrical Malfunction"),
"P1134": _("O2 Sensor Heating Circ.,Bank1+2-Sensor2 Short to B+"),
"P1135": _("O2 Sensor Heating Circ.,Bank1+2-Sensor2 Electrical Malfunction"),
"P1136": _("Long Term Fuel Trim Add.Fuel,Bank1 System too Lean"),
"P1137": _("Long Term Fuel Trim Add.Fuel,Bank1 System too Rich"),
"P1138": _("Long Term Fuel Trim Add.Fuel,Bank2 System too Lean"),
"P1139": _("Long Term Fuel Trim Add.Fuel,Bank2 System too Rich"),
"P1140": _("Bank2-Sensor2 Internal Resistance too High"),
"P1141": _("Load Calculation Cross Check Range/Performance"),
"P1142": _("Load Calculation Cross Check Lower Limit Exceeded"),
"P1143": _("Load Calculation Cross Check Upper Limit Exceeded"),
"P1144": _("Mass or Volume Air Flow Circ Open/Short to Ground"),
"P1145": _("Mass or Volume Air Flow Circ Short to B+"),
"P1146": _("Mass or Volume Air Flow Circ Supply Malfunction"),
"P1147": _("O2 Control (Bank 2) System too lean"),
"P1148": _("O2 Control (Bank 2) System too rich"),
"P1149": _("O2 Control (Bank 1) Out of range"),
"P1150": _("O2 Control (Bank 2) Out of range"),
"P1151": _("Bank1, Long Term Fuel Trim, Range 1 Leanness Lower Limit Exceeded"),
"P1152": _("Bank1, Long Term Fuel Trim, Range 2 Leanness Lower Limit Exceeded"),
"P1154": _("Manifold Switch Over Malfunction"),
"P1155": _("Manifold Abs.Pressure Sensor Circ. Short to B+"),
"P1156": _("Manifold Abs.Pressure Sensor Circ. Open/Short to Ground"),
"P1157": _("Manifold Abs.Pressure Sensor Circ. Power Supply Malfunction"),
"P1158": _("Manifold Abs.Pressure Sensor Circ. Range/Performance"),
"P1160": _("Manifold Temp.Sensor Circ. Short to Ground"),
"P1161": _("Manifold Temp.Sensor Circ. Open/Short to B+"),
"P1162": _("Fuel Temp.Sensor Circ. Short to Ground"),
"P1163": _("Fuel Temp.Sensor Circ. Open/Short to B+"),
"P1164": _("Fuel Temperature Sensor Range/Performance/Incorrect Signal"),
"P1165": _("Bank1, Long Term Fuel Trim, Range 1 Rich Limit Exceeded"),
"P1166": _("Bank1, Long Term Fuel Trim, Range 2 Rich Limit Exceeded"),
"P1171": _("Throttle Actuation Potentiometer Sign.2 Range/Performance"),
"P1172": _("Throttle Actuation Potentiometer Sign.2 Signal too Low"),
"P1173": _("Throttle Actuation Potentiometer Sign.2 Signal too High"),
"P1174": _("Fuel Trim, Bank 1 Different injection times"),
"P1176": _("O2 Correction Behind Catalyst,B1 Limit Attained"),
"P1177": _("O2 Correction Behind Catalyst,B2 Limit Attained"),
"P1178": _("Linear 02 Sensor / Pump Current Open Circuit"),
"P1179": _("Linear 02 Sensor / Pump Current Short to ground"),
"P1180": _("Linear 02 Sensor / Pump Current Short to B+"),
"P1181": _("Linear 02 Sensor / Reference Voltage Open Circuit"),
"P1182": _("Linear 02 Sensor / Reference Voltage Short to ground"),
"P1183": _("Linear 02 Sensor / Reference Voltage Short to B+"),
"P1184": _("Linear 02 Sensor / Common Ground Wire Open Circuit"),
"P1185": _("Linear 02 Sensor / Common Ground Wire Short to ground"),
"P1186": _("Linear 02 Sensor / Common Ground Wire Short to B+"),
"P1187": _("Linear 02 Sensor / Compens. Resistor Open Circuit"),
"P1188": _("Linear 02 Sensor / Compens. Resistor Short to ground"),
"P1189": _("Linear 02 Sensor / Compens. Resistor Short to B+"),
"P1190": _("Linear 02 Sensor / Reference Voltage Incorrect Signal"),
"P1196": _("O2 Sensor Heater Circ.,Bank1-Sensor1 Electrical Malfunction"),
"P1197": _("O2 Sensor Heater Circ.,Bank2-Sensor1 Electrical Malfunction"),
"P1198": _("O2 Sensor Heater Circ.,Bank1-Sensor2 Electrical Malfunction"),
"P1199": _("O2 Sensor Heater Circ.,Bank2-Sensor2 Electrical Malfunction"),
"P1201": _("Cyl.1-Fuel Inj.Circ. Electrical Malfunction"),
"P1202": _("Cyl.2-Fuel Inj.Circ. Electrical Malfunction"),
"P1203": _("Cyl.3-Fuel Inj.Circ. Electrical Malfunction"),
"P1204": _("Cyl.4-Fuel Inj.Circ. Electrical Malfunction"),
"P1205": _("Cyl.5-Fuel Inj.Circ. Electrical Malfunction"),
"P1206": _("Cyl.6-Fuel Inj.Circ. Electrical Malfunction"),
"P1207": _("Cyl.7-Fuel Inj.Circ. Electrical Malfunction"),
"P1208": _("Cyl.8-Fuel Inj.Circ. Electrical Malfunction"),
"P1209": _("Intake valves for cylinder shut-off Short circuit to ground"),
"P1210": _("Intake valves for cylinder shut-off Short to B+"),
"P1211": _("Intake valves for cylinder shut-off Open circuit"),
"P1213": _("Cyl.1-Fuel Inj.Circ. Short to B+"),
"P1214": _("Cyl.2-Fuel Inj.Circ. Short to B+"),
"P1215":
|
_("Cyl.3-Fuel Inj.Circ. Short to B+"),
"P1216": _("Cyl.4-Fuel Inj.Circ. Short to B+"),
"P1217": _("Cyl.5-Fuel Inj.Circ. Short to B+"),
"P1218": _("Cyl.6-F
|
uel Inj.Circ. Short to B+"),
"P1219": _("Cyl.7-Fuel Inj.Circ. Short to B+"),
"P1220": _("Cyl.8-Fuel Inj.Circ. Short to B+"),
"P1221": _("Cylinder shut-off exhaust valves Short circuit to ground"),
"P1222": _("Cylinder shut-off exhaust valves Short to B+"),
"P1223": _
|
geotagx/geotagx-project-template
|
src/test_question.py
|
Python
|
agpl-3.0
| 2,076
| 0.010597
|
# This module is part of the GeoTag-X project builder.
# Copyright (C) 2015 UNITAR.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
from question import Question
class TestQuestion(unittest.TestCase):
    def test_valid_keys(self):
self.assertTrue(Question.iskey("A")[0], "Single-character")
self.assertTrue(Question.iskey("thisIsALongKey")[0], "Multi-character")
self.assertTrue(Question.iskey("--")[0], "Hyphens")
self.assertTrue(Question.iskey("--key")[0], "Leading hyphens")
self.assertTrue(Question.iskey("_")[0], "Underscores")
self.assertTrue(Question.iskey("__key")[0], "Leading underscores")
self.assertTrue(Question.iskey("_now-y0u_4re-pushing-1t")[0], "Mixed characters")
self.assertTrue(Question.iskey("_end")[0], "Not a reserved keyword")
def test_illegal_keys(self):
self.assertFalse(Question.iskey("")[0], "Empty string")
self.assertFalse(Question.iskey(" ")[0], "Whitespace only")
self.assertFalse(Question.iskey(" key")[0], "Leading whitespace")
self.assertFalse(Question.iskey("end\t")[0], "Traling tabulation")
self.assertFalse(Question.iskey("*$/\\")[0], "Non-alphanumeric characters")
self.assertFalse(Question.iskey("end")[0], "Reserved keyword")
self.assertFalse(Question.iskey("photoVisible")[0], "Reserved keyword")
self.assertFalse(Question.iskey(32768)[0], "Not a string")
self.assertFalse(Question.iskey("\n")[0], "Illegal escape character")
if __name__ == "__main__":
unittest.main()
|
FRC-RS/FRS
|
leaderboard/apps.py
|
Python
|
mit
| 98
| 0
|
from django.apps import AppConfig
class Leaderboard2Config(AppConfig):
name = 'leaderboard'
|
leppa/home-assistant
|
tests/util/test_json.py
|
Python
|
apache-2.0
| 2,695
| 0.000371
|
"""Test Home Assistant json utility functions."""
from json import JSONEncoder
import os
import sys
from tempfile import mkdtemp
import unittest
from unittest.mock import Mock
import pytest
from homeassistant.exceptions import HomeAssistantError
from homeassistant.util.json import SerializationError, load_json, save_json
# Test data that can be saved as JSON
TEST_JSON_A = {"a": 1, "B": "two"}
TEST_JSON_B = {"a": "one", "B": 2}
# Test data that can not be saved as JSON (keys must be strings)
TEST_BAD_OBJECT = {("A",): 1}
# Test data that can not be loaded as JSON
TEST_BAD_SERIALIZED = "THIS IS NOT JSON\n"
TMP_DIR = None
def setup():
"""Set up for tests."""
global TMP_DIR
TMP_DIR = mkdtemp()
def teardown():
"""Clean up after tests."""
for fname in os.listdir(TMP_DIR):
os.remove(os.path.join(TMP_DIR, fname))
os.rmdir(TMP_DIR)
def _path_for(leaf_name):
return os.path.join(TMP_DIR, leaf_name + ".json")
def test_save_and_load():
"""Test saving and loading back."""
fname = _path_for("test1")
save_json(fname, TEST_JSON_A)
data = load_json(fname)
assert data == TEST_JSON_A
# Skipped on Windows
@unittest.skipIf(
sys.platform.startswith("win"), "private permissions not supported on Windows"
)
def test_save_and_load_private():
"""Test we can load private files and that they are protected."""
fname = _path_for("test2")
save_json(fname, TEST_JSON_A, private=True)
data = load_json(fname)
assert data == TEST_JSON_A
stats = os.stat(fname)
assert stats.st_mode & 0o77 == 0
def test_overwrite_and_reload():
"""Test that we can overwrite an existing file and read back."""
fname = _path_for("test3")
save_json(fname, TEST_JSON_A)
save_json(fname, TEST_JSON_B)
data = load_json(fname)
assert data == TEST_JSON_B
def test_save_bad_data():
"""Test error from trying to save unserialisable data."""
fname = _path_for("test4")
with pytest.raises(SerializationError):
save_json(fname, TEST_BAD_OBJECT)
def test_load_bad_data():
"""Test error from trying to load unserialisable data."""
fname = _path_for("test5")
with open(fname, "w") as fh:
        fh.write(TEST_BAD_SERIALIZED)
with pytest.raises(HomeAssistantError):
load_json(fname)
def test_custom_encoder():
"""Test serializing with a custom encoder."""
class MockJSONEncoder(JSONEncoder):
"""Mock JSON encoder."""
def default(self, o):
"""Mock JSON encode method."""
return "9"
fname = _path_for("test6")
save_json(fname, Mock(), encoder=MockJSONEncoder)
data = load_json(fname)
assert data == "9"
|
s-hertel/ansible
|
lib/ansible/utils/display.py
|
Python
|
gpl-3.0
| 19,334
| 0.001914
|
# (c) 2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import ctypes.util
import errno
import fcntl
import getpass
import locale
import logging
import os
import random
import subprocess
import sys
import textwrap
import time
from struct import unpack, pack
from termios import TIOCGWINSZ
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleAssertionError
from ansible.module_utils._text import to_bytes, to_text, to_native
from ansible.module_utils.six import with_metaclass, text_type
from ansible.utils.color import stringc
from ansible.utils.singleton import Singleton
from ansible.utils.unsafe_proxy import wrap_var
try:
# Python 2
input = raw_input
except NameError:
# Python 3, we already have raw_input
pass
_LIBC = ctypes.cdll.LoadLibrary(ctypes.util.find_library('c'))
# Set argtypes, to avoid segfault if the wrong type is provided,
# restype is assumed to be c_int
_LIBC.wcwidth.argtypes = (ctypes.c_wchar,)
_LIBC.wcswidth.argtypes = (ctypes.c_wchar_p, ctypes.c_int)
# Max for c_int
_MAX_INT = 2 ** (ctypes.sizeof(ctypes.c_int) * 8 - 1) - 1
_LOCALE_INITIALIZED = False
_LOCALE_INITIALIZATION_ERR = None
def initialize_locale():
"""Set the locale to the users default setting
and set ``_LOCALE_INITIALIZED`` to indicate whether
``get_text_width`` may run into trouble
"""
global _LOCALE_INITIALIZED, _LOCALE_INITIALIZATION_ERR
if _LOCALE_INITIALIZED is False:
try:
locale.setlocale(locale.LC_ALL, '')
except locale.Error as e:
_LOCALE_INITIALIZATION_ERR = e
else:
_LOCALE_INITIALIZED = True
def get_text_width(text):
"""Function that utilizes ``wcswidth`` or ``wcwidth`` to determine the
number of columns used to display a text string.
We try first with ``wcswidth``, and fallback to iterating each
character and using wcwidth individually, falling back to a value of 0
for non-printable wide characters
On Py2, this depends on ``locale.setlocale(locale.LC_ALL, '')``,
that in the case of Ansible is done in ``bin/ansible``
"""
if not isinstance(text, text_type):
raise TypeError('get_text_width requires text, not %s' % type(text))
if _LOCALE_INITIALIZATION_ERR:
Display().warning(
'An error occurred while calling ansible.utils.display.initialize_locale '
'(%s). This may result in incorrectly calculated text widths that can '
'cause Display to print incorrect line lengths' % _LOCALE_INITIALIZATION_ERR
)
elif not _LOCALE_INITIALIZED:
Display().warning(
'ansible.utils.display.initialize_locale has not been called, '
'this may result in incorrectly calculated text widths that can '
'cause Display to print incorrect line lengths'
)
try:
width = _LIBC.wcswidth(text, _MAX_INT)
except ctypes.ArgumentError:
width = -1
if width != -1:
return width
width = 0
counter = 0
for c in text:
counter += 1
if c in (u'\x08', u'\x7f', u'\x94', u'\x1b'):
# A few characters result in a subtraction of length:
# BS, DEL, CCH, ESC
# ESC is slightly different in that it's part of an escape sequence, and
# while ESC is non printable, it's part of an escape sequence, which results
# in a single non printable length
width -= 1
counter -= 1
continue
try:
w = _LIBC.wcwidth(c)
except ctypes.ArgumentError:
w = -1
if w == -1:
# -1 signifies a non-printable character
# use 0 here as a best effort
w = 0
width += w
if width == 0 and counter and not _LOCALE_INITIALIZED:
raise EnvironmentError(
'ansible.utils.display.initialize_locale has not been called, '
'and get_text_width could not calculate text width of %r' % text
)
# It doesn't make sense to have a negative printable width
return width if width >= 0 else 0
class FilterBlackList(logging.Filter):
def __init__(self, blacklist):
self.blacklist = [logging.Filter(name) for name in blacklist]
def filter(self, record):
return not any(f.filter(record) for f in self.blacklist)
class FilterUserInjector(logging.Filter):
"""
This is a filter which injects the current user as the 'user' attribute on each record. We need to add this filter
to all logger handlers so that 3rd party libraries won't print an exception due to user not being defined.
"""
try:
username = getpass.getuser()
except KeyError:
# people like to make containers w/o actual valid passwd/shadow and use host uids
username = 'uid=%s' % os.getuid()
def filter(self, record):
record.user = FilterUserInjector.username
return True
logger = None
# TODO: make this a callback event instead
if getattr(C, 'DEFAULT_LOG_PATH'):
    path = C.DEFAULT_LOG_PATH
if path and (os.path.exists(path) and os.access(path, os.W_OK)) or os.access(os.path.dirname(path), os.W_OK):
# NOTE: level is kept at INFO to avoid security disclosures caused by certain libraries when using DEBUG
logging.basicConfig(filename=path, level=logging.INFO, # DO NOT set to logging.DEBUG
format='%(asctime)s p=%(process)d u=%(user)s n=%(name)s | %(message)s')
logger = logging.getLogger('ansible')
for handler in logging.root.handlers:
handler.addFilter(FilterBlackList(getattr(C, 'DEFAULT_LOG_FILTER', [])))
handler.addFilter(FilterUserInjector())
else:
print("[WARNING]: log file at %s is not writeable and we cannot create it, aborting\n" % path, file=sys.stderr)
# map color to log levels
color_to_log_level = {C.COLOR_ERROR: logging.ERROR,
C.COLOR_WARN: logging.WARNING,
C.COLOR_OK: logging.INFO,
C.COLOR_SKIP: logging.WARNING,
C.COLOR_UNREACHABLE: logging.ERROR,
C.COLOR_DEBUG: logging.DEBUG,
C.COLOR_CHANGED: logging.INFO,
C.COLOR_DEPRECATE: logging.WARNING,
C.COLOR_VERBOSE: logging.INFO}
b_COW_PATHS = (
b"/usr/bin/cowsay",
b"/usr/games/cowsay",
b"/usr/local/bin/cowsay", # BSD path for cowsay
b"/opt/local/bin/cowsay", # MacPorts path for cowsay
)
class Display(with_metaclass(Singleton, object)):
def __init__(self, verbosity=0):
self.columns = None
self.verbosity = verbosity
# list of all deprecation messages to prevent duplicate display
self._deprecations = {}
self._warns = {}
self._errors = {}
self.b_cowsay = None
self.noncow = C.ANSIBLE_COW_SELECTION
self.set_cowsay_info()
if self.b_cowsay:
try:
cmd = subprocess.Popen([self.b_cowsay, "-l"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(out, err) = cmd.communicate()
self.cows_available = set([to_text(c) for c in out.split()])
if C.ANSIBLE_COW_ACCEPTLIST and any(C.ANSIBLE_COW_ACCEPTLIST):
self.cows_available = set(C.ANSIBLE_COW_ACCEPTLIST).intersection(self.cows_available)
|
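A simplified, stdlib-only sketch of the width logic get_text_width implements above via libc's wcswidth/wcwidth; the east-asian-width heuristic here is an approximation for illustration, not Ansible's actual code path.

import unicodedata

def text_width_sketch(text):
    # Approximate the printable column width of text (assumption-laden sketch).
    width = 0
    for ch in text:
        if ch in (u'\x08', u'\x7f', u'\x94', u'\x1b'):  # BS, DEL, CCH, ESC subtract one column
            width -= 1
        elif unicodedata.combining(ch):  # combining marks occupy no column
            continue
        elif unicodedata.east_asian_width(ch) in ('W', 'F'):  # wide/fullwidth glyphs
            width += 2
        else:
            width += 1
    return max(width, 0)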
danstoner/python_experiments
|
playing_with_pygame/pygame-tutorial-series/part10.py
|
Python
|
gpl-2.0
| 3,658
| 0.020503
|
import pygame
import time
import random
pygame.init()
display_width = 800
display_height = 600
black = (0,0,0)
white = (255,255,255)
red = (255,0,0)
block_color = (53,115,255)
car_width = 73
gameDisplay = pygame.display.set_mode((display_width,display_height))
pygame.display.set_caption('A bit Racey')
clock = pygame.time.Clock()
carImg = pygame.image.load('racecar.png')
def things_dodged(count):
font = pygame.font.SysFont(None, 25)
text = font.render("Dodged: "+str(count), True, black)
gameDisplay.blit(text,(0,0))
def things(thingx, thingy, thingw, thingh, color):
pygame.draw.rect(gameDisplay, color, [thingx, thingy, thingw, thingh])
def car(x,y):
gameDisplay.blit(carImg,(x,y))
def text_objects(text, font):
textSurface = font.render(text, True, black)
return textSurface, textSurface.get_rect()
def message_display(text):
largeText = pygame.font.Font('freesansbold.ttf',115)
TextSurf, TextRect = text_objects(text, largeText)
TextRect.center = ((display_width/2),(display_height/2))
gameDisplay.blit(TextSurf, TextRect)
pygame.display.update()
time.sleep(2)
game_loop()
def crash():
message_display('You Crashed')
def game_intro():
intro = True
while intro:
for event in pygame.event.get():
print(event)
if event.type == pygame.QUIT:
pygame.quit()
quit()
gameDisplay.fill(white)
largeText = pygame.font.Font('freesansbold.ttf',115)
TextSurf, TextRect = text_objects("A bit Racey", largeText)
TextRect.center = ((display_width/2),(display_height/2))
gameDisplay.blit(TextSurf, TextRect)
pygame.display.update()
clock.tick(15)
def game_loop():
x = (display_width * 0.45)
y = (display_height * 0.8)
x_change = 0
thing_startx = random.randrange(0, display_width)
thing_starty = -600
thing_speed = 4
thing_width = 100
thing_height = 100
thingCount = 1
dodged = 0
gameExit = False
while not gameExit:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
quit()
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_LEFT:
x_change = -5
if event.key == pygame.K_RIGHT:
x_change = 5
if event.type == pygame.KEYUP:
                if event.key == pygame.K_LEFT or event.key == pygame.K_RIGHT:
x_change = 0
x += x_change
gameDisplay.fill(white)
# things(thingx, thingy, thingw, thingh, color)
things(thing_startx, thing_starty, thing_width, thing_height, block_color)
thing_starty += thing_speed
car(x,y)
things_dodged(dodged)
if x > display_width - car_width or x < 0:
crash()
if thing_starty > display_height:
thing_starty = 0 - thing_height
thing_startx = random.randrange(0,display_width)
dodged += 1
thing_speed += 1
thing_width += (dodged * 1.2)
if y < thing_starty+thing_height:
print('y crossover')
if x > thing_startx and x < thing_startx + thing_width or x+car_width > thing_startx and x + car_width < thing_startx+thing_width:
print('x crossover')
crash()
pygame.display.update()
clock.tick(60)
game_intro()
game_loop()
pygame.quit()
quit()
|
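The hand-rolled x/y "crossover" checks in game_loop above implement axis-aligned overlap; pygame.Rect.colliderect expresses the same test directly, as in this sketch (the rectangle bounds are made up).

import pygame

car_rect = pygame.Rect(360, 480, 73, 40)      # hypothetical car bounds (x, y, w, h)
thing_rect = pygame.Rect(350, 470, 100, 100)  # hypothetical obstacle bounds
if car_rect.colliderect(thing_rect):
    print('collision')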
hbhzwj/GAD
|
gad/Experiment/EvalForBotnetDetection.py
|
Python
|
gpl-3.0
| 9,139
| 0.001532
|
#!/usr/bin/env python
""" Evaluate the performance of detector
get the statistical quantify for the hypotheis test
like False Alarm Rate.
"""
from __future__ import print_function, division, absolute_import
import copy, os
import collections
from ..Detector import MEM_FS
from ..Detector import BotDetector
from ..util import update_not_none, plt, np, DataRecorder
from ..util import zdump, zload, Load, get_detect_metric
from ..util import DataEndException
import itertools
import pandas
from .Detect import Detect
class BotnetDetectionEval(Detect):
"""plot ROC curve for the hypothesis test"""
def init_parser(self, parser):
super(BotnetDetectionEval, self).init_parser(parser)
parser.add_argument('--roc_thresholds', default=None, type=Load,
help=("any valid python expression. Thresholds used for get "
"roc curve"))
parser.add_argument('--label_col_name', default=None, type=str,
help="name of the label column")
parser.add_argument('--ip_col_names', default=None,
type=lambda x: x.split(','),
help="name of the ip columns")
@staticmethod
def parse_label(label):
return 'Botnet' in label
def get_ground_truth(self):
label_col_name = self.desc['label_col_name']
ip_col_names = self.desc['ip_col_names']
detect_rg = self.desc.get('detect_rg')
rg_type = self.desc['win_type']
assert len(ip_col_names) <= 2, "at most two IP columns are allowed."
fetch_columns = [label_col_name] + ip_col_names
data_records = self.detector.data_file.data.get_rows(fetch_columns,
rg=detect_rg,
rg_type=rg_type)
ground_truth_bot_ips = set()
all_ips = set()
for row in data_records:
if self.parse_label(row[0]): # is botflow
ground_truth_bot_ips.add(row[1])
ground_truth_bot_ips.add(row[2])
all_ips.add(row[1])
all_ips.add(row[2])
return {
'ground_truth_bot_ips': ground_truth_bot_ips,
'all_ips': all_ips,
}
@staticmethod
def get_detected_ips(label_info, detection):
ips = set()
for i, d in enumerate(detection):
if not d:
continue
ips |= set(label_info['win_ips'][i])
return ips
def eval(self):
thresholds = self.desc['roc_thresholds']
ground_truth = self.get_ground_truth()
self.logger.debug('# of ips in this time frame: %d.' %
(len(ground_truth['all_ips'])))
self.logger.debug('# of bot ips in this time frame: %d.' %
(len(ground_truth['ground_truth_bot_ips'])))
divs = self.detector.record_data['entropy']
divs = np.array(divs, dtype=float) / np.max(divs)
bot_detector_desc = copy.deepcopy(self.desc)
bot_detector_desc.update({
'threshold': 0,
'anomaly_detector': self.detector,
})
bot_detector = BotDetector.SoBotDet(bot_detector_desc)
data_recorder = DataRecorder()
res = np.zeros((len(thresholds), 2))
for i, threshold in enumerate(thresholds):
bot_detector.desc['threshold'] = threshold
self.logger.info('Start to detect with threshold %s ' % (threshold))
result = bot_detector.detect(None, anomaly_detect=False)
tp, fn, tn, fp, sensitivity, specificity = \
get_detect_metric(ground_truth['ground_truth_bot_ips'],
result['detected_bot_ips'],
ground_truth['all_ips'])
tpr = tp * 1.0 / (tp + fn) if (tp + fn) > 0 else float('nan')
fpr = fp * 1.0 / (fp + tn) if (fp + tn) > 0 else float('nan')
data_recorder.add(threshold=threshold, tp=tp, tn=tn, fp=fp, fn=fn,
tpr=tpr, fpr=fpr,
detect_result=result)
data_frame = data_recorder.to_pandas_dataframe()
        data_frame = data_frame.set_index(['threshold'], drop=False)
return {
'metric': data_frame,
'ground_truth_bot_ips': ground_truth['ground_truth_bot_ips'],
'all_ips': ground_truth['all_ips'],
}
def run(self):
self.desc = copy.deepcopy(self.args.config['DETECTOR_DESC'])
update_not_none(self.desc, self.args.__dict__)
self.detect()
return self.eval()
class TimeBasedBotnetDetectionEval(BotnetDetectionEval):
"""Calculate corrected metrics (tTP, tFN, tFP, tTN) for botnet detection.
Please refer to the following paper for the details:
Garcia, Sebastian, et al. 'An empirical comparison of botnet detection
methods.' Computers & Security 45 (2014): 100-123.
"""
def init_parser(self, parser):
super(TimeBasedBotnetDetectionEval, self).init_parser(parser)
parser.add_argument('--timeframe_size', default=None, type=float,
help=("--timeframe_size [float] the size of each time frame."
"Metrics (tTP, tFN, tFP, tTN) will be calculated for "
"each time frame."))
def parse_tuple(s):
            return tuple(float(val) for val in s.split(','))
parser.add_argument('--timeframe_rg', default=None, type=parse_tuple,
help=("comma-separated strings, the first one is start time, "
"the second one is end time. Data in the range will be "
"divided to timeframes for evaluation."))
parser.add_argument('--timeframe_decay_ratio', default=None, type=float,
help="parameter in the exp correction function.")
parser.add_argument('--output_prefix', default=None,
help='prefix for output file')
def get_roc_curve(self, stats):
thresholds = self.desc['roc_thresholds']
if 'threshold' not in stats.columns:
return
data_recorder = DataRecorder()
for threshold in thresholds:
threshold_stats = stats[stats.threshold==threshold]
sum_stats = threshold_stats.sum()
FPR = sum_stats.tFP / (sum_stats.tFP + sum_stats.tTN)
TPR = sum_stats.tTP / (sum_stats.tTP + sum_stats.tFN)
precision = sum_stats.tTP / (sum_stats.tTP + sum_stats.tFP)
f1_score = 2 * precision * TPR / (precision + TPR)
data_recorder.add(threshold=threshold,
FPR=FPR,
TPR=TPR,
precision=precision,
f1_score=f1_score)
return data_recorder.to_pandas_dataframe()
def run(self):
timeframe_rg = self.desc['timeframe_rg']
thresholds = self.desc['roc_thresholds']
assert len(timeframe_rg) == 2, "unknown format of timeframe_rg"
timeframe_size = self.desc['timeframe_size']
timeframe_decay_ratio = self.desc['timeframe_decay_ratio']
cur_time = timeframe_rg[0]
data_recorder = DataRecorder()
timeframe_idx = 0
while cur_time < timeframe_rg[1]:
self.desc['detect_rg'] = [cur_time, cur_time + timeframe_size]
self.detect()
try:
eval_result = self.eval()
except DataEndException:
self.logger.warning('Has read end of the data in evaluation!')
break
metric = eval_result['metric']
bot_ips = eval_result['ground_truth_bot_ips']
bot_ip_num = float(len(bot_ips))
normal_ip_num = float(len(eval_result['all_ips'])) - bot_ip_num
correct_value = np.exp(-1 * timeframe_decay_ratio * timeframe_idx) + 1
tTP = metric.tp * correct_value / bot_ip_num # UPDATE HERE
tFN = metric.fn * correct_value / bot_ip_num
            tFP = metric.fp * correct_value / normal_ip_num
|
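Each iteration of the threshold loop in BotnetDetectionEval.eval above reduces to the usual confusion-matrix ratios; a standalone sketch with made-up counts:

tp, fn, tn, fp = 8, 2, 85, 5  # hypothetical counts for one threshold
tpr = tp * 1.0 / (tp + fn) if (tp + fn) > 0 else float('nan')
fpr = fp * 1.0 / (fp + tn) if (fp + tn) > 0 else float('nan')
print(tpr, fpr)  # 0.8 0.0555..., one (FPR, TPR) point on the ROC curve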
google/tink
|
testing/cross_language/key_generation_consistency_test.py
|
Python
|
apache-2.0
| 15,388
| 0.004744
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests that keys are consistently accepted or rejected in all languages."""
import itertools
from typing import Iterable, Tuple
from absl import logging
from absl.testing import absltest
from absl.testing import parameterized
import tink
from tink import aead
from tink import daead
from tink import hybrid
from tink import mac
from tink import prf
from tink import signature
from tink.proto import common_pb2
from tink.proto import ecdsa_pb2
from tink.proto import tink_pb2
from util import supported_key_types
from util import testing_servers
# Test cases that succeed in a language but should fail
SUCCEEDS_BUT_SHOULD_FAIL = [
# TODO(b/160130470): In CC and Python Hybrid templates are not checked for
# valid AEAD params. (These params *are* checked when the key is used.)
('EciesAeadHkdfPrivateKey(NIST_P256,UNCOMPRESSED,SHA256,AesEaxKey(15,11))',
'cc'),
('EciesAeadHkdfPrivateKey(NIST_P256,UNCOMPRESSED,SHA256,AesEaxKey(15,11))',
'python'),
]
# Test cases that fail in a language but should succeed
FAILS_BUT_SHOULD_SUCCEED = [
# TODO(b/160134058) Java and Go do not accept templates with CURVE25519.
('EciesAeadHkdfPrivateKey(CURVE25519,UNCOMPRESSED,SHA1,AesGcmKey(16))',
'java'),
('EciesAeadHkdfPrivateKey(CURVE25519,UNCOMPRESSED,SHA1,AesGcmKey(16))',
'go'),
('EciesAeadHkdfPrivateKey(CURVE25519,UNCOMPRESSED,SHA224,AesGcmKey(16))',
'java'),
('EciesAeadHkdfPrivateKey(CURVE25519,UNCOMPRESSED,SHA224,AesGcmKey(16))',
'go'),
('EciesAeadHkdfPrivateKey(CURVE25519,UNCOMPRESSED,SHA256,AesGcmKey(16))',
'java'),
('EciesAeadHkdfPrivateKey(CURVE25519,UNCOMPRESSED,SHA256,AesGcmKey(16))',
'go'),
('EciesAeadHkdfPrivateKey(CURVE25519,UNCOMPRESSED,SHA384,AesGcmKey(16))',
'java'),
('EciesAeadHkdfPrivateKey(CURVE25519,UNCOMPRESSED,SHA384,AesGcmKey(16))',
'go'),
('EciesAeadHkdfPrivateKey(CURVE25519,UNCOMPRESSED,SHA512,AesGcmKey(16))',
'java'),
('EciesAeadHkdfPrivateKey(CURVE25519,UNCOMPRESSED,SHA512,AesGcmKey(16))',
'go'),
]
HASH_TYPES = [
common_pb2.UNKNOWN_HASH, common_pb2.SHA1, common_pb2.SHA224,
common_pb2.SHA256, common_pb2.SHA384, common_pb2.SHA512
]
CURVE_TYPES = [
common_pb2.UNKNOWN_CURVE,
common_pb2.NIST_P256,
common_pb2.NIST_P384,
common_pb2.NIST_P521,
common_pb2.CURVE25519
]
EC_POINT_FORMATS = [
common_pb2.UNKNOWN_FORMAT,
common_pb2.UNCOMPRESSED,
common_pb2.COMPRESSED,
common_pb2.DO_NOT_USE_CRUNCHY_UNCOMPRESSED
]
SIGNATURE_ENCODINGS = [
ecdsa_pb2.UNKNOWN_ENCODING,
ecdsa_pb2.IEEE_P1363,
ecdsa_pb2.DER
]
TestCasesType = Iterable[Tuple[str, tink_pb2.KeyTemplate]]
def aes_eax_test_cases() -> TestCasesType:
for key_size in [15, 16, 24, 32, 64, 96]:
for iv_size in [11, 12, 16, 17, 24, 32]:
yield ('AesEaxKey(%d,%d)' % (key_size, iv_size),
aead.aead_key_templates.create_aes_eax_key_template(
key_size, iv_size))
def aes_gcm_test_cases() -> TestCasesType:
for key_size in [15, 16, 24, 32, 64, 96]:
yield ('AesGcmKey(%d)' % key_size,
aead.aead_key_templates.create_aes_gcm_key_template(key_size))
def aes_gcm_siv_test_cases() -> TestCasesType:
for key_size in [15, 16, 24, 32, 64, 96]:
yield ('AesGcmSivKey(%d)' % key_size,
aead.aead_key_templates.create_aes_gcm_siv_key_template(key_size))
def aes_ctr_hmac_aead_test_cases() -> TestCasesType:
def _test_case(aes_key_size=16, iv_size=16, hmac_key_size=16,
tag_size=16, hash_type=common_pb2.SHA256):
return ('AesCtrHmacAeadKey(%d,%d,%d,%d,%s)' %
(aes_key_size, iv_size, hmac_key_size, tag_size,
common_pb2.HashType.Name(hash_type)),
aead.aead_key_templates.create_aes_ctr_hmac_aead_key_template(
aes_key_size=aes_key_size,
iv_size=iv_size,
hmac_key_size=hmac_key_size,
tag_size=tag_size,
hash_type=hash_type))
for aes_key_size in [15, 16, 24, 32, 64, 96]:
for iv_size in [11, 12, 16, 17, 24, 32]:
yield _test_case(aes_key_size=aes_key_size, iv_size=iv_size)
for hmac_key_size in [15, 16, 24, 32, 64, 96]:
for tag_size in [9, 10, 16, 20, 21, 24, 32, 33, 64, 65]:
for hash_type in HASH_TYPES:
yield _test_case(hmac_key_size=hmac_key_size, tag_size=tag_size,
hash_type=hash_type)
def hmac_test_cases() -> TestCasesType:
def _test_case(key_size=32, tag_size=16, hash_type=common_pb2.SHA256):
return ('HmacKey(%d,%d,%s)' % (key_size, tag_size,
common_pb2.HashType.Name(hash_type)),
mac.mac_key_templates.create_hmac_key_template(
key_size, tag_size, hash_type))
for key_size in [15, 16, 24, 32, 64, 96]:
yield _test_case(key_size=key_size)
for tag_size in [9, 10, 16, 20, 21, 24, 32, 33, 64, 65]:
for hash_type in HASH_TYPES:
yield _test_case(tag_size=tag_size, hash_type=hash_type)
def aes_cmac_test_cases() -> TestCasesType:
def _test_case(key_size=32, tag_size=16):
return ('AesCmacKey(%d,%d)' % (key_size, tag_size),
mac.mac_key_templates.create_aes_cmac_key_template(
key_size, tag_size))
for key_size in [15, 16, 24, 32, 64, 96]:
yield _test_case(key_size=key_size)
for tag_size in [9, 10, 16, 20, 21, 24, 32, 33, 64, 65]:
yield _test_case(tag_size=tag_size)
def aes_cmac_prf_test_cases() -> TestCasesType:
for key_size in [15, 16, 24, 32, 64, 96]:
yield ('AesCmacPrfKey(%d)' % key_size,
prf.prf_key_templates._create_aes_cmac_key_template(key_size))
def hmac_prf_test_cases() -> TestCasesType:
def _test_case(key_size=32, hash_type=common_pb2.SHA256):
return ('HmacPrfKey(%d,%s)' % (key_size,
common_pb2.HashType.Name(hash_type)),
prf.prf_key_templates._create_hmac_key_template(
key_size, hash_type))
for key_size in [15, 16, 24, 32, 64, 96]:
yield _test_case(key_size=key_size)
for hash_type in HASH_TYPES:
yield _test_case(hash_type=hash_type)
def hkdf_prf_test_cases() -> TestCasesType:
def _test_case(key_size=32, hash_type=common_pb2.SHA256):
return ('HkdfPrfKey(%d,%s)' % (key_size,
common_pb2.HashType.Name(hash_type)),
prf.prf_key_templates._create_hkdf_key_template(
                key_size, hash_type))
for key_size in [15, 16, 24, 32, 64, 96]:
yield _test_case(key_size=key_size)
for hash_type in HASH_TYPES:
yield _test_case(hash_type=hash_type)
def aes_siv_test_cases() -> TestCasesType:
for key_size in [15, 16, 24, 32, 64, 96]:
yield ('AesSivKey(%d)' % key_size,
daead.deterministic_aead_key_templates.create_aes_siv_key_template(
key_size))
def ecies_aead_hkdf_test_cases() -> TestCasesType:
for curve_type in CURVE_TYPES:
for hash_type in HASH_TYPES:
ec_point_format = common_pb2.UNCOMPRESSED
dem_key_template = aead.aead_key_templates.AES128_GCM
yield ('EciesAeadHkdfPrivateKey(%s,%s,%s,AesGcmKey(16))' %
(common_pb2.EllipticCurveType.Name(curve_type),
common_pb2.EcPointFormat.Name(ec_point_format),
common_pb2.HashType.Name(hash_type)),
hybrid.hybrid_key_templates.create_ecies_aead_hkdf_key_template(
curve_type, ec_point_format, hash_type, dem_key_template))
for ec_point_format in EC_POINT_FORMATS:
curve_type = common_pb2.NIST_P256
hash_type = common_pb2.SHA256
    dem_key_template = aead.aead_key_templates.AES128_GCM
|
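Every *_test_cases generator above walks a parameter grid and yields (name, template) pairs; the pattern generalizes to a helper like this sketch (grid_test_cases is an invented name, not part of Tink).

import itertools

def grid_test_cases(name_fmt, factory, **param_lists):
    # Yield (name, case) pairs over the cartesian product of the parameter lists.
    keys = sorted(param_lists)
    for values in itertools.product(*(param_lists[k] for k in keys)):
        params = dict(zip(keys, values))
        yield (name_fmt % params, factory(**params))

# e.g. grid_test_cases('AesGcmKey(%(key_size)d)', make_template, key_size=[15, 16, 24, 32])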
codepanda/pycicl
|
tests/fixtures/parallel/tests/testfoo.py
|
Python
|
mit
| 2,035
| 0.064865
|
from unittest import TestCase
class TestFoo( TestCase ):
def test_foo_1( self ):
self.assertTrue( True )
def test_foo_2( self ):
self.assertTrue( True )
    def test_foo_3( self ):
self.assertTrue( True )
def test_foo_4( self ):
self.assertTrue( True )
def test_foo_5( self ):
self.assertTrue( True )
    def test_foo_6( self ):
self.assertTrue( True )
def test_foo_7( self ):
self.assertTrue( True )
def test_foo_8( self ):
self.assertTrue( True )
def test_foo_9( self ):
self.assertTrue( True )
def test_foo_10( self ):
self.assertTrue( True )
def test_foo_11( self ):
self.assertTrue( True )
def test_foo_12( self ):
self.assertTrue( True )
def test_foo_13( self ):
self.assertTrue( True )
def test_foo_14( self ):
self.assertTrue( True )
def test_foo_15( self ):
self.assertTrue( True )
def test_foo_16( self ):
self.assertTrue( True )
def test_foo_17( self ):
self.assertTrue( True )
def test_foo_18( self ):
self.assertTrue( True )
def test_foo_19( self ):
self.assertTrue( True )
def test_foo_20( self ):
self.assertTrue( True )
def test_foo_21( self ):
self.assertTrue( True )
def test_foo_22( self ):
self.assertTrue( True )
def test_foo_23( self ):
self.assertTrue( True )
def test_foo_24( self ):
self.assertTrue( True )
def test_foo_25( self ):
self.assertTrue( True )
def test_foo_26( self ):
self.assertTrue( True )
def test_foo_27( self ):
self.assertTrue( True )
def test_foo_28( self ):
self.assertTrue( True )
def test_foo_29( self ):
self.assertTrue( True )
def test_foo_30( self ):
self.assertTrue( True )
def test_foo_31( self ):
self.assertTrue( True )
def test_foo_32( self ):
self.assertTrue( True )
|
sfstoolbox/sfs-python
|
sfs/fd/esa.py
|
Python
|
mit
| 11,543
| 0
|
"""Compute ESA driving functions for various systems.
ESA is abbreviation for equivalent scattering approach.
ESA driving functions for an edge-shaped SSD are provided below.
Further ESA for different geometries might be added here.
Note that mode-matching (such as NFC-HOA, SDM) are equivalent
to ESA in their specific geometries (spherical/circular, planar/linear).
"""
import numpy as _np
from scipy.special import jn as _jn, hankel2 as _hankel2
from . import secondary_source_line as _secondary_source_line
from . import secondary_source_point as _secondary_source_point
from .. import util as _util
def plane_2d_edge(omega, x0, n=[0, 1, 0], *, alpha=_np.pi*3/2, Nc=None,
c=None):
r"""Driving function for 2-dimensional plane wave with edge ESA.
Driving function for a virtual plane wave using the 2-dimensional ESA
for an edge-shaped secondary source distribution consisting of
monopole line sources.
Parameters
----------
omega : float
Angular frequency.
    x0 : (N, 3) array_like
Sequence of secondary source positions.
n : (3,) array_like, optional
Normal vector of synthesized plane wave.
alpha : float, optional
Outer angle of edge.
Nc : int, optional
Number of elements for series expansion of driving function. Estimated
if not given.
c : float, optional
Speed of sound
Returns
-------
d : (N,) numpy.ndarray
Complex weights of secondary sources.
selection : (N,) numpy.ndarray
Boolean array containing ``True`` or ``False`` depending on
whether the corresponding secondary source is "active" or not.
secondary_source_function : callable
A function that can be used to create the sound field of a
single secondary source. See `sfs.fd.synthesize()`.
Notes
-----
One leg of the secondary sources has to be located on the x-axis (y0=0),
the edge at the origin.
Derived from :cite:`Spors2016`
"""
x0 = _np.asarray(x0)
n = _util.normalize_vector(n)
k = _util.wavenumber(omega, c)
phi_s = _np.arctan2(n[1], n[0]) + _np.pi
L = x0.shape[0]
r = _np.linalg.norm(x0, axis=1)
phi = _np.arctan2(x0[:, 1], x0[:, 0])
phi = _np.where(phi < 0, phi + 2 * _np.pi, phi)
if Nc is None:
Nc = _np.ceil(2 * k * _np.max(r) * alpha / _np.pi)
epsilon = _np.ones(Nc) # weights for series expansion
epsilon[0] = 2
d = _np.zeros(L, dtype=complex)
for m in _np.arange(Nc):
nu = m * _np.pi / alpha
d = d + 1/epsilon[m] * _np.exp(1j*nu*_np.pi/2) * _np.sin(nu*phi_s) \
* _np.cos(nu*phi) * nu/r * _jn(nu, k*r)
d[phi > 0] = -d[phi > 0]
selection = _util.source_selection_all(len(x0))
return 4*_np.pi/alpha * d, selection, _secondary_source_line(omega, c)
def plane_2d_edge_dipole_ssd(omega, x0, n=[0, 1, 0], *, alpha=_np.pi*3/2,
Nc=None, c=None):
r"""Driving function for 2-dimensional plane wave with edge dipole ESA.
Driving function for a virtual plane wave using the 2-dimensional ESA
for an edge-shaped secondary source distribution consisting of
dipole line sources.
Parameters
----------
omega : float
Angular frequency.
    x0 : (N, 3) array_like
Sequence of secondary source positions.
n : (3,) array_like, optional
Normal vector of synthesized plane wave.
alpha : float, optional
Outer angle of edge.
Nc : int, optional
Number of elements for series expansion of driving function. Estimated
if not given.
c : float, optional
Speed of sound
Returns
-------
d : (N,) numpy.ndarray
Complex weights of secondary sources.
selection : (N,) numpy.ndarray
Boolean array containing ``True`` or ``False`` depending on
whether the corresponding secondary source is "active" or not.
secondary_source_function : callable
A function that can be used to create the sound field of a
single secondary source. See `sfs.fd.synthesize()`.
Notes
-----
One leg of the secondary sources has to be located on the x-axis (y0=0),
the edge at the origin.
Derived from :cite:`Spors2016`
"""
x0 = _np.asarray(x0)
n = _util.normalize_vector(n)
k = _util.wavenumber(omega, c)
    phi_s = _np.arctan2(n[1], n[0]) + _np.pi
L = x0.shape[0]
r = _np.linalg.norm(x0, axis=1)
    phi = _np.arctan2(x0[:, 1], x0[:, 0])
phi = _np.where(phi < 0, phi + 2 * _np.pi, phi)
if Nc is None:
Nc = _np.ceil(2 * k * _np.max(r) * alpha / _np.pi)
epsilon = _np.ones(Nc) # weights for series expansion
epsilon[0] = 2
d = _np.zeros(L, dtype=complex)
for m in _np.arange(Nc):
nu = m * _np.pi / alpha
d = d + 1/epsilon[m] * _np.exp(1j*nu*_np.pi/2) * _np.cos(nu*phi_s) \
* _np.cos(nu*phi) * _jn(nu, k*r)
return 4*_np.pi/alpha * d
def line_2d_edge(omega, x0, xs, *, alpha=_np.pi*3/2, Nc=None, c=None):
r"""Driving function for 2-dimensional line source with edge ESA.
Driving function for a virtual line source using the 2-dimensional ESA
for an edge-shaped secondary source distribution consisting of line
sources.
Parameters
----------
omega : float
Angular frequency.
    x0 : (N, 3) array_like
Sequence of secondary source positions.
xs : (3,) array_like
Position of synthesized line source.
alpha : float, optional
Outer angle of edge.
Nc : int, optional
Number of elements for series expansion of driving function. Estimated
if not given.
c : float, optional
Speed of sound
Returns
-------
d : (N,) numpy.ndarray
Complex weights of secondary sources.
selection : (N,) numpy.ndarray
Boolean array containing ``True`` or ``False`` depending on
whether the corresponding secondary source is "active" or not.
secondary_source_function : callable
A function that can be used to create the sound field of a
single secondary source. See `sfs.fd.synthesize()`.
Notes
-----
One leg of the secondary sources has to be located on the x-axis (y0=0),
the edge at the origin.
Derived from :cite:`Spors2016`
"""
x0 = _np.asarray(x0)
k = _util.wavenumber(omega, c)
phi_s = _np.arctan2(xs[1], xs[0])
if phi_s < 0:
phi_s = phi_s + 2 * _np.pi
r_s = _np.linalg.norm(xs)
L = x0.shape[0]
r = _np.linalg.norm(x0, axis=1)
phi = _np.arctan2(x0[:, 1], x0[:, 0])
phi = _np.where(phi < 0, phi + 2 * _np.pi, phi)
if Nc is None:
Nc = _np.ceil(2 * k * _np.max(r) * alpha / _np.pi)
epsilon = _np.ones(Nc) # weights for series expansion
epsilon[0] = 2
d = _np.zeros(L, dtype=complex)
idx = (r <= r_s)
for m in _np.arange(Nc):
nu = m * _np.pi / alpha
f = 1/epsilon[m] * _np.sin(nu*phi_s) * _np.cos(nu*phi) * nu/r
d[idx] = d[idx] + f[idx] * _jn(nu, k*r[idx]) * _hankel2(nu, k*r_s)
d[~idx] = d[~idx] + f[~idx] * _jn(nu, k*r_s) * _hankel2(nu, k*r[~idx])
d[phi > 0] = -d[phi > 0]
selection = _util.source_selection_all(len(x0))
return -1j*_np.pi/alpha * d, selection, _secondary_source_line(omega, c)
def line_2d_edge_dipole_ssd(omega, x0, xs, *, alpha=_np.pi*3/2, Nc=None,
c=None):
r"""Driving function for 2-dimensional line source with edge dipole ESA.
Driving function for a virtual line source using the 2-dimensional ESA
for an edge-shaped secondary source distribution consisting of dipole line
sources.
Parameters
----------
omega : float
Angular frequency.
x0 : (N, 3) array_like
Sequence of secondary source positions.
xs : (3,) array_like
Position of synthesized line source.
alpha : float, optional
Outer angle of edge.
Nc : int, optional
Number of elements for series expansion of driving function. Estimated
if not given.
c : float, optional
Speed of sound
Returns
|
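A hedged usage sketch for the edge-ESA plane-wave driving function defined above; the secondary-source geometry is invented but respects the documented constraint (one leg on the x-axis, edge at the origin).

import numpy as np
import sfs

omega = 2 * np.pi * 500  # 500 Hz
x0 = np.array([[x, 0.0, 0.0] for x in np.linspace(0.1, 2.0, 40)])  # leg on the x-axis
d, selection, ssf = sfs.fd.esa.plane_2d_edge(omega, x0, n=[0, 1, 0])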
victorpoluceno/python_kinect_socketio
|
urls.py
|
Python
|
bsd-2-clause
| 349
| 0.005731
|
from django.conf.urls.defaults import patterns, include, url
from singlecontrol.views import index, socketio
urlpatterns = patterns('',
url(r'^$', view=index, name='index'),
url(r'^socket\.io', view=socketio, name='socketio'),
)
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
urlpatterns += staticfiles_urlpatterns()
|
diogocs1/comps
|
web/addons/l10n_be_coda/__init__.py
|
Python
|
apache-2.0
| 1,105
| 0.00181
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (c) 2011 Noviat nv/sa (www.noviat.be). All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import l10n_be_coda
import wizard
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
emacsway/rope
|
ropetest/objectinfertest.py
|
Python
|
gpl-2.0
| 14,587
| 0
|
try:
import unittest2 as unittest
except ImportError:
import unittest
import rope.base.project
import rope.base.builtins
from rope.base import libutils
from ropetest import testutils
class ObjectInferTest(unittest.TestCase):
def setUp(self):
super(ObjectInferTest, self).setUp()
self.project = testutils.sample_project()
def tearDown(self):
testutils.remove_project(self.project)
super(ObjectInferTest, self).tearDown()
def test_simple_type_inferencing(self):
code = 'class Sample(object):\n pass\na_var = Sample()\n'
scope = libutils.get_string_scope(self.project, code)
sample_class = scope['Sample'].get_object()
a_var = scope['a_var'].get_object()
self.assertEquals(sample_class, a_var.get_type())
def test_simple_type_inferencing_classes_defined_in_holding_scope(self):
code = 'class Sample(object):\n pass\n' \
'def a_func():\n a_var = Sample()\n'
scope = libutils.get_string_scope(self.project, code)
sample_class = scope['Sample'].get_object()
a_var = scope['a_func'].get_object().\
get_scope()['a_var'].get_object()
self.assertEquals(sample_class, a_var.get_type())
def test_simple_type_inferencing_classes_in_class_methods(self):
code = 'class Sample(object):\n pass\n' \
'class Another(object):\n' \
' def a_method():\n a_var = Sample()\n'
scope = libutils.get_string_scope(self.project, code)
sample_class = scope['Sample'].get_object()
another_class = scope['Another'].get_object()
a_var = another_class['a_method'].\
get_object().get_scope()['a_var'].get_object()
self.assertEquals(sample_class, a_var.get_type())
def test_simple_type_inferencing_class_attributes(self):
code = 'class Sample(object):\n pass\n' \
'class Another(object):\n' \
' def __init__(self):\n self.a_var = Sample()\n'
scope = libutils.get_string_scope(self.project, code)
sample_class = scope['Sample'].get_object()
another_class = scope['Another'].get_object()
a_var = another_class['a_var'].get_object()
self.assertEquals(sample_class, a_var.get_type())
def test_simple_type_inferencing_for_in_class_assignments(self):
code = 'class Sample(object):\n pass\n' \
'class Another(object):\n an_attr = Sample()\n'
scope = libutils.get_string_scope(self.project, code)
sample_class = scope['Sample'].get_object()
another_class = scope['Another'].get_object()
an_attr = another_class['an_attr'].get_object()
self.assertEquals(sample_class, an_attr.get_type())
def test_simple_type_inferencing_for_chained_assignments(self):
mod = 'class Sample(object):\n pass\n' \
'copied_sample = Sample'
mod_scope = libutils.get_string_scope(self.project, mod)
sample_class = mod_scope['Sample']
copied_sample = mod_scope['copied_sample']
self.assertEquals(sample_class.get_object(),
copied_sample.get_object())
    def test_following_chained_assignments_avoiding_circles(self):
mod = 'class Sample(object):\n pass\n' \
'sample_class = Sample\n' \
'sample_class = sample_class\n'
mod_scope = libutils.get_string_scope(self.project, mod)
        sample_class = mod_scope['Sample']
sample_class_var = mod_scope['sample_class']
self.assertEquals(sample_class.get_object(),
sample_class_var.get_object())
def test_function_returned_object_static_type_inference1(self):
src = 'class Sample(object):\n pass\n' \
'def a_func():\n return Sample\n' \
'a_var = a_func()\n'
scope = libutils.get_string_scope(self.project, src)
sample_class = scope['Sample']
a_var = scope['a_var']
self.assertEquals(sample_class.get_object(), a_var.get_object())
def test_function_returned_object_static_type_inference2(self):
src = 'class Sample(object):\n pass\n' \
'def a_func():\n return Sample()\n' \
'a_var = a_func()\n'
scope = libutils.get_string_scope(self.project, src)
sample_class = scope['Sample'].get_object()
a_var = scope['a_var'].get_object()
self.assertEquals(sample_class, a_var.get_type())
def test_recursive_function_returned_object_static_type_inference(self):
src = 'class Sample(object):\n pass\n' \
'def a_func():\n' \
' if True:\n return Sample()\n' \
' else:\n return a_func()\n' \
'a_var = a_func()\n'
scope = libutils.get_string_scope(self.project, src)
sample_class = scope['Sample'].get_object()
a_var = scope['a_var'].get_object()
self.assertEquals(sample_class, a_var.get_type())
def test_func_returned_obj_using_call_spec_func_static_type_infer(self):
src = 'class Sample(object):\n' \
' def __call__(self):\n return Sample\n' \
'sample = Sample()\na_var = sample()'
scope = libutils.get_string_scope(self.project, src)
sample_class = scope['Sample']
a_var = scope['a_var']
self.assertEquals(sample_class.get_object(), a_var.get_object())
def test_list_type_inferencing(self):
src = 'class Sample(object):\n pass\na_var = [Sample()]\n'
scope = libutils.get_string_scope(self.project, src)
sample_class = scope['Sample'].get_object()
a_var = scope['a_var'].get_object()
self.assertNotEquals(sample_class, a_var.get_type())
def test_attributed_object_inference(self):
src = 'class Sample(object):\n' \
' def __init__(self):\n self.a_var = None\n' \
' def set(self):\n self.a_var = Sample()\n'
scope = libutils.get_string_scope(self.project, src)
sample_class = scope['Sample'].get_object()
a_var = sample_class['a_var'].get_object()
self.assertEquals(sample_class, a_var.get_type())
def test_getting_property_attributes(self):
src = 'class A(object):\n pass\n' \
'def f(*args):\n return A()\n' \
'class B(object):\n p = property(f)\n' \
'a_var = B().p\n'
pymod = libutils.get_string_module(self.project, src)
a_class = pymod['A'].get_object()
a_var = pymod['a_var'].get_object()
self.assertEquals(a_class, a_var.get_type())
def test_getting_property_attributes_with_method_getters(self):
src = 'class A(object):\n pass\n' \
'class B(object):\n def p_get(self):\n return A()\n' \
' p = property(p_get)\n' \
'a_var = B().p\n'
pymod = libutils.get_string_module(self.project, src)
a_class = pymod['A'].get_object()
a_var = pymod['a_var'].get_object()
self.assertEquals(a_class, a_var.get_type())
def test_lambda_functions(self):
code = 'class C(object):\n pass\n' \
'l = lambda: C()\na_var = l()'
mod = libutils.get_string_module(self.project, code)
c_class = mod['C'].get_object()
a_var = mod['a_var'].get_object()
self.assertEquals(c_class, a_var.get_type())
def test_mixing_subscript_with_tuple_assigns(self):
code = 'class C(object):\n attr = 0\n' \
'd = {}\nd[0], b = (0, C())\n'
mod = libutils.get_string_module(self.project, code)
c_class = mod['C'].get_object()
a_var = mod['b'].get_object()
self.assertEquals(c_class, a_var.get_type())
def test_mixing_ass_attr_with_tuple_assignment(self):
code = 'class C(object):\n attr = 0\n' \
'c = C()\nc.attr, b = (0, C())\n'
mod = libutils.get_string_module(self.project, code)
c_class = mod['C'].get_object()
        a_var = mod['b'].get_object()
        self.assertEquals(c_class, a_var.get_type())
|
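The tests above all follow one pattern: build a scope from source text, then compare inferred objects. A standalone sketch using the same helpers:

from rope.base import libutils
from ropetest import testutils

project = testutils.sample_project()
code = 'class A(object):\n    pass\na = A()\n'
scope = libutils.get_string_scope(project, code)
assert scope['a'].get_object().get_type() == scope['A'].get_object()
testutils.remove_project(project)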
jaraco/aspen
|
tests/test_website.py
|
Python
|
mit
| 14,886
| 0.00477
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os
import StringIO
from pytest import raises
from aspen.website import Website
from aspen.http.response import Response
from aspen.exceptions import BadLocation
simple_error_spt = """
[---]
[---] text/plain via stdlib_format
{response.body}
"""
# Tests
# =====
def test_basic():
website = Website()
expected = os.getcwd()
actual = website.www_root
assert actual == expected
def test_normal_response_is_returned(harness):
harness.fs.www.mk(('index.html', "Greetings, program!"))
expected = '\r\n'.join("""\
HTTP/1.1
Content-Type: text/html
Greetings, program!
""".
|
splitlines())
actual = harness.client.GET()._to_http('1.1')
assert actual == expected
def test_fatal_error_response_is_returned(harness):
harness.fs.www.mk(('index.html.spt', "[---]\nraise heck\n[---]\n"))
expected = 500
actual = harness.client.GET(raise_immediately=False).code
assert actual == expected
def test_redirect_has_only_location(harness):
harness.fs.www.mk(('index.html.spt', """
from aspen import Response
[---]
website.redirect('http://elsewhere', code=304)
[---]"""))
actual = harness.client.GET(raise_immediately=False)
assert actual.code == 304
headers = actual.headers
assert headers.keys() == ['Location']
def test_nice_error_response_is_returned(harness):
harness.short_circuit = False
harness.fs.www.mk(('index.html.spt', """
from aspen import Response
[---]
raise Response(500)
[---]"""))
assert harness.client.GET(raise_immediately=False).code == 500
def test_nice_error_response_is_returned_for_404(harness):
harness.fs.www.mk(('index.html.spt', """
from aspen import Response
[---]
raise Response(404)
[---]"""))
assert harness.client.GET(raise_immediately=False).code == 404
def test_response_body_doesnt_expose_traceback_by_default(harness):
harness.fs.project.mk(('error.spt', simple_error_spt))
harness.fs.www.mk(('index.html.spt', """
[---]
raise Exception("Can I haz traceback ?")
[---]"""))
response = harness.client.GET(raise_immediately=False)
assert response.code == 500
assert "Can I haz traceback ?" not in response.body
def test_response_body_exposes_traceback_for_show_tracebacks(harness):
harness.client.website.show_tracebacks = True
harness.fs.project.mk(('error.spt', simple_error_spt))
harness.fs.www.mk(('index.html.spt', """
[---]
raise Exception("Can I haz traceback ?")
[---]"""))
response = harness.client.GET(raise_immediately=False)
assert response.code == 500
assert "Can I haz traceback ?" in response.body
def test_default_error_simplate_doesnt_expose_raised_body_by_default(harness):
harness.fs.www.mk(('index.html.spt', """
from aspen import Response
[---]
raise Response(404, "Um, yeah.")
[---]"""))
response = harness.client.GET(raise_immediately=False)
assert response.code == 404
assert "Um, yeah." not in response.body
def test_default_error_simplate_exposes_raised_body_for_show_tracebacks(harness):
harness.client.website.show_tracebacks = True
harness.fs.www.mk(('index.html.spt', """
from aspen import Response
[---]
raise Response(404, "Um, yeah.")
[---]"""))
response = harness.client.GET(raise_immediately=False)
assert response.code == 404
assert "Um, yeah." in response.body
def test_nice_error_response_can_come_from_user_error_spt(harness):
harness.fs.project.mk(('error.spt', '[---]\n[---] text/plain\nTold ya.'))
harness.fs.www.mk(('index.html.spt', """
from aspen import Response
[---]
raise Response(420)
[---]"""))
response = harness.client.GET(raise_immediately=False)
assert response.code == 420
assert response.body == 'Told ya.'
def test_nice_error_response_can_come_from_user_420_spt(harness):
harness.fs.project.mk(('420.spt', """
[---]
msg = "Enhance your calm." if response.code == 420 else "Ok."
[---] text/plain
%(msg)s"""))
harness.fs.www.mk(('index.html.spt', """
from aspen import Response
[---]
raise Response(420)
[---]"""))
response = harness.client.GET(raise_immediately=False)
assert response.code == 420
assert response.body == 'Enhance your calm.'
def test_delegate_error_to_simplate_respects_original_accept_header(harness):
harness.fs.project.mk(('error.spt', """[---]
[---] text/fake
Lorem ipsum
[---] text/html
<p>Error</p>
[---] text/plain
Error
"""))
harness.fs.www.mk(('foo.spt',"""
from aspen import Response
[---]
raise Response(404)
[---] text/plain
"""))
response = harness.client.GET('/foo', raise_immediately=False, HTTP_ACCEPT=b'text/fake')
assert response.code == 404
assert 'text/fake' in response.headers['Content-Type']
def test_default_error_spt_handles_text_html(harness):
harness.fs.www.mk(('foo.html.spt',"""
from aspen import Response
[---]
raise Response(404)
[---]
"""))
response = harness.client.GET('/foo.html', raise_immediately=False)
assert response.code == 404
assert 'text/html' in response.headers['Content-Type']
def test_default_error_spt_handles_application_json(harness):
harness.fs.www.mk(('foo.json.spt',"""
from aspen import Response
[---]
raise Response(404)
[---]
"""))
response = harness.client.GET('/foo.json', raise_immediately=False)
assert response.code == 404
assert response.headers['Content-Type'] == 'application/json'
assert response.body == '''\
{ "error_code": 404
, "error_message_short": "Not Found"
, "error_message_long": ""
}
'''
def test_default_error_spt_application_json_includes_msg_for_show_tracebacks(harness):
harness.client.website.show_tracebacks = True
harness.fs.www.mk(('foo.json.spt',"""
from aspen import Response
[---]
raise Response(404, "Right, sooo...")
[---]
"""))
response = harness.client.GET('/foo.json', raise_immediately=False)
assert response.code == 404
assert response.headers['Content-Type'] == 'application/json'
assert response.body == '''\
{ "error_code": 404
, "error_message_short": "Not Found"
, "error_message_long": "Right, sooo..."
}
'''
def test_default_error_spt_falls_through_to_text_plain(harness):
harness.fs.www.mk(('foo.xml.spt',"""
from aspen import Response
[---]
raise Response(404)
[---]
"""))
response = harness.client.GET('/foo.xml', raise_immediately=False)
assert response.code == 404
assert response.headers['Content-Type'] == 'text/plain; charset=UTF-8'
assert response.body == "Not found, program!\n\n"
def test_default_error_spt_fall_through_includes_msg_for_show_tracebacks(harness):
harness.client.website.show_tracebacks = True
harness.fs.www.mk(('foo.xml.spt',"""
from aspen import Response
[---]
raise Response(404, "Try again!")
[---]
"""))
response = harness.client.GET('/foo.xml', raise_immediately=False)
assert response.code == 404
assert response.headers['Content-Type'] == 'text/plain; charset=UTF-8'
assert response.body == "Not found, program!\nTry again!\n"
def test_custom_error_spt_without_text_plain_results_in_406(harness):
harness.fs.project.mk(('error.spt', """
[---]
[---] text/html
<h1>Oh no!</h1>
"""))
harness.fs.www.mk(('foo.xml.spt',"""
from aspen import Response
[---]
raise Response(404)
[---]
"""))
response = harness.client.GET('/foo.xml', raise_immediately=False)
assert response.code == 406
def test_custom_error_spt_with_text_plain_works(harness):
harness.fs.project.mk(('error.spt', """
[---]
[---] text/plain
Oh no!
"""))
harness.fs.www.mk(('foo.xml.spt',"""
from aspen import Response
[---]
raise Response(404)
[---]
"""))
response = harness.client.GET('/foo.xml', raise_immediately=False)
assert response.code == 404
assert response.headers['Content-Type'] == 'text/plain; charset=UTF-8'
assert response.body == "Oh no!\n"
def test_autoindex_response_is_404_by_default(harness):
harness.fs.www.mk(('README', "Greetings, program!"))
assert harness.client.GET(raise_immediately=False).code == 404
def test_autoindex_response_is_returned(harness):
    harness.fs.www.mk(('README', "Greetings, program!"))
|
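The leading-comma JSON body asserted in the application/json tests above is unusual formatting but valid JSON; a quick standalone check:

import json

body = '''\
{ "error_code": 404
, "error_message_short": "Not Found"
, "error_message_long": ""
}
'''
assert json.loads(body)['error_code'] == 404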
jarodwilson/atomic-reactor
|
tests/docker_mock.py
|
Python
|
bsd-3-clause
| 14,501
| 0.002
|
"""
Copyright (c) 2015 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from __future__ import unicode_literals
import os
import docker
from flexmock import flexmock
import requests
from atomic_reactor.constants import DOCKER_SOCKET_PATH
from atomic_reactor.util import ImageName
from tests.constants import COMMAND, IMPORTED_IMAGE_ID
old_ope = os.path.exists
mock_containers = \
[{'Created': 1430292310,
'Image': 'fedora',
'Names': ['/goofy_mayer'],
'Command': '/bin/bash',
'Id': 'f8ee920b2db5e802da2583a13a4edbf0523ca5fff6b6d6454c1fd6db5f38014d',
'Status': 'Up 2 seconds'},
{'Created': 1430293290,
'Image': 'busybox:latest',
'Names': ['/boring_mestorf'],
'Id': '105026325ff668ccf4dc2bcf4f009ea35f2c6a933a778993e6fad3c50173aaab',
'Command': COMMAND}]
mock_image = \
{'Created': 1414577076,
'Id': '3ab9a7ed8a169ab89b09fb3e12a14a390d3c662703b65b4541c0c7bde0ee97eb',
'ParentId': 'a79ad4dac406fcf85b9c7315fe08de5b620c1f7a12f45c8185c843f4b4a49c4e',
'RepoTags': ['buildroot-fedora:latest'],
'Size': 0,
'VirtualSize': 856564160}
mock_images = None
mock_logs = 'uid=0(root) gid=0(root) groups=10(wheel)'
mock_build_logs = \
[{"stream": "Step 0 : FROM fedora:latest"},
{"status": "Pulling from fedora", "id": "latest"},
{"status": "Digest: sha256:c63476a082b960f6264e59ef0ff93a9169eac8daf59e24805e0382afdcc9082f"}, # noqa
{"status": "Status: Image is up to date for fedora:latest"},
{"stream": "Step 1 : RUN uname -a && env"},
{"stream": " ---> Running in 3600c91d1c40"},
{"stream": "Removing intermediate container 3600c91d1c40"},
{"stream": "Successfully built 1793c2380436"}]
mock_build_logs_failed = mock_build_logs + \
[{"errorDetail": {"code": 2, "message":
"The command &{[/bin/sh -c ls -lha /a/b/c]} returned a non-zero code: 2"},
"error": "The command &{[/bin/sh -c ls -lha /a/b/c]} returned a non-zero code: 2"}] # noqa
mock_pull_logs = \
[{"stream": "Trying to pull repository localhost:5000/busybox ..."},
{"status": "Pulling image (latest) from localhost:5000/busybox", "progressDetail": {}, "id": "8c2e06607696"}, # noqa
{"status": "Download complete", "progressDetail": {}, "id": "8c2e06607696"},
{"status": "Status: Image is up to date for localhost:5000/busybox:latest"}]
mock_pull_logs_failed = \
[{"errorDetail": {"message": "Error: image ***:latest not found"}, "error": "Error: image ***:latest not found"}] # noqa
mock_push_logs = \
[{"status": "The push refers to a repository [localhost:5000/busybox] (len: 1)"},
{"status": "Image already exists", "progressDetail": {}, "id": "17583c7dd0da"},
{"status": "Image already exists", "progressDetail": {}, "id": "d1592a710ac3"},
{"status": "latest: digest: sha256:afe8a267153784d570bfea7d22699c612a61f984e2b9a93135660bb85a3113cf size: 2735"}] # noqa
mock_push_logs_failed = \
[{"status": "The push refers to a repository [localhost:5000/busybox] (len: 1)"},
{"status": "Sending image list"},
{"errorDetail": {"message": "Put http://localhost:5000/v1/repositories/busybox/: dial tcp [::1]:5000: getsockopt: connection refused"}, "error": "Put http://localhost:5000/v1/repositories/busybox/: dial tcp [::1]:5000: getsockopt: connection refused"}] # noqa
mock_info = {
'BridgeNfIp6tables': True,
'BridgeNfIptables': True,
'Containers': 18,
'CpuCfsPeriod': True,
'CpuCfsQuota': True,
'Debug': False,
'DockerRootDir': '/var/lib/docker',
'Driver': 'overlay',
'DriverStatus': [['Backing Filesystem', 'xfs']],
'ExecutionDriver': 'native-0.2',
'ExperimentalBuild': False,
'HttpProxy': '',
'HttpsProxy': '',
'ID': 'YC7N:MYIE:6SEL:JYLU:SRIG:PCVV:APZD:WTH4:4MGR:N4BG:CT53:ZW2O',
'IPv4Forwarding': True,
'Images': 162,
'IndexServerAddress': 'https://index.docker.io/v1/',
'InitPath': '/usr/libexec/docker/dockerinit',
'InitSha1': 'eb5677df79a87639f30ab5c2c01e5170abc96af2',
'KernelVersion': '4.1.4-200.fc22.x86_64',
'Labels': None,
'LoggingDriver': 'json-file',
'MemTotal': 12285665280,
'MemoryLimit': True,
'NCPU': 4,
'NEventsListener': 0,
'NFd': 15,
'NGoroutines': 31,
'Name': 'the-build-host',
'NoProxy': '',
'OomKillDisable': True,
'OperatingSystem': 'Fedora 24 (Rawhide) (containerized)',
'RegistryConfig': {'IndexConfigs': {'127.0.0.1:5000': {'Mirrors': [],
'Name': '127.0.0.1:5000',
'Official': False,
'Secure': False},
'172.17.0.1:5000': {'Mirrors': [],
'Name': '172.17.0.1:5000',
'Official': False,
'Secure': False},
'172.17.0.2:5000': {'Mirrors': [],
'Name': '172.17.0.2:5000',
'Official': False,
'Secure': False},
'172.17.0.3:5000': {'Mirrors': [],
'Name': '172.17.0.3:5000',
'Official': False,
'Secure': False},
'docker.io': {'Mirrors': None,
'Name': 'docker.io',
'Official': True,
'Secure': True}
},
'InsecureRegistryCIDRs': ['127.0.0.0/8'], 'Mirrors': None},
'SwapLimit': True,
'SystemTime': '2015-09-15T16:38:50.585211559+02:00'
}
mock_version = {
'ApiVersion': '1.21',
'Arch': 'amd64',
'BuildTime': 'Thu Sep 10 17:53:19 UTC 2015',
'GitCommit': 'af9b534-dirty',
'GoVersion': 'go1.5.1',
'KernelVersion': '4.1.4-200.fc22.x86_64',
'Os': 'linux',
'Version': '1.9.0-dev-fc24'
}
mock_import_image = '{"status": "%s"}' % IMPORTED_IMAGE_ID
mock_inspect_container = {
'Id': 'f8ee920b2db5e802da2583a13a4edbf0523ca5fff6b6d6454c1fd6db5f38014d',
'Mounts': [
{
"Source": "/mnt/tmp",
"Destination": "/tmp",
"Mode": "",
"RW": True,
"Propagation": "rprivate",
"Name": "test"
},
{
"Source": "/mnt/conflict_exception",
"Destination": "/exception",
"Mode": "",
"RW": True,
"Propagation": "rprivate",
"Name": "conflict_exception"
},
{
"Source": "/mnt/real_exception",
"Destination": "/exception",
"Mode": "",
"RW": True,
"Propagation": "rprivate",
"Name": "real_exception"
},
{
"Source": "",
"Destination": "/skip_me",
"Mode": "",
"RW": True,
"Propagation": "rprivate",
"Name": "skip_me"
}
]
}
def _find_image(img, ignore_registry=False):
global mock_images
tagged_img = ImageName.parse(img).to_str(explicit_tag=True)
for im in mock_images:
im_name = im['RepoTags'][0]
if im_name == tagged_img:
return im
if ignore_registry:
im_name_wo_reg = ImageName.parse(im_name).to_str(registry=False)
if im_name_wo_reg == tagged_img:
return im
return None
def _docker_exception(code=404, content='not found', exc_class=docker.errors.APIError):
response = flexmo
|
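_find_image above matches images on their explicit-tag form and can optionally strip the registry; a sketch of the ImageName normalization it relies on (both calls appear in the code itself).

from atomic_reactor.util import ImageName

name = ImageName.parse('busybox')
print(name.to_str(explicit_tag=True))  # fills in the default tag, e.g. 'busybox:latest'
print(ImageName.parse('registry.example.com/busybox:1.0').to_str(registry=False))  # drops the registry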
HumanExposure/factotum
|
dashboard/models/data_source.py
|
Python
|
gpl-3.0
| 1,472
| 0.002717
|
from django.db import models
from .common_info import CommonInfo
from django.utils import timezone
from django.urls import reverse
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from django.core.validators import URLValidator
def validate_nonzero(value):
if value == 0:
raise ValidationError(
_("Quantity {} is not allowed".format(value)), params={"value": value}
)
class DataSource(CommonInfo):
"""A parent container for DataGroup objects"""
STATE_CHOICES = (
("AT", "Awaiting Triage"),
("IP", "In Progress"),
("CO", "Complete"),
("ST", "Stale"),
)
PRIORITY_CHOICES = (("HI", "High"), ("MD", "Medium"), ("LO", "Low"))
title = models.CharField(max_length=50)
url = models.CharField(max_length=150, blank=True, validators=[URLValidator()])
estimated_records = models.PositiveIntegerField(
default=47,
validators=[validate_nonzero],
help_text="Estimated number of documents that the data source will eventually contain.",
)
    state = models.CharField(max_length=2, choices=STATE_CHOICES, default="AT")
description = models.TextField(blank=True)
priority = models.CharField(max_length=2, choices=PRIORITY_CHOICES, default="HI")
def __str__(self):
return self.title
def get_absolute_url(self):
return reverse("data_source_edit", kwargs={"pk": self.pk})
|
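validate_nonzero above rejects only the literal 0; a quick illustrative sketch (assumes a configured Django environment, and imports the validator from the module path shown in this record's header):

from django.core.exceptions import ValidationError
from dashboard.models.data_source import validate_nonzero

try:
    validate_nonzero(0)
except ValidationError:
    print('0 is rejected')
validate_nonzero(47)  # any non-zero value passes silently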
grimoirelab/perceval
|
tests/mocked_package/nested_package/nested_backend_b.py
|
Python
|
gpl-3.0
| 1,241
| 0
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2020 Bitergia
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# Valerio Cosentino <valcos@bitergia.com>
#
from perceval.backend import (Backend,
BackendCommand)
class BackendB(Backend):
"""Mocked backend class used for testing"""
def __init__(self, origin, tag=None, archive=None):
super().__init__(origin, tag=tag, archive=archive)
class BackendCommandB(BackendCommand):
"""Mocked backend command class used for testing"""
BACKEND = BackendB
def __init__(self, *args):
super().__init__(*args)
|
apporc/nova
|
nova/tests/unit/image/test_glance.py
|
Python
|
apache-2.0
| 53,589
| 0.000187
|
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from six.moves import StringIO
import glanceclient.exc
import mock
from oslo_config import cfg
from oslo_utils import netutils
import six
import testtools
from nova import context
from nova import exception
from nova.image import glance
from nova import test
CONF = cfg.CONF
NOW_GLANCE_FORMAT = "2010-10-11T10:30:22.000000"
class tzinfo(datetime.tzinfo):
@staticmethod
def utcoffset(*args, **kwargs):
return datetime.timedelta()
NOW_DATETIME = datetime.datetime(2010, 10, 11, 10, 30, 22, tzinfo=tzinfo())
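# The zero-offset tzinfo above makes NOW_DATETIME timezone-aware, so it can
# compare equal to NOW_GLANCE_FORMAT once the glance timestamp string is
# parsed into an aware datetime.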
class TestConversions(test.NoDBTestCase):
def test_convert_timestamps_to_datetimes(self):
fixture = {'name': None,
'properties': {},
'status': None,
'is_public': None,
'created_at': NOW_GLANCE_FORMAT,
'updated_at': NOW_GLANCE_FORMAT,
'deleted_at': NOW_GLANCE_FORMAT}
result = glance._convert_timestamps_to_datetimes(fixture)
self.assertEqual(result['created_at'], NOW_DATETIME)
self.assertEqual(result['updated_at'], NOW_DATETIME)
self.assertEqual(result['deleted_at'], NOW_DATETIME)
def _test_extracting_missing_attributes(self, include_locations):
# Verify behavior from glance objects that are missing attributes
# TODO(jaypipes): Find a better way of testing this crappy
# glanceclient magic object stuff.
class MyFakeGlanceImage(object):
def __init__(self, metadata):
IMAGE_ATTRIBUTES = ['size', 'owner', 'id', 'created_at',
'updated_at', 'status', 'min_disk',
'min_ram', 'is_public']
raw = dict.fromkeys(IMAGE_ATTRIBUTES)
raw.update(metadata)
self.__dict__['raw'] = raw
def __getattr__(self, key):
try:
return self.__dict__['raw'][key]
except KeyError:
raise AttributeError(key)
def __setattr__(self, key, value):
try:
self.__dict__['raw'][key] = value
except KeyError:
raise AttributeError(key)
metadata = {
'id': 1,
'created_at': NOW_DATETIME,
'updated_at': NOW_DATETIME,
}
image = MyFakeGlanceImage(metadata)
observed = glance._extract_attributes(
image, include_locations=include_locations)
expected = {
'id': 1,
'name': None,
'is_public': None,
'size': None,
'min_disk': None,
'min_ram': None,
'disk_format': None,
'container_format': None,
'checksum': None,
'created_at': NOW_DATETIME,
'updated_at': NOW_DATETIME,
'deleted_at': None,
'deleted': None,
'status': None,
'properties': {},
'owner': None
}
if include_locations:
expected['locations'] = None
expected['direct_url'] = None
self.assertEqual(expected, observed)
def test_extracting_missing_attributes_include_locations(self):
self._test_extracting_missing_attributes(include_locations=True)
def test_extracting_missing_attributes_exclude_locations(self):
self._test_extracting_missing_attributes(include_locations=False)
class TestExceptionTranslations(test.NoDBTestCase):
def test_client_forbidden_to_imagenotauthed(self):
in_exc = glanceclient.exc.Forbidden('123')
out_exc = glance._translate_image_exception('123', in_exc)
self.assertIsInstance(out_exc, exception.ImageNotAuthorized)
def test_client_httpforbidden_converts_to_imagenotauthed(self):
in_exc = glanceclient.exc.HTTPForbidden('123')
out_exc = glance._translate_image_exception('123', in_exc)
self.assertIsInstance(out_exc, exception.ImageNotAuthorized)
def test_client_notfound_converts_to_imagenotfound(self):
in_exc = glanceclient.exc.NotFound('123')
out_exc = glance._translate_image_exception('123', in_exc)
self.assertIsInstance(out_exc, exception.ImageNotFound)
def test_client_httpnotfound_converts_to_imagenotfound(self):
in_exc = glanceclient.exc.HTTPNotFound('123')
out_exc = glance._translate_image_exception('123', in_exc)
self.assertIsInstance(out_exc, exception.ImageNotFound)
class TestGlanceSerializer(test.NoDBTestCase):
def test_serialize(self):
metadata = {'name': 'image1',
'is_public': True,
'foo': 'bar',
'properties': {
'prop1': 'propvalue1',
'mappings': [
{'virtual': 'aaa',
'device': 'bbb'},
{'virtual': 'xxx',
'device': 'yyy'}],
'block_device_mapping': [
{'virtual_device': 'fake',
'device_name': '/dev/fake'},
{'virtual_device': 'ephemeral0',
'device_name': '/dev/fake0'}]}}
# NOTE(tdurakov): Assertion of serialized objects won't work
# during using of random PYTHONHASHSEED. Assertion of
# serialized/deserialized object and initial one is enough
converted = glance._convert_to_string(metadata)
self.assertEqual(glance._convert_from_string(converted), metadata)
class TestGetImageService(test.NoDBTestCase):
@mock.patch.object(glance.GlanceClientWrapper, '__init__',
return_value=None)
def test_get_remote_service_from_id(self, gcwi_mocked):
id_or_uri = '123'
_ignored, image_id = glance.get_remote_image_service(
mock.sentinel.ctx, id_or_uri)
self.assertEqual(id_or_uri, image_id)
gcwi_mocked.assert_called_once_with()
@mock.patch.object(glance.GlanceClientWrapper, '__init__',
return_value=None)
    def test_get_remote_service_from_href(self, gcwi_mocked):
id_or_uri = 'http://127.0.0.1/123'
_ignored, image_id = glance.get_remote_image_service(
mock.sentinel.ctx, id_or_uri)
self.assertEqual('123', image_id)
gcwi_mocked.assert_called_once_with(context=mock.sentinel.ctx,
host='127.0.0.1',
port=80,
use_ssl=False)
class TestCreateGlanceClient(test.NoDBTestCase):
@mock.patch('oslo_utils.netutils.is_valid_ipv6')
@mock.patch('glanceclient.Client')
def test_headers_passed_glanceclient(self, init_mock, ipv6_mock):
self.flags(auth_strategy='keystone')
ipv6_mock.return_value = False
auth_token = 'token'
ctx = context.RequestContext('fake', 'fake', auth_token=auth_token)
host = 'host4'
port = 9295
use_ssl = False
expected_endpoint = 'http://host4:9295'
expected_params = {
'identity_headers': {
'X-Auth-Token': 'token',
'X-User-Id': 'fake',
'X-Roles': '',
'X-Tenant-Id': 'fake',
'X-Identity-Status': 'Confirmed'
},
'token':
|
GeKeShi/cluster-dp
|
cluster_image.py
|
Python
|
gpl-3.0
| 1,589
| 0.004405
|
# import os
import numpy as np
import matplotlib.pyplot as plt
import random
# cluster_dp_GPU = "./cluster_dp_GPU"
# os.system(cluster_dp_GPU)
input_file = raw_input("enter the input file name:")
result_file = raw_input("enter the result file name:")
location = []
# input_lable = []
for line in open("dataset/"+input_file, "r"):
# line = line.replace('-','')
items = line.strip("\n").split(",")
# input_lable.append(int(items.pop()))
tmp = []
for item in items:
tmp.append(float(item))
location.append(tmp)
location = np.array(location)
# input_lable = np.array(input_lable)
length = len(location)
print "data input complete"
result_lable = []
for line in open(result_file, "r"):
items = line.strip("\n").split(",")
result_lable.append(int(items.pop()))
print "result read complete"
R = range(256)
random.shuffle(R)
random.shuffle(R)
R = np.array(R) / 255.0
G = range(256)
random.shuffle(G)
random.shuffle(G)
G = np.array(G) / 255.0  # float division; integer /255 truncates to 0 under Python 2
B = range(256)
random.shuffle(B)
random.shuffle(B)
B = np.array(B) / 255.0
colors = []
for i in range(256):
colors.append((R[i], G[i], B[i]))
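# The shuffled 0-255 tables above act as a pseudo-random color map: the
# plotting loop below spreads each cluster label across them via
# (index*5, index*15, index*20) mod 255, so neighbouring labels still get
# visually distinct RGB triples.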
# plt.figure()
# for i in range(length):
# index = input_lable[i]
# plt.plot(location[i][0], location[i][1], color=(R[index*5%255],G[index*15%255],B[index*20%255]), marker='.')
# plt.xlabel('x'), plt.ylabel('y')
# plt.show()
# plt.close()
plt.figure()
for i in range(length):
index = result_lable[i]
plt.plot(location[i][0], location[i][1], color=(R[index*5%255],G[index*15%255],B[index*20%255]), marker='.')
plt.xlabel('x'), plt.ylabel('y')
plt.show()
|
skoslowski/gnuradio
|
gr-digital/python/digital/qa_clock_recovery_mm.py
|
Python
|
gpl-3.0
| 4,755
| 0.003785
|
#!/usr/bin/env python
#
# Copyright 2011-2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
import random
import cmath
from gnuradio import gr, gr_unittest, digital, blocks
class test_clock_recovery_mm(gr_unittest.TestCase):
def setUp(self):
random.seed(0)
self.tb = gr.top_block()
def tearDown(self):
self.tb = None
def test01(self):
# Test complex/complex version
omega = 2
gain_omega = 0.001
mu = 0.5
gain_mu = 0.01
omega_rel_lim = 0.001
self.test = digital.clock_recovery_mm_cc(omega, gain_omega,
mu, gain_mu,
omega_rel_lim)
data = 100*[complex(1, 1),]
self.src = blocks.vector_source_c(data, False)
self.snk = blocks.vector_sink_c()
self.tb.connect(self.src, self.test, self.snk)
self.tb.run()
expected_result = 100*[complex(0.99972, 0.99972)] # doesn't quite get to 1.0
dst_data = self.snk.data()
        # Only compare last Ncmp samples (skip the loop's convergence transient)
Ncmp = 30
len_e = len(expected_result)
len_d = len(dst_data)
expected_result = expected_result[len_e - Ncmp:]
dst_data = dst_data[len_d - Ncmp:]
#print expected_result
#print dst_data
self.assertComplexTuplesAlmostEqual(expected_result, dst_data, 5)
def test02(self):
# Test float/float version
omega = 2
gain_omega = 0.01
mu = 0.5
gain_mu = 0.01
omega_rel_lim = 0.001
self.test = digital.clock_recovery_mm_ff(omega, gain_omega,
mu, gain_mu,
omega_rel_lim)
data = 100*[1,]
self.src = blocks.vector_source_f(data, False)
self.snk = blocks.vector_sink_f()
self.tb.connect(self.src, self.test, self.snk)
self.tb.run()
expected_result = 100*[0.9997, ] # doesn't quite get to 1.0
dst_data = self.snk.data()
# Only compare last Ncmp samples
Ncmp = 30
len_e = len(expected_result)
len_d = len(dst_data)
expected_result = expected_result[len_e - Ncmp:]
dst_data = dst_data[len_d - Ncmp:]
#print expected_result
#print dst_data
self.assertFloatTuplesAlmostEqual(expected_result, dst_data, 4)
def test03(self):
# Test complex/complex version with varying input
omega = 2
gain_omega = 0.01
mu = 0.25
gain_mu = 0.01
omega_rel_lim = 0.0001
self.test = digital.clock_recovery_mm_cc(omega, gain_omega,
mu, gain_mu,
omega_rel_lim)
data = 1000*[complex(1, 1), complex(1, 1), complex(-1, -1), complex(-1, -1)]
self.src = blocks.vector_source_c(data, False)
self.snk = blocks.vector_sink_c()
self.tb.connect(self.src, self.test, self.snk)
self.tb.run()
expected_result = 1000*[complex(-1.2, -1.2), complex(1.2, 1.2)]
dst_data = self.snk.data()
# Only compare last Ncmp samples
Ncmp = 100
len_e = len(expected_result)
len_d = len(dst_data)
expected_result = expected_result[len_e - Ncmp:]
dst_data = dst_data[len_d - Ncmp:]
#print expected_result
#print dst_data
self.assertComplexTuplesAlmostEqual(expected_result, dst_data, 1)
def test04(self):
# Test float/float version
omega = 2
gain_omega = 0.01
mu = 0.25
gain_mu = 0.1
omega_rel_lim = 0.001
self.test = digital.clock_recovery_mm_ff(omega, gain_omega,
mu, gain_mu,
omega_rel_lim)
data = 1000*[1, 1, -1, -1]
self.src = blocks.vector_source_f(data, False)
self.snk = blocks.vector_sink_f()
self.tb.connect(self.src, self.test, self.snk)
self.tb.run()
expected_result = 1000*[-1.2, 1.2]
dst_data = self.snk.data()
# Only compare last Ncmp samples
Ncmp = 100
len_e = len(expected_result)
len_d = len(dst_data)
        expected_result = expected_result[len_e - Ncmp:]
dst_data = dst_data[len_d - Ncmp:]
#print expected_result
#print dst_data
self.assertFloatTuplesAlmostEqual(expected_result, dst_data, 1)
if __name__ == '__main__':
gr_unittest.run(test_clock_recovery_mm, "test_clock_recovery_mm.xml")
|
dana-i2cat/felix
|
ofam/src/src/ext/sfa/util/faults.py
|
Python
|
apache-2.0
| 11,639
| 0.01452
|
#----------------------------------------------------------------------
# Copyright (c) 2008 Board of Trustees, Princeton University
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and/or hardware specification (the "Work") to
# deal in the Work without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Work, and to permit persons to whom the Work
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Work.
#
# THE WORK IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE WORK OR THE USE OR OTHER DEALINGS
# IN THE WORK.
#----------------------------------------------------------------------
#
# SFA API faults
#
#
### $Id$
### $URL$
import xmlrpclib
class SfaFault(xmlrpclib.Fault):
def __init__(self, faultCode, faultString, extra = None):
if extra:
faultString += ": " + str(extra)
xmlrpclib.Fault.__init__(self, faultCode, faultString)
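# Example of the base fault's behaviour (a sketch; the fault codes used by the
# subclasses below range over 100-111):
#   SfaFault(100, "Invalid method foo", extra="bad interface")
# yields xmlrpclib.Fault(100, "Invalid method foo: bad interface"), i.e. the
# optional `extra` detail is appended to the fault string.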
class SfaInvalidAPIMethod(SfaFault):
def __init__(self, method, interface = None, extra = None):
faultString = "Invalid method " + method
if interface:
faultString += " for interface " + interface
SfaFault.__init__(self, 100, faultString, extra)
class SfaInvalidArgumentCount(SfaFault):
    def __init__(self, got, min, max=None, extra=None):
        # NOTE: defaulting `max` to `min` directly would bind the builtin
        # min() at definition time, so resolve the intended default here.
        if max is None:
            max = min
        if min != max:
expected = "%d-%d" % (min, max)
else:
expected = "%d" % min
faultString = "Expected %s arguments, got %d" % \
(expected, got)
SfaFault.__init__(self, 101, faultString, extra)
class SfaInvalidArgument(SfaFault):
def __init__(self, extra = None, name = None):
if name is not None:
faultString = "Invalid %s value" % name
else:
faultString = "Invalid argument"
SfaFault.__init__(self, 102, faultString, extra)
class SfaAuthenticationFailure(SfaFault):
def __init__(self, extra = None):
faultString = "Failed to authenticate call"
SfaFault.__init__(self, 103, faultString, extra)
class SfaDBError(SfaFault):
def __init__(self, extra = None):
faultString = "Database error"
SfaFault.__init__(self, 106, faultString, extra)
class SfaPermissionDenied(SfaFault):
def __init__(self, extra = None):
faultString = "Permission denied"
SfaFault.__init__(self, 108, faultString, extra)
class SfaNotImplemented(SfaFault):
def __init__(self, interface=None, extra = None):
faultString = "Not implemented"
if interface:
faultString += " at interface " + interface
SfaFault.__init__(self, 109, faultString, extra)
class SfaAPIError(SfaFault):
def __init__(self, extra = None):
faultString = "Internal API error"
SfaFault.__init__(self, 111, faultString, extra)
class MalformedHrnException(SfaFault):
def __init__(self, value, extra = None):
self.value = value
faultString = "Malformed HRN: %(value)s" % locals()
SfaFault.__init__(self, 102, faultString, extra)
def __str__(self):
return repr(self.value)
class TreeException(SfaFault):
def __init__(self, value, extra = None):
self.value = value
faultString = "Tree Exception: %(value)s, " % locals()
SfaFault.__init__(self, 111, faultString, extra)
def __str__(self):
return repr(self.value)
class NonExistingRecord(SfaFault):
def __init__(self, value, extra = None):
self.value = value
faultString = "Non exsiting record %(value)s, " % locals()
SfaFault.__init__(self, 111, faultString, extra)
def __str__(self):
return repr(self.value)
class ExistingRecord(SfaFault):
def __init__(self, value, extra = None):
self.value = value
faultString = "Existing record: %(value)s, " % locals()
SfaFault.__init__(self, 111, faultString, extra)
def __str__(self):
return repr(self.value)
class NonexistingCredType(SfaFault):
def __init__(self, value, extra = None):
self.value = value
faultString = "Non existing record: %(value)s, " % locals()
SfaFault.__init__(self, 111, faultString, extra)
def __str__(self):
return repr(self.value)
class NonexistingFile(SfaFault):
def __init__(self, value, extra = None):
self.value = value
faultString = "Non existing file: %(value)s, " % locals()
SfaFault.__init__(self, 111, faultString, extra)
def __str__(self):
return repr(self.value)
class InvalidRPCParams(SfaFault):
def __init__(self, value, extra = None):
self.value = value
faultString = "Invalid RPC Params: %(value)s, " % locals()
SfaFault.__init__(self, 102, faultString, extra)
def __str__(self):
return repr(self.value)
# SMBAKER exceptions follow
class ConnectionKeyGIDMismatch(SfaFault):
def __init__(self, value, extra = None):
self.value = value
faultString = "Connection
|
Key GID mismatch: %(value)s" % locals()
SfaFault.__init__(self, 102, faultString, extra)
def __str__(self):
return repr(self.value)
class MissingCallerGID(SfaFault):
def __init__(self, value, extra = None):
self.value = value
faultString = "Missing Caller GID: %(value)s" % locals()
SfaFault.__init__(self, 102, faultString, extra)
def __str__(self):
return repr(self.value)
class RecordNotFound(SfaFault):
def __init__(self, value, extra = None):
self.value = value
faultString = "Record not found: %(value)s" % locals()
SfaFault.__init__(self, 102, faultString, extra)
def __str__(self):
return repr(self.value)
class UnknownSfaType(SfaFault):
def __init__(self, value, extra = None):
self.value = value
faultString = "Unknown SFA Type: %(value)s" % locals()
SfaFault.__init__(self, 102, faultString, extra)
def __str__(self):
return repr(self.value)
class MissingAuthority(SfaFault):
def __init__(self, value, extra = None):
self.value = value
faultString = "Missing authority: %(value)s" % locals()
SfaFault.__init__(self, 102, faultString, extra)
def __str__(self):
return repr(self.value)
class PlanetLabRecordDoesNotExist(SfaFault):
def __init__(self, value, extra = None):
self.value = value
faultString = "PlanetLab record does not exist : %(value)s" % locals()
SfaFault.__init__(self, 102, faultString, extra)
def __str__(self):
return repr(self.value)
class PermissionError(SfaFault):
def __init__(self, value, extra = None):
self.value = value
faultString = "Permission error: %(value)s" % locals()
SfaFault.__init__(self, 108, faultString, extra)
def __str__(self):
return repr(self.value)
class InsufficientRights(SfaFault):
def __init__(self, value, extra = None):
self.value = value
faultString = "Insufficient rights: %(value)s" % locals()
SfaFault.__init__(self, 108, faultString, extra)
def __str__(self):
return repr(self.value)
class MissingDelegateBit(SfaFault):
def __init__(self, value, extra = None):
self.value = value
faultString = "Missing delegate bit: %(value)s" % locals()
SfaFault.__init__(self, 108, faultString, extra)
def __str__(self):
return repr(self.value)
class ChildRightsNotSubsetOfParent(SfaFault):
def __init__(self, value, extra = None):
self.value = value
        faultString = "Child rights not subset of parent: %(value)s" % locals()
        SfaFault.__init__(self, 108, faultString, extra)
    def __str__(self):
        return repr(self.value)
|
Emmanu12/Image-Classification-using-SVM
|
predict.py
|
Python
|
apache-2.0
| 4,390
| 0.023235
|
import cv2
import numpy
from PIL import Image
import numpy as np
import os
from matplotlib import pyplot as plt
bin_n = 16 # Number of bins
def hog(img):
    gx = cv2.Sobel(img, cv2.CV_32F, 1, 0)
gy = cv2.Sobel(img, cv2.CV_32F, 0, 1)
mag, ang = cv2.cartToPolar(gx, gy)
bins = np.int32(bin_n*ang/(2*np.pi)) # quantizing binvalues in (0...16)
bin_cells = bins[:10,:10], bins[10:,:10], bins[:10,10:], bins[10:,10:]
mag_cells = mag[:10,:10], mag[10:,:10], mag[:10,10:], mag[10:,10:]
hists = [np.bincount(b.ravel(), m.ravel(), bin_n) for b, m in zip(bin_cells, mag_cells)]
    hist = np.hstack(hists) # hist is a 64-element vector (4 cells x 16 bins)
return hist
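# Usage sketch (hypothetical path): hog() histograms gradient orientations
# from four image cells and concatenates them into one feature vector:
#   sample = cv2.resize(cv2.imread('some_image.jpg', 0), (400, 300))
#   features = hog(sample)  # 4 cells x 16 bins = 64 values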
print "OpenCV version : {0}".format(cv2.__version__)
svm_params = dict( kernel_type = cv2.SVM_LINEAR,
svm_type = cv2.SVM_C_SVC,
C=2.67, gamma=5.383 )
def predict_class(path):
pre_out=''
print type(pre_out)
training_set = []
test_set=[]
color_test_set=[]
training_labels=[]
###### SVM training ########################
svm = cv2.SVM()
svm.load('hog_svm_data1.dat')
###### Now testing HOG ########################
img = cv2.imread(path)
res=cv2.resize(img,(400,300))
h=hog(res)
test_set.append(h)
testData = np.float32(test_set)
result = svm.predict(testData)
if result==1:
pre_out+= 'Vehicle'
elif result==2:
pre_out+= 'Animal'
elif result==3:
pre_out+= 'Building'
###### Now testing Color ########################
svm1 = cv2.SVM()
svm1.load('color_svm_data.dat')
img = cv2.imread(path)
res=cv2.resize(img,(400,300))
crop_img = res[50:150, 100:200]
cv2.imwrite("d:/Emmanu/project-data/color-test.jpg", crop_img)
img = Image.open('d:/Emmanu/project-data/color-test.jpg')
img200=img.convert('RGBA')
arr= np.array(img200)
flat_arr= arr.ravel()
color_test_set.append(flat_arr)
testData = np.float32(color_test_set)
result = svm1.predict(testData)
if result==1:
pre_out+=' and '+ 'It has Red Shade'
elif result==2:
pre_out+=' and '+ 'It has Green Shade'
elif result==3:
pre_out+=' and '+ 'It has Blue Shade'
elif result==4:
pre_out+=' and '+ 'It has Black Shade'
elif result==5:
pre_out+=' and '+ 'It has Brown Shade'
elif result==6:
pre_out+=' and '+ 'It has Yellow Shade'
elif result==7:
pre_out+=' and '+ 'It has white Shade'
return pre_out
def predict_shape(path,val):
training_set = []
test_set=[]
test_set1=[]
color_test_set=[]
training_labels=[]
result_list=[]
###### SVM training ########################
svm = cv2.SVM()
svm.load('hog_svm_data1.dat')
svm1 = cv2.SVM()
svm1.load('hog_svm_data2.dat')
###### Now testing HOG ########################
img = cv2.imread(path)
res=cv2.resize(img,(400,300))
h=hog(res)
test_set.append(h)
testData = np.float32(test_set)
pre_shape = svm.predict(testData)
if val==3:
if pre_shape==2:
img = cv2.imread(path)
res=cv2.resize(img,(400,300))
h=hog(res)
test_set1.append(h)
testData = np.float32(test_set1)
pre_shape = svm1.predict(testData)
print 'inside'
return pre_shape
return pre_shape
def predict_color(path):
training_set = []
test_set=[]
color_test_set=[]
training_labels=[]
result_list=[]
###### Now testing Color ########################
svm1 = cv2.SVM()
svm1.load('color_svm_data.dat')
img = cv2.imread(path)
res=cv2.resize(img,(400,300))
crop_img = res[50:150, 100:200]
cv2.imwrite("d:/Emmanu/project-data/color-test.jpg", crop_img)
img = Image.open('d:/Emmanu/project-data/color-test.jpg')
img200=img.convert('RGBA')
arr= np.array(img200)
flat_arr= arr.ravel()
color_test_set.append(flat_arr)
testData = np.float32(color_test_set)
pre_color = svm1.predict(testData)
return pre_color
def main():
    print predict_shape('d:/Emmanu/project-data/tes.jpg', 3)  # predict_shape() needs a val argument; 3 exercises the refinement branch
if __name__ == '__main__':main()
|
norikra/norikra-client-python
|
setup.py
|
Python
|
mit
| 1,304
| 0.001534
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import os, sys
import pkg_resources
import norikraclient
long_description = open(os.path.join("README.rst")).read()
classifiers = [
"Development Status :: 3 - Alpha",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Topic :: Software Development :: Testing",
"Topic :: System :: Monitoring",
"Topic :: System :: Systems Administration",
]
requires = ['msgpack-python', 'requests']
deplinks = []
setup(
name='norikra-client-python',
version=norikraclient.__version__,
description='norikra-client-python library',
long_description=long_description,
classifiers=classifiers,
    keywords=['norikra', 'streaming', 'processing'],
author='WAKAYAMA Shirou',
author_email='shirou.faw at gmail.com',
url='http://github.com/shirou/norikra-client-python',
    download_url='http://pypi.python.org/pypi/norikra-client-python',
license='MIT License',
packages=find_packages(),
include_package_data=True,
    install_requires=requires,
dependency_links=deplinks,
entry_points={
'console_scripts': [
'norikra-client-py = norikraclient.command:main',
],
}
)
|