repo_name
stringlengths 5
100
| path
stringlengths 4
231
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 6
947k
| score
float64 0
0.34
| prefix
stringlengths 0
8.16k
| middle
stringlengths 3
512
| suffix
stringlengths 0
8.17k
|
|---|---|---|---|---|---|---|---|---|
bahmanh/word-rnn-tensorflow
|
utils.py
|
Python
|
mit
| 4,469
| 0.003359
|
# -*- coding: utf-8 -*-
import os
import collections
from six.moves import cPickle
import numpy as np
import re
import itertools
class TextLoader():
def __init__(self, data_dir, batch_size, seq_length):
self.data_dir = data_dir
self.batch_size = batch_size
self.seq_length = seq_length
input_file = os.path.join(data_dir, "input.txt")
vocab_file = os.path.join(data_dir, "vocab.pkl")
tensor_file = os.path.join(data_dir, "data.npy")
# Let's not read voca and data from file. We many change them.
if True or not (os.path.exists(vocab_file) and os.path.exists(tensor_file)):
print("reading text file")
self.preprocess(input_file, vocab_file, tensor_file)
else:
print("loading preprocessed files")
self.load_preprocessed(vocab_file, tensor_file)
self.create_batches()
self.reset_batch_pointer()
def clean_str(self, string):
"""
Tokenization/string cleaning for all datasets except for SST.
Original taken from https://github.com/yoonkim/CNN_sentence/blob/master/process_data
"""
string = re.sub(r"[^가-힣A-Za-z0-9(),!?\'\`]", " ", string)
string = re.sub(r"\'s", " \'s", string)
string = re.sub(r"\'ve", " \'ve", string)
string = re.sub(r"n\'t", " n\'t", string)
string = re.sub(r"\'re", " \'re", string)
string = re.sub(r"\'d", " \'d", string)
string = re.sub(r"\'ll", " \'ll", string)
string = re.sub(r",", " , ", string)
string = re.sub(r"!", " ! ", string)
string = re.sub(r"\(", " \( ", string)
string = re.sub(r"\)", " \) ", string)
string = re.sub(r"\?", " \? ", string)
string = re.sub(r"\s{2,}", " ", string)
return string.strip().lower()
def build_vocab(self, sentences):
"""
Builds a vocabulary mapping from word to index based on the sentences.
Returns vocabulary mapping and inverse vocabulary mapping.
"""
# Build vocabulary
word_counts = collections.Counter(sentences)
# Mapping from index to word
vocabulary_inv = [x[0] for x in word_counts.most_common()]
vocabulary_inv = list(sorted(vocabulary_inv))
|
# Mapping from word to index
vocabulary = {x: i for i, x in enumerate(vocabulary_inv)}
return [vocabulary, vocabulary_inv]
def preprocess(self, input_file, vocab_file, tensor_file):
with open(input_file, "r"
|
) as f:
data = f.read()
# Optional text cleaning or make them lower case, etc.
#data = self.clean_str(data)
x_text = data.split()
self.vocab, self.words = self.build_vocab(x_text)
self.vocab_size = len(self.words)
with open(vocab_file, 'wb') as f:
cPickle.dump(self.words, f)
#The same operation liek this [self.vocab[word] for word in x_text]
# index of words as our basic data
self.tensor = np.array(list(map(self.vocab.get, x_text)))
# Save the data to data.npy
np.save(tensor_file, self.tensor)
def load_preprocessed(self, vocab_file, tensor_file):
with open(vocab_file, 'rb') as f:
self.words = cPickle.load(f)
self.vocab_size = len(self.words)
self.vocab = dict(zip(self.words, range(len(self.words))))
self.tensor = np.load(tensor_file)
self.num_batches = int(self.tensor.size / (self.batch_size *
self.seq_length))
def create_batches(self):
self.num_batches = int(self.tensor.size / (self.batch_size *
self.seq_length))
if self.num_batches==0:
assert False, "Not enough data. Make seq_length and batch_size small."
self.tensor = self.tensor[:self.num_batches * self.batch_size * self.seq_length]
xdata = self.tensor
ydata = np.copy(self.tensor)
ydata[:-1] = xdata[1:]
ydata[-1] = xdata[0]
self.x_batches = np.split(xdata.reshape(self.batch_size, -1), self.num_batches, 1)
self.y_batches = np.split(ydata.reshape(self.batch_size, -1), self.num_batches, 1)
def next_batch(self):
x, y = self.x_batches[self.pointer], self.y_batches[self.pointer]
self.pointer += 1
return x, y
def reset_batch_pointer(self):
self.pointer = 0
|
Teddy-Schmitz/temperature_admin
|
models/__init__.py
|
Python
|
mit
| 58
| 0
|
""" Cont
|
ains the dat
|
abase models for the application.
"""
|
schleichdi2/OPENNFR-6.3-CORE
|
bitbake/lib/layerindexlib/cooker.py
|
Python
|
gpl-2.0
| 14,139
| 0.00488
|
# Copyright (C) 2016-2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: GPL-2.0-only
#
import logging
import json
from collections import OrderedDict, defaultdict
from urllib.parse import unquote, urlparse
import layerindexlib
import layerindexlib.plugin
logger = logging.getLogger('BitBake.layerindexlib.cooker')
import bb.utils
def plugin_init(plugins):
return CookerPlugin()
class CookerPlugin(layerindexlib.plugin.IndexPlugin):
def __init__(self):
self.type = "cooker"
self.server_connection = None
self.ui_module = None
self.server = None
def _run_command(self, command, path, default=None):
try:
result, _ = bb.process.run(command, cwd=path)
result = result.strip()
except bb.process.ExecutionError:
result = default
return result
def _handle_git_remote(self, remote):
if "://" not in remote:
if ':' in remote:
# This is assumed to be ssh
remote = "ssh://" + remote
else:
# This is assumed to be a file path
remote = "file://" + remote
return remote
def _get_bitbake_info(self):
"""Return a tuple of bitbake information"""
# Our path SHOULD be .../bitbake/lib/layerindex/cooker.py
bb_path = os.path.dirname(__file__) # .../bitbake/lib/layerindex/cooker.py
bb_path = os.path.dirname(bb_path) # .../bitbake/lib/layerindex
bb_path = os.path.dirname(bb_path) # .../bitbake/lib
bb_path = os.path.dirname(bb_path) # .../bitbake
bb_path = self._run_command('git rev-parse --show-toplevel', os.path.dirname(__file__), default=bb_path)
bb_branch = self._run_command('git rev-parse --abbrev-ref HEAD', bb_path, default="<unknown>")
bb_rev = self._run_command('git rev-parse HEAD', bb_path, default="<unknown>")
for remotes in self._run_command('git remote -v', bb_path, default="").split("\n"):
remote = remotes.split("\t")[1].split(" ")[0]
if "(fetch)" == remotes.split("\t")[1].split(" ")[1]:
bb_remote = self._handle_git_remote(remote)
break
else:
bb_remote = self._handle_git_remote(bb_path)
return (bb_remote, bb_branch, bb_rev, bb_path)
def _load_bblayers(self, branches=None):
"""Load the BBLAYERS and related collection information"""
d = self.layerindex.data
if not branches:
raise LayerIndexFetchError("No branches specified for _load_bblayers!")
index = layerindexlib.LayerIndexObj()
branchId = 0
index.branches = {}
layerItemId = 0
index.layerItems = {}
layerBranchId = 0
index.layerBranches = {}
bblayers = d.getVar('BBLAYERS').split()
if not bblayers:
# It's blank! Nothing to process...
return index
collections = d.getVar('BBFILE_COLLECTIONS')
layerconfs = d.varhistory.get_variable_items_files('BBFILE_COLLECTIONS', d)
bbfile_collections = {layer: os.path.dirname(os.path.dirname(path)) for layer, path in layerconfs.items()}
(_, bb_branch, _, _) = self._get_bitbake_info()
for branch in branches:
branchId += 1
index.branches[branchId] = layerindexlib.Branch(index, None)
index.branches[branchId].define_data(branchId, branch, bb_branch)
for entry in collections.split():
layerpath = entry
if entry in bbfile_collections:
layerpath = bbfile_collections[entry]
layername = d.getVar('BBLAYERS_LAYERINDEX_NAME_%s' % entry) or os.path.basename(layerpath)
layerversion = d.getVar('LAYERVERSION_%s' % entry) or ""
layerurl = self._handle_git_remote(layerpath)
layersubdir = ""
layerrev = "<unknown>"
layerbranch = "<unknown>"
if os.path.isdir(layerpath):
layerbasepath = self._run_command('git rev-parse --show-toplevel', layerpath, default=layerpath)
if os.path.abspath(layerpath) != os.path.abspath(layerbasepath):
layersubdir = os.path.abspath(layerpath)[len(layerbasepath) + 1:]
layerbranch = self._run_command('git rev-parse --abbrev-ref HEAD', layerpath, default="<unknown>")
layerrev = self._run_command('git rev-parse HEAD', layerpath, default="<unknown>")
for remotes in self._run_command('git remote -v', layerpath, default="").split("\n"):
if not remotes:
layerurl = self._handle_git_remote
|
(layerpath)
else:
remote = remotes.split("\t")[1].split(" ")[0]
if "(fetch)" == remotes.split("\t")[1].split(" ")[1]:
layerurl = self.
|
_handle_git_remote(remote)
break
layerItemId += 1
index.layerItems[layerItemId] = layerindexlib.LayerItem(index, None)
index.layerItems[layerItemId].define_data(layerItemId, layername, description=layerpath, vcs_url=layerurl)
for branchId in index.branches:
layerBranchId += 1
index.layerBranches[layerBranchId] = layerindexlib.LayerBranch(index, None)
index.layerBranches[layerBranchId].define_data(layerBranchId, entry, layerversion, layerItemId, branchId,
vcs_subdir=layersubdir, vcs_last_rev=layerrev, actual_branch=layerbranch)
return index
def load_index(self, url, load):
"""
Fetches layer information from a build configuration.
The return value is a dictionary containing API,
layer, branch, dependency, recipe, machine, distro, information.
url type should be 'cooker'.
url path is ignored
"""
up = urlparse(url)
if up.scheme != 'cooker':
raise layerindexlib.plugin.LayerIndexPluginUrlError(self.type, url)
d = self.layerindex.data
params = self.layerindex._parse_params(up.params)
# Only reason to pass a branch is to emulate them...
if 'branch' in params:
branches = params['branch'].split(',')
else:
branches = ['HEAD']
logger.debug(1, "Loading cooker data branches %s" % branches)
index = self._load_bblayers(branches=branches)
index.config = {}
index.config['TYPE'] = self.type
index.config['URL'] = url
if 'desc' in params:
index.config['DESCRIPTION'] = unquote(params['desc'])
else:
index.config['DESCRIPTION'] = 'local'
if 'cache' in params:
index.config['CACHE'] = params['cache']
index.config['BRANCH'] = branches
# ("layerDependencies", layerindexlib.LayerDependency)
layerDependencyId = 0
if "layerDependencies" in load:
index.layerDependencies = {}
for layerBranchId in index.layerBranches:
branchName = index.layerBranches[layerBranchId].branch.name
collection = index.layerBranches[layerBranchId].collection
def add_dependency(layerDependencyId, index, deps, required):
try:
depDict = bb.utils.explode_dep_versions2(deps)
except bb.utils.VersionStringException as vse:
bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
for dep, oplist in list(depDict.items()):
# We need to search ourselves, so use the _ version...
depLayerBranch = index.find_collection(dep, branches=[branchName])
if not depLayerBranch:
# Missing dependency?!
logger.error('Missing dependency %s (%s)' % (dep, branchName))
continue
# We assume that the opli
|
christianurich/VIBe2UrbanSim
|
3rdparty/opus/src/opus_matsim/sustain_city/tests/matsim_coupeling/matrix_test.py
|
Python
|
gpl-2.0
| 3,343
| 0.012564
|
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington and Kai Nagel
# See opus_core/LICENSE
import os
import opus_matsim.sustain_city.tests as test_dir
from opus_core.tests import opus_unittest
from opus_core.store.csv_storage import csv_storage
from urbansim.datasets.travel_data_dataset import TravelDataDataset
from numpy import *
import numpy
from opus_core.logger import logger
class MatrixTest(opus_unittest.OpusTestCase):
""" Testing access to travel data values stored in numpy arrays
"""
def setUp(self):
print "Entering setup"
# get sensitivity test path
self.test_dir_path = test_dir.__path__[0]
# get lo
|
cation to travel data table
self.input_directory = os.path.join( self.test_dir_path, 'data', 'travel_cost')
logger.log_status("input_directory: %s" % self.input_directory)
|
# check source file
if not os.path.exists( self.input_directory ):
raise('File not found! %s' % self.input_directory)
print "Leaving setup"
def test_run(self):
print "Entering test run"
# This test loads an exising travel data as a TravelDataSet (numpy array)
# and accesses single (pre-known) values to validate the conversion process
# (numpy array into standard python list).
#
# Here an example:
# my_list = [[1,2,3],
# [4,5,6],
# [7,8,9]]
#
# my_list[0][1] should be = 2
# my_list[2][2] should be = 9
table_name = 'travel_data'
travel_data_attribute = 'single_vehicle_to_work_travel_cost'
# location of pre-calculated MATSim travel costs
in_storage = csv_storage(storage_location = self.input_directory)
# create travel data set (travel costs)
travel_data_set = TravelDataDataset( in_storage=in_storage, in_table_name=table_name )
travel_data_attribute_mat = travel_data_set.get_attribute_as_matrix(travel_data_attribute, fill=31)
# converting from numpy array into a 2d list
travel_list = numpy.atleast_2d(travel_data_attribute_mat).tolist()
# get two values for validation
value1 = int(travel_list[1][1]) # should be = 0
value2 = int(travel_list[2][1]) # should be = 120
logger.log_status('First validation value should be 0. Current value is %i' % value1)
logger.log_status('Second validation value should be 120. Current value is %i' % value2)
self.assertTrue( value1 == 0 )
self.assertTrue( value2 == 120 )
# self.dump_travel_list(travel_list) # for debugging
print "Leaving test run"
def dump_travel_list(self, travel_list):
''' Dumping travel_list for debugging reasons...
'''
dest = os.path.join( os.environ['OPUS_HOME'], 'opus_matsim', 'tmp')
if not os.path.exists(dest):
os.makedirs(dest)
travel = os.path.join(dest, 'travelFile.txt')
f = open(travel, "w")
f.write( str(travel_list) )
f.close()
if __name__ == "__main__":
#mt = MatrixTest() # for debugging
#mt.test_run() # for debugging
opus_unittest.main()
|
stonescar/multi-user-blog
|
blogmods/handlers/new_post.py
|
Python
|
mit
| 772
| 0
|
from main_handler import Handler
from ..models import Posts
from .. import utils
class NewPost(Handler):
|
"""Handler for new post page"""
@utils.login_required
def get(self):
self.render("newpost.html")
@utils.login_required
def post(self
|
):
subject = self.request.get("subject")
content = self.request.get("content")
if subject and content:
p = Posts(subject=subject, content=content, author=self.user)
p.put()
self.redirect("/post/"+str(p.key().id()))
else:
error = "Subject and content is required"
self.render("newpost.html",
subject=subject,
content=content,
error=error)
|
larsmans/numpy
|
numpy/lib/tests/test_io.py
|
Python
|
bsd-3-clause
| 66,065
| 0.000802
|
from __future__ import division, absolute_import, print_function
import sys
import gzip
import os
import threading
from tempfile import mkstemp, NamedTemporaryFile
import time
import warnings
import gc
from io import BytesIO
from datetime import datetime
import numpy as np
import numpy.ma as ma
from numpy.lib._iotools import (ConverterError, ConverterLockError,
ConversionWarning)
from numpy.compat import asbytes, asbytes_nested, bytes, asstr
from nose import SkipTest
from numpy.ma.testutils import (
TestCase, assert_equal, assert_array_equal,
assert_raises, assert_raises_regex, run_module_suite
)
from numpy.testing import assert_warns, assert_, build_err_msg
from numpy.testing.utils import tempdir
class TextIO(BytesIO):
"""Helper IO class.
Writes encode strings to bytes if needed, reads return bytes.
This makes it easier to emulate files opened in binary mode
without needing to explicitly convert strings to bytes in
setting up the test data.
"""
def __init__(self, s=""):
BytesIO.__init__(self, asbytes(s))
def write(self, s):
BytesIO.write(self, asbytes(s))
def writelines(self, lines):
BytesIO.writelines(self, [asbytes(s) for s in lines])
MAJVER, MINVER = sys.version_info[:2]
IS_64BIT = sys.maxsize > 2**32
def strptime(s, fmt=None):
"""This function is available in the datetime module only
from Python >= 2.5.
"""
if sys.version_info[0] >= 3:
return datetime(*time.strptime(s.decode('latin1'), fmt)[:3])
else:
return datetime(*time.strptime(s, fmt)[:3])
class RoundtripTest(object):
def roundtrip(self, save_func, *args, **kwargs):
"""
save_func : callable
Function used to save arrays to file.
file_on_disk : bool
If true, store the file on disk, instead of in a
string buffer.
save_kwds : dict
Parameters passed to `save_func`.
load_kwds : dict
Parameters passed to `numpy.load`.
args : tuple of arrays
Arrays stored to file.
"""
save_kwds = kwargs.get('save_kwds', {})
load_kwds = kwargs.get('load_kwds', {})
file_on_disk = kwargs.get('file_on_disk', False)
if file_on_disk:
target_file = NamedTemporaryFile(delete=False)
load_file = target_file.name
else:
target_file = BytesIO()
load_file = target_file
try:
arr = args
save_func(target_file, *arr, **save_kwds)
target_file.flush()
target_file.seek(0)
if sys.platform == 'win32' and not isinstance(target_file, BytesIO):
target_file.close()
arr_reloaded = np.load(load_file, **load_kwds)
self.arr = arr
self.arr_reloaded = arr_reloaded
finally:
if not isinstance(target_file, BytesIO):
target_file.close()
# holds an open file descriptor so it can't be deleted on win
if not isinstance(arr_reloaded, np.lib.npyio.NpzFile):
os.remove(target_file.name)
def check_roundtrips(self, a):
self.roundtrip(a)
self.roundtrip(a, file_on_disk=True)
self.roundtrip(np.asfortranarray(a))
self.roundtrip(np.asfortranarray(a), file_on_disk=True)
if a.shape[0] > 1:
# neither C nor Fortran contiguous for 2D arrays or more
self.roundtrip(np.asfortranarray(a)[1:])
self.roundtrip(np.asfortranarray(a)[1:], file_on_disk=True)
def test_array(self):
a = np.array([], float)
self.check_roundtrips(a)
a = np.array([[1, 2], [3, 4]], float)
self.check_roundtrips(a)
a = np.array([[1, 2], [3, 4]], int)
self.check_roundtrips(a)
a = np.array([[1 + 5j, 2 + 6j], [3 + 7j, 4 + 8j]], dtype=np.csingle)
self.check_roundtrips(a)
a = np.array([[1 + 5j, 2 + 6j], [3 + 7j, 4 + 8j]], dtype=np.cdouble)
self.check_roundtrips(a)
def test_array_object(self):
if sys.version_info[:2] >= (2, 7):
a = np.array([], object)
self.check_roundtrips(a)
a = np.array([[1, 2], [3, 4]], object)
self.check_roundtrips(a)
# Fails with UnpicklingError: could not find MARK on Python 2.6
def test_1D(self):
a = np.array([1, 2, 3, 4], int)
self.roundtrip(a)
@np.testing.dec.knownfailureif(sys.platform == 'win32', "Fail on Win32")
def test_mmap(self):
a = np.array([[1, 2.5], [4, 7.3]])
self.roundtrip(a, file_on_disk=True, load_kwds={'mmap_mode': 'r'})
a = np.asfortranarray([[1, 2.5], [4, 7.3]])
self.roundtrip(a, file_on_disk=True, load_kwds={'mmap_mode': 'r'})
def test_record(self):
a = np.array([(1, 2), (3, 4)], dtype=[('x', 'i4'), ('y', 'i4')])
self.check_roundtrips(a)
def test_format_2_0(self):
dt = [(("%d" % i) * 100, float) for i in range(500)]
a = np.ones(1000, dtype=dt)
with warnings.catch_warnings(record=True):
warnings.filterwarnings('always', '', UserWarning)
self.check_roundtrips(a)
class TestSaveLoad(RoundtripTest, TestCase):
def roundtrip(self, *args, **kwargs):
RoundtripTest.roundtrip(self, np.save, *args, **kwargs)
assert_equal(self.arr[0], self.arr_reloaded)
assert_equal(self.arr[0].dtype, self.arr_reloaded.dtype)
assert_equal(self.arr[0].flags.fnc, self.arr_reloaded.flags.fnc)
class TestSavezLoad(RoundtripTest, TestCase):
def roundtrip(self, *args, **kwargs):
RoundtripTest.roundtrip(self, np.savez, *args, **kwargs)
try:
for n, arr in enumerate(self.arr):
reloaded = self.arr_reloaded['arr_%d' % n]
assert_equal(arr, reloaded)
assert_equal(arr.dtype, reloaded.dtype)
assert_equal(arr.flags.fnc, reloaded.flags.fnc)
finally:
# delete tempfile, must be done here on windows
if self.arr_reloaded.fid:
self.arr_reloaded.fid.close()
os.remove(self.arr_reloaded.fid.name)
@np.testing.dec.skipif(not IS_64BIT, "Works only with 64bit systems")
@np.testing.dec.slow
def test_big_arrays(self):
L = (1 << 31) + 100000
a = np.empty(L, dtype=np.uint8)
with tempdir(prefix="numpy_test_big_arrays_") as tmpdir:
tmp = os.path.join(tmpdir, "file.npz")
np.savez(tmp, a=a)
del a
npfile = np.load(tmp)
a = npfile['a']
npfile.close()
def test_multiple_arrays(self):
a = np.array([[1, 2], [3, 4]], float)
b = np.array([[1 + 2j, 2 + 7j], [3 - 6j, 4 + 12j]], complex)
self.roundtrip(a, b)
def test_named_arrays(self):
a = np.array([[1, 2], [3, 4]], float)
b = np.ar
|
ray([[1 + 2j, 2 + 7j], [3 - 6j, 4 + 12j]], complex)
c = BytesIO()
np.savez(c, file_a=a, file_b=b)
c.seek(0)
l = np.load(c)
assert_equal(a, l['file_a'])
assert_equal(b, l['file_b'])
def test_BagObj(self):
a = np.array([[1, 2], [3, 4]], float)
b = np.array([[1 + 2j, 2 + 7j], [3 - 6j, 4 + 12j]], complex)
|
c = BytesIO()
np.savez(c, file_a=a, file_b=b)
c.seek(0)
l = np.load(c)
assert_equal(sorted(dir(l.f)), ['file_a','file_b'])
assert_equal(a, l.f.file_a)
assert_equal(b, l.f.file_b)
def test_savez_filename_clashes(self):
# Test that issue #852 is fixed
# and savez functions in multithreaded environment
def writer(error_list):
fd, tmp = mkstemp(suffix='.npz')
os.close(fd)
try:
arr = np.random.randn(500, 500)
try:
np.savez(tmp, arr=arr)
except OSError as err:
error_list.append(err)
finally:
os.remove(tmp)
errors = []
threads = [thre
|
sthirugn/robottelo
|
tests/foreman/api/test_template_combination.py
|
Python
|
gpl-3.0
| 3,545
| 0
|
# -*- coding: utf-8 -*-
"""Tests for template combination
@Requirement: TemplateCombination
@CaseAutomation: Automated
@CaseLevel: Acceptance
@CaseComponent: API
@TestType: Functional
@CaseImportance: Medium
@Upstream: No
"""
from nailgun import entities
from requests.exceptions import HTTPError
from robottelo.decorators import skip_if_bug_open, tier1
from robottelo.test import APITestCase
class TemplateCombinationTestCase(APITestCase):
"""Implements TemplateCombination tests"""
@classmethod
def setUpClass(cls):
"""Create hostgroup and environment to be used on
TemplateCombination creation
"""
super(TemplateCombinationTestCase, cls).setUpClass()
cls.hostgroup = entities.HostGroup().create()
cls.env = entities.Environment().create()
@classmethod
def tearDownClass(cls):
"""Delete hostgroup and environment used on
TemplateCombination creation
"""
super(TemplateCombinationTestCase, cls).tearDownClass()
for entity in (cls.hostgroup, cls.env):
entity.delete()
def setUp(self):
"""Create ConfigTemplate and TemplateConfiguration for each test"""
super(TemplateCombinationTestCase, self).setUp()
self.template = entities.ConfigTemplate(
snippet=False,
template_combinations=[{
'hostgroup_id': self.hostgroup.id,
'environment_id': self.env.id
}])
self.template = self.template.create()
template_combination_dct = self.template.template_combinations[0]
self.template_combination = entities.TemplateCombination(
id=template_combination_dct['id'],
environment=self.env,
config_template=self.template,
hostgroup=self.hostgroup
)
def tearDown(self):
|
"""Delete ConfigTemplate used on tests"""
super(TemplateCombinationTestCase, self).tearDown()
# Clean combination if it is not already deleted
try:
self.template_combination.delete()
except HTTPError:
pass
self.template.delete()
@tier1
|
@skip_if_bug_open('bugzilla', 1369737)
def test_positive_get_combination(self):
"""Assert API template combination get method works.
@id: 2447674e-c37e-11e6-93cb-68f72889dc7f
@Setup: save a template combination
@Assert: TemplateCombination can be retrieved through API
"""
combination = self.template_combination.read()
self.assertIsInstance(combination, entities.TemplateCombination)
self.assertEqual(self.template.id, combination.config_template.id)
self.assertEqual(self.env.id, combination.environment.id)
self.assertEqual(self.hostgroup.id, combination.hostgroup.id)
@tier1
@skip_if_bug_open('bugzilla', 1369737)
def test_positive_delete_combination(self):
"""Assert API template combination delete method works.
@id: 3a5cb370-c5f6-11e6-bb2f-68f72889dc7f
@Setup: save a template combination
@Assert: TemplateCombination can be deleted through API
"""
combination = self.template_combination.read()
self.assertIsInstance(combination, entities.TemplateCombination)
self.assertEqual(1, len(self.template.read().template_combinations))
combination.delete()
self.assertRaises(HTTPError, combination.read)
self.assertEqual(0, len(self.template.read().template_combinations))
|
ubunteroz/foreman
|
foreman/utils/population.py
|
Python
|
gpl-3.0
| 30,432
| 0.006145
|
# foreman imports
import hashlib
from foreman.model import User, ForemanOptions, UserRoles, Case, UserCaseRoles, CaseType, CaseClassification, CaseStatus
from foreman.model import TaskType, Task, TaskStatus, UserTaskRoles, EvidenceType, Evidence, TaskUpload, EvidenceStatus
from foreman.model import EvidencePhotoUpload, Department, Team
from utils import session, config, ROOT_DIR
from random import randint
from os import path, mkdir, stat
import shutil
from datetime import datetime, timedelta
def create_admin_user():
admin = User("administrator", "changeme", "The", "Administrator", config.get('admin', 'admin_email'),
validated=True)
session.add(admin)
session.flush()
admin.team = Team.get(1)
admin.job_title = "Administrator"
admin_role = UserRoles(admin, "Administrator", False)
session.add(admin_role)
session.flush()
for role in UserRoles.roles:
if role != "Administrator":
new_role = UserRoles(admin, role, True)
session.add(new_role)
session.flush()
admin.add_change(admin)
session.flush()
session.commit()
return admin
def load_initial_values():
opts = ForemanOptions("%d %b %Y %H:%M:%S %Z", r"C:\Foreman", "DateNumericIncrement", "NumericIncrement",
"Your Company Name Here", "Investigation Team Name Here")
session.add(opts)
session.flush()
session.commit()
dep = Department("Forensics Department")
session.add(dep)
session.commit()
t = Team("Forensics Team", dep)
session.add(t)
session.commit()
def load_initial_values_test():
opts = ForemanOptions("%d %b %Y %H:%M:%S", r"C:\Foreman", "FromList", "NumericIncrement", "Wordwide Forensics Inc",
"Investigations & Digital Forensics Department", c_leading_zeros=3,
t_leading_zeros=2,
c_list_location=path.abspath(path.join(ROOT_DIR, "utils", "test_case_names.txt")))
session.add(opts)
session.flush()
session.commit()
deps = [('IT Security', ['Investigations & Digital Forensics', 'CERT Team', 'Security Operations Centre']),
('Human Resources', ['HR Complaints']), ('Internal Audit', ['Fraud Prevention', 'Investigations']),
('Legal', ['Litigation'])]
for department, teams in deps:
dep = Department(department)
session.add(dep)
session.commit()
for team in teams:
t = Team(team, dep)
session.add(t)
session.commit()
def create_test_investigators(admin):
u1 = User("holmess", "password", "Sherlock", "Holmes", "sherlock.holmes@example.org", validated=True)
u2 = User("barnabyt", "password", "Tom", "Barnaby", "thomas.barnaby@example.org", validated=True)
u3 = User("wexfordr", "password", "Reginald", "Wexford", "reginald.wexford@example.org", validated=True)
u4 = User("bergeracj", "password", "Jim", "Bergerac", "jim.bergerac@example.org", validated=True)
u5 = User("cagneyc", "password", "Christine", "Cagney", "christine.cagney@example.orgk", validated=True)
u6 = User("columbof", "password", "Frank", "Columbo", "frank.columbo@example.org", validated=True)
u7 = User("poiroth", "password", "Hercule", "Poirot", "hercule.poirot@example.org", validated=True)
u8 = User("frostj", "password", "Jack", "Frost", "jack.frost@example.org", validated=True)
u9 = User("huntg", "password", "Gene", "Hunt", "gene.hunt@example.org", validated=True)
u10 = User("lunds", "password", "Sarah", "Lund", "sarah.lund@example.org", validated=True)
u11 = User("mcnultyj", "password", "Jimmy", "McNulty", "james.mcnulty@example.org", validated=True)
u12 = User("montalbanos", "password", "Salvo", "Montalbano", "salvo.montalbano@example.org", validated=True)
u13 = User("morsee", "password", "Endeavour", "Morse", "endeavour.morse@example.org", validated=True)
u14 = User("rebusj", "password", "John", "Rebus", "john.rebus@example.org", validated=True)
u15 = User("taylorm", "password", "Mac", "Taylor", "mac.taylor@example.org", validated=True)
session.add(u1)
session.add(u2)
session.add(u3)
session.add(u4)
session.add(u5)
session.add(u6)
session.add(u7)
session.add(u8)
session.add(u9)
session.add(u10)
session.add(u11)
session.add(u12)
session.add(u13)
session.add(u14)
session.add(u15)
session.flush()
u1.add_change(admin)
u2.add_change(admin)
u3.add_change(admin)
u4.add_change(admin)
u5.add_change(admin)
u6.add_change(admin)
u7.add_change(admin)
u8.add_change(admin)
u9.add_change(admin)
u10.add_change(admin)
u11.add_change(admin)
u12.add_change(admin)
u13.add_change(admin)
u14.add_change(admin)
u15.add_change(admin)
session.flush()
session.commit()
investigators = [u1, u2, u3, u4, u5, u6, u7, u8, u9, u10, u11, u12, u13, u14, u15]
managers=[]
for u in investigators:
sen = randint(0,5)
if sen == 5:
u.job_title = "Forensic Investigations Manager"
managers.append(u)
elif sen == 3 or sen == 4:
u.job_title = "Senior Forensic Investigator"
else:
u.job_title = "Forensic Investigator"
u.team = Team.get_filter_by(team='Investigations & Digital Forensics').first()
u.add_change(admin)
ur1 = UserRoles(u, "Investigator", False)
ur2 = UserRoles(u, "QA", False)
ur3 = UserRoles(u, "Case Manager", True)
ur4 = UserRoles(u, "Requester", True)
ur5 = UserRoles(u, "Authoriser", True)
ur6 = UserRoles(u, "Administrator", True)
session.add(ur1)
session.add(ur2)
session.add(ur3)
session.add(ur4)
session.add(ur5)
session.add(ur6)
session.flush()
ur1.add_change(admin)
ur2.add_change(admin)
ur3.add_change(admin)
ur4.add_change(admin)
ur5.add_change(admin)
ur6.add_change(admin)
session.flush()
session.commit()
for inv in investigators:
if inv in managers:
inv.manager = admin
else:
inv.manager = admin
if len(managers) > 1:
inv.manager = managers[randint(0, len(managers)-1)]
print "15 Investigators added to Foreman."
return investigators
def create_test_case_managers(admin):
u1 = User("gatesw", "password", "Bill", "Gates", "william.gates@example.org", validated=True)
u2 = User("heluc", "password", "Carlos", "Helu", "carlos.slim-helu@example.org", middle="Slim", validated=True)
u3 = User("geonaa", "password", "Amancio", "Gaona", "amancio.gaona@example.org", validated=True)
u4 = User("buffettw", "password", "Warren", "Buffett", "warren.buffett@example.org", validated=True)
u5 = User("desmaraisj", "password", "Jacqueline", "Desmarais", "jacqueline.desmarais@example.orgk", validated=True)
u6 = User("ellisonl", "password", "Larry", "Ellison", "larry.ellison@example.org", validated=True)
u7 = User("kochc", "password", "Charles", "Koch", "charles.koch@example.org", validated=True)
u8 = User("kochd", "password", "David", "Koch", "david.koch@example.org", validated=True)
u9 = User("adelson", "password", "Sheldon", "Adelson", "sheldon.adelson@example.org", validated=True)
u10 = User("walton", "password", "Christy", "Walton", "christy.walton@example.org", validated=True)
session.add(u1)
session.add(u2)
session.add(u3)
session.add(u4)
session.add(u5)
session.add(u6)
session.add(u7)
session.add(u8)
session.a
|
dd(u9)
sessio
|
n.add(u10)
session.flush()
u1.add_change(admin)
u2.add_change(admin)
u3.add_change(admin)
u4.add_change(admin)
u5.add_change(admin)
u6.add_change(admin)
u7.add_change(admin)
u8.add_change(admin)
u9.add_change(admin)
u10.add_change(admin)
session.flush()
session.commit()
case_managers = [u1, u2, u3, u4, u5, u6, u7, u8, u9, u10]
managers = []
for u in case_managers:
sen = randint(0,5)
if sen == 5:
|
chop-dbhi/varify-data-warehouse
|
vdw/variants/migrations/0012_auto__add_field_varianteffect_segment.py
|
Python
|
bsd-2-clause
| 17,850
| 0.008179
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'VariantEffect.segment'
db.add_column('variant_effect', 'segment',
self.gf('django.db.models.fields.CharField')(max_length=200, null=True, blank=True),
keep_default=False)
    def backwards(self, orm):
        """Reverse the migration: drop the 'segment' column from variant_effect."""
        # Deleting field 'VariantEffect.segment'
        db.delete_column('variant_effect', 'segment')
models = {
'genes.exon': {
'Meta': {'object_name': 'Exon', 'db_table': "'exon'"},
'end': ('django.db.models.fields.IntegerField', [], {}),
'gene': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genes.Gene']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'index': ('django.db.models.fields.IntegerField', [], {}),
'start': ('django.db.models.fields.IntegerField', [], {})
},
'genes.gene': {
'Meta': {'object_name': 'Gene', 'db_table': "'gene'"},
'articles': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['literature.PubMed']", 'db_table': "'gene_pubmed'", 'symmetrical': 'False'}),
'chr': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genome.Chromosome']"}),
'families': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['genes.GeneFamily']", 'symmetrical': 'False', 'blank': 'True'}),
'hgnc_id': ('django.db.models.fields.IntegerField',
|
[], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.TextField', [
|
], {'blank': 'True'}),
'phenotypes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['phenotypes.Phenotype']", 'through': "orm['genes.GenePhenotype']", 'symmetrical': 'False'}),
'symbol': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'synonyms': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['genes.Synonym']", 'db_table': "'gene_synonym'", 'symmetrical': 'False'})
},
'genes.genefamily': {
'Meta': {'object_name': 'GeneFamily', 'db_table': "'gene_family'"},
'description': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'tag': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True'})
},
'genes.genephenotype': {
'Meta': {'object_name': 'GenePhenotype', 'db_table': "'gene_phenotype'"},
'gene': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genes.Gene']"}),
'hgmd_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'phenotype': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['phenotypes.Phenotype']"})
},
'genes.synonym': {
'Meta': {'object_name': 'Synonym', 'db_table': "'synonym'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
},
'genes.transcript': {
'Meta': {'object_name': 'Transcript', 'db_table': "'transcript'"},
'coding_end': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'coding_end_status': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'coding_start': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'coding_start_status': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'end': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'exon_count': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'exons': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['genes.Exon']", 'db_table': "'transcript_exon'", 'symmetrical': 'False'}),
'gene': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['genes.Gene']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'refseq_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'start': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'strand': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'})
},
'genome.chromosome': {
'Meta': {'ordering': "['order']", 'object_name': 'Chromosome', 'db_table': "'chromosome'"},
'code': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '2', 'db_index': 'True'})
},
'literature.pubmed': {
'Meta': {'object_name': 'PubMed', 'db_table': "'pubmed'"},
'pmid': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'})
},
'phenotypes.phenotype': {
'Meta': {'object_name': 'Phenotype', 'db_table': "'phenotype'"},
'articles': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['literature.PubMed']", 'symmetrical': 'False'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'hpo_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'term': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '1000'})
},
'variants.effect': {
'Meta': {'ordering': "['order']", 'object_name': 'Effect', 'db_table': "'effect'"},
'code': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'impact': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['variants.EffectImpact']", 'null': 'True', 'blank': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'region': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['variants.EffectRegion']", 'null': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'variants.effectimpact': {
'Meta': {'ordering': "['order']", 'object_name': 'EffectImpact', 'db_table': "'effect_impact'"},
'code': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'variants.effec
|
jeroanan/GameCollection
|
UI/Handlers/Exceptions/UnrecognisedHandlerException.py
|
Python
|
gpl-3.0
| 56
| 0
|
class UnrecognisedHandlerException(Exception):
    """Signals that a requested handler name was not recognised."""
|
mikel-egana-aranguren/SADI-Galaxy-Docker
|
galaxy-dist/eggs/mercurial-2.2.3-py2.7-linux-x86_64-ucs4.egg/hgext/extdiff.py
|
Python
|
gpl-3.0
| 12,584
| 0.001271
|
# extdiff.py - external diff program support for mercurial
#
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
'''command to allow external programs to compare revisions
The extdiff Mercurial extension allows you to use external programs
to compare revisions, or revision with working directory. The external
diff programs are called with a configurable set of options and two
non-option arguments: paths to directories containing snapshots of
files to compare.
The extdiff extension also allows you to configure new diff commands, so
you do not need to type :hg:`extdiff -p kdiff3` always. ::
[extdiff]
# add new command that runs GNU diff(1) in 'context diff' mode
cdiff = gdiff -Nprc5
## or the old way:
#cmd.cdiff = gdiff
#opts.cdiff = -Nprc5
# add new command called vdiff, runs kdiff3
vdiff = kdiff3
# add new command called meld, runs meld (no need to name twice)
meld =
# add new command called vimdiff, runs gvimdiff with DirDiff plugin
# (see http://www.vim.org/scripts/script.php?script_id=102) Non
# English user, be sure to put "let g:DirDiffDynamicDiffText = 1" in
# your .vimrc
vimdiff = gvim -f "+next" \\
"+execute 'DirDiff' fnameescape(argv(0)) fnameescape(argv(1))"
Tool arguments can include variables that are expanded at runtime::
$parent1, $plabel1 - filename, descriptive label of first parent
$child, $clabel - filename, descriptive label of child revision
$parent2, $plabel2 - filename, descriptive label of second parent
$root - repository root
$parent is an alias for $parent1.
The extdiff extension will look in your [diff-tools] and [merge-tools]
sections for diff tool arguments, when none are specified in [extdiff].
::
[extdiff]
kdiff3 =
[diff-tools]
kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child
You can use -I/-X and list of file or directory names like normal
:hg:`diff` command. The extdiff extension makes snapshots of only
needed files, so running the external diff program will actually be
pretty fast (at least faster than having to compare the entire tree).
'''
from mercurial.i18n import _
from mercurial.node import short, nullid
from mercurial import scmutil, scmutil, util, commands, encoding
import os, shlex, shutil, tempfile, re
def snapshot(ui, repo, files, node, tmproot):
    '''snapshot files as of some revision

    if not using snapshot, -I/-X does not work and recursive diff
    in tools like kdiff3 and meld displays too many files.

    Returns (dirname, fns_and_mtime): dirname is the snapshot directory
    name created under tmproot; fns_and_mtime is a list of
    (snapshot_path, working_path, mtime) tuples, populated only for
    working-directory snapshots (node is None) so the caller can later
    detect files that the external tool modified.
    '''
    dirname = os.path.basename(repo.root)
    if dirname == "":
        dirname = "root"
    if node is not None:
        # Suffix with the short hash so snapshots of different revisions
        # do not collide inside the same tmproot.
        dirname = '%s.%s' % (dirname, short(node))
    base = os.path.join(tmproot, dirname)
    os.mkdir(base)
    if node is not None:
        ui.note(_('making snapshot of %d files from rev %s\n') %
                (len(files), short(node)))
    else:
        ui.note(_('making snapshot of %d files from working directory\n') %
                (len(files)))
    wopener = scmutil.opener(base)
    fns_and_mtime = []
    ctx = repo[node]
    for fn in files:
        # Convert to repo-internal (posix) path form.
        wfn = util.pconvert(fn)
        if not wfn in ctx:
            # File doesn't exist; could be a bogus modify
            continue
        ui.note(' %s\n' % wfn)
        dest = os.path.join(base, wfn)
        fctx = ctx[wfn]
        data = repo.wwritedata(wfn, fctx.data())
        if 'l' in fctx.flags():
            # Recreate symlinks as symlinks instead of copying targets.
            wopener.symlink(data, wfn)
        else:
            wopener.write(wfn, data)
            if 'x' in fctx.flags():
                # Restore the executable bit on the snapshot copy.
                util.setflags(dest, False, True)
        if node is None:
            # Record mtimes so the caller can copy back files that the
            # external diff tool modified in the working-dir snapshot.
            fns_and_mtime.append((dest, repo.wjoin(fn),
                                  os.lstat(dest).st_mtime))
    return dirname, fns_and_mtime
def dodiff(ui, repo, diffcmd, diffopts, pats, opts):
'''Do the actuall diff:
- copy to a temp structure if diffing 2 internal revisions
- copy to a temp structure if diffing working revision with
another one and more than 1 file is changed
- just invoke the diff for a single file in the working dir
'''
revs = opts.get('rev')
change = opts.get('change')
args = ' '.join(diffopts)
do3way = '$parent2' in args
if r
|
evs and change:
msg = _('cannot specify --rev and --change at the same time')
raise util.Abort(msg)
elif change:
node2 = scmutil.revsingle(repo, change, None).node()
node1a, node1b = repo.changelog.parents(node2)
else:
node1a, node2 = scmutil.revpair(repo, revs)
if n
|
ot revs:
node1b = repo.dirstate.p2()
else:
node1b = nullid
# Disable 3-way merge if there is only one parent
if do3way:
if node1b == nullid:
do3way = False
matcher = scmutil.match(repo[node2], pats, opts)
mod_a, add_a, rem_a = map(set, repo.status(node1a, node2, matcher)[:3])
if do3way:
mod_b, add_b, rem_b = map(set, repo.status(node1b, node2, matcher)[:3])
else:
mod_b, add_b, rem_b = set(), set(), set()
modadd = mod_a | add_a | mod_b | add_b
common = modadd | rem_a | rem_b
if not common:
return 0
tmproot = tempfile.mkdtemp(prefix='extdiff.')
try:
# Always make a copy of node1a (and node1b, if applicable)
dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a)
dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot)[0]
rev1a = '@%d' % repo[node1a].rev()
if do3way:
dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b)
dir1b = snapshot(ui, repo, dir1b_files, node1b, tmproot)[0]
rev1b = '@%d' % repo[node1b].rev()
else:
dir1b = None
rev1b = ''
fns_and_mtime = []
# If node2 in not the wc or there is >1 change, copy it
dir2root = ''
rev2 = ''
if node2:
dir2 = snapshot(ui, repo, modadd, node2, tmproot)[0]
rev2 = '@%d' % repo[node2].rev()
elif len(common) > 1:
#we only actually need to get the files to copy back to
#the working dir in this case (because the other cases
#are: diffing 2 revisions or single file -- in which case
#the file is already directly passed to the diff tool).
dir2, fns_and_mtime = snapshot(ui, repo, modadd, None, tmproot)
else:
# This lets the diff tool open the changed file directly
dir2 = ''
dir2root = repo.root
label1a = rev1a
label1b = rev1b
label2 = rev2
# If only one change, diff the files instead of the directories
# Handle bogus modifies correctly by checking if the files exist
if len(common) == 1:
common_file = util.localpath(common.pop())
dir1a = os.path.join(tmproot, dir1a, common_file)
label1a = common_file + rev1a
if not os.path.isfile(dir1a):
dir1a = os.devnull
if do3way:
dir1b = os.path.join(tmproot, dir1b, common_file)
label1b = common_file + rev1b
if not os.path.isfile(dir1b):
dir1b = os.devnull
dir2 = os.path.join(dir2root, dir2, common_file)
label2 = common_file + rev2
# Function to quote file/dir names in the argument string.
# When not operating in 3-way mode, an empty string is
# returned for parent2
replace = dict(parent=dir1a, parent1=dir1a, parent2=dir1b,
plabel1=label1a, plabel2=label1b,
clabel=label2, child=dir2,
root=repo.root)
def quote(match):
key = match.group()[1:]
if not do3way and key == 'parent2':
return ''
return util.shellquote(replace[key])
# Match parent2 first, so 'parent1?' will match both parent1 and parent
regex = '\$(parent2|parent1?|child|plabel1|plabel2|clabel|root)'
if not do3way and not re.
|
unibet/unbound-ec2
|
tests/unit/test_config.py
|
Python
|
isc
| 8,032
| 0.003984
|
import os
import ast
from tests import unittest
from unbound_ec2 import config
class TestConfig(unittest.TestCase):
    """Unit tests for config.UnboundEc2Conf.

    Exercises construction, default values, environment-variable
    overrides, and parsing of full/partial configuration files.
    """

    def setUp(self):
        # A fresh conf object per test; some tests replace it.
        self.config = config.UnboundEc2Conf()

    def tearDown(self):
        # Reset every environment variable a test may have overridden
        # back to the module defaults so tests stay order-independent.
        os.environ['UNBOUND_ZONE'] = config.DEFAULT_ZONE
        os.environ['UNBOUND_REVERSE_ZONE'] = config.DEFAULT_REVERSE_ZONE
        os.environ['UNBOUND_TTL'] = config.DEFAULT_TTL
        os.environ['UNBOUND_CACHE_TTL'] = config.DEFAULT_CACHE_TTL
        os.environ['UNBOUND_SERVER_TYPE'] = config.DEFAULT_SERVER_TYPE
        os.environ['UNBOUND_LOOKUP_TYPE'] = config.DEFAULT_LOOKUP_TYPE
        os.environ['UNBOUND_LOOKUP_TAG_NAME_INCLUDE_DOMAIN'] = config.DEFAULT_LOOKUP_TAG_NAME_INCLUDE_DOMAIN
        os.environ['AWS_DEFAULT_REGION'] = config.DEFAULT_AWS_REGION
        os.environ['UNBOUND_EC2_CONF'] = config.DEFAULT_CONF_FILE
        os.environ['UNBOUND_IP_ORDER'] = config.DEFAULT_IP_ORDER
        self.config = None

    def test_init(self):
        """A freshly built conf exposes all expected sections."""
        self.assertTrue(hasattr(self.config, 'ec2'))
        self.assertTrue(hasattr(self.config, 'main'))
        self.assertTrue(hasattr(self.config, 'lookup'))
        self.assertTrue(hasattr(self.config, 'lookup_filters'))
        self.assertTrue(hasattr(self.config, 'server'))

    def test_init_conf_file(self):
        """An explicitly passed conf-file path is stored verbatim."""
        fixture_conf_file = os.path.join(os.path.dirname(__file__), 'data', 'unbound_ec2.conf')
        self.config = config.UnboundEc2Conf(fixture_conf_file)
        self.assertEqual(self.config.conf_file, fixture_conf_file)

    def test_init_env_conf_file(self):
        """UNBOUND_EC2_CONF in the environment selects the conf file."""
        fixture_conf_file = os.path.join(os.path.dirname(__file__), 'data', 'unbound_ec2.conf')
        os.environ['UNBOUND_EC2_CONF'] = fixture_conf_file
        self.config = config.UnboundEc2Conf()
        self.assertEqual(self.config.conf_file, fixture_conf_file)

    def test_set_defaults(self):
        """set_defaults() populates every section with module defaults."""
        self.config.set_defaults()
        self.assertIn('aws_region', self.config.ec2)
        self.assertIn('zone', self.config.main)
        self.assertIn('reverse_zone', self.config.main)
        self.assertIn('ttl', self.config.main)
        self.assertIn('cache_ttl', self.config.main)
        self.assertIn('type', self.config.server)
        self.assertIn('type', self.config.lookup)
        self.assertIn('tag_name_include_domain', self.config.lookup)
        self.assertEqual(self.config.ec2['aws_region'], config.DEFAULT_AWS_REGION)
        self.assertEqual(self.config.main['zone'], config.DEFAULT_ZONE)
        self.assertEqual(self.config.main['reverse_zone'], config.DEFAULT_REVERSE_ZONE)
        self.assertEqual(self.config.main['ttl'], int(config.DEFAULT_TTL))
        self.assertEqual(self.config.main['ip_order'], config.DEFAULT_IP_ORDER)
        self.assertEqual(self.config.server['type'], config.DEFAULT_SERVER_TYPE)
        self.assertEqual(self.config.lookup['type'], config.DEFAULT_LOOKUP_TYPE)
        self.assertEqual(self.config.lookup['tag_name_include_domain'],
                         bool(config.DEFAULT_LOOKUP_TAG_NAME_INCLUDE_DOMAIN))
        self.assertEqual(self.config.lookup_filters, ast.literal_eval(config.DEFAULT_LOOKUP_FILTERS))

    def test_set_defaults_env_overwrite(self):
        """Environment variables take precedence over module defaults."""
        os.environ['UNBOUND_ZONE'] = 'BOGUS_TLD'
        os.environ['UNBOUND_REVERSE_ZONE'] = 'BOGUS_REVERSE_ZONE'
        os.environ['UNBOUND_TTL'] = 'BOGUS_TTL'
        os.environ['UNBOUND_CACHE_TTL'] = 'BOGUS_CACHE_TTL'
        os.environ['AWS_DEFAULT_REGION'] = 'BOGUS_AWS_REGION'
        os.environ['UNBOUND_SERVER_TYPE'] = 'BOGUS_SERVER_TYPE'
        os.environ['UNBOUND_LOOKUP_TYPE'] = 'BOGUS_LOOKUP_TYPE'
        os.environ['UNBOUND_LOOKUP_TAG_NAME_INCLUDE_DOMAIN'] = 'BOGUS_LOOKUP_TAG_NAME_INCLUDE_DOMAIN'
        os.environ['UNBOUND_IP_ORDER'] = 'BOGUS_IP_ORDER'
        self.config.set_defaults()
        self.assertEqual(self.config.ec2['aws_region'], 'BOGUS_AWS_REGION')
        self.assertEqual(self.config.main['zone'], 'BOGUS_TLD')
        self.assertEqual(self.config.main['reverse_zone'], 'BOGUS_REVERSE_ZONE')
        self.assertEqual(self.config.main['ttl'], 'BOGUS_TTL')
        self.assertEqual(self.config.main['cache_ttl'], 'BOGUS_CACHE_TTL')
        self.assertEqual(self.config.main['ip_order'], 'BOGUS_IP_ORDER')
        self.assertEqual(self.config.server['type'], 'BOGUS_SERVER_TYPE')
        self.assertEqual(self.config.lookup['type'], 'BOGUS_LOOKUP_TYPE')
        self.assertEqual(self.config.lookup['tag_name_include_domain'], 'BOGUS_LOOKUP_TAG_NAME_INCLUDE_DOMAIN')
        self.assertEqual(self.config.lookup_filters, ast.literal_eval(config.DEFAULT_LOOKUP_FILTERS))

    def test_parse_full(self):
        """A complete conf file overrides every section."""
        fixture_conf_file = os.path.join(os.path.dirname(__file__), 'data', 'unbound_ec2_full.conf')
        self.config = config.UnboundEc2Conf(fixture_conf_file)
        self.assertTrue(self.config.parse())
        self.assertIn('aws_region', self.config.ec2)
        self.assertIn('zone', self.config.main)
        self.assertIn('ttl', self.config.main)
        self.assertIn('cache_ttl', self.config.main)
        self.assertEqual(self.config.ec2['aws_region'], 'BOGUS_AWS_REGION_FROM_CONF_FILE')
        self.assertEqual(self.config.main['zone'], 'BOGUS_ZONE_FROM_CONF_FILE')
        self.assertEqual(self.config.main['reverse_zone'], 'BOGUS_REVERSE_ZONE_FROM_CONF_FILE')
        self.assertEqual(self.config.main['ttl'], 'BOGUS_TTL_FROM_CONF_FILE')
        self.assertEqual(self.config.main['cache_ttl'], 'BOGUS_CACHE_TTL_FROM_CONF_FILE')
        self.assertEqual(self.config.main['ip_order'], 'BOGUS_IP_ORDER_FROM_CONF_FILE')
        self.assertEqual(self.config.server['type'], 'BOGUS_SERVER_TYPE_FROM_FILE')
        self.assertEqual(self.config.lookup['type'], 'BOGUS_LOOKUP_TYPE_FROM_FILE')
        self.assertEqual(self.config.lookup['tag_name_include_domain'], 'BOGUS_TAG_NAME_INCLUDE_DOMAIN_FROM_FILE')
        self.assertEqual(self.config.lookup_filters, {'bogus-key': 'bogus-value-from-file'})

    def test_parse_partial(self):
        """A partial conf file sets only the keys it contains."""
        fixture_conf_file = os.path.join(os.path.dirname(__file__), 'data', 'unbound_ec2_partial.conf')
        self.config = config.UnboundEc2Conf(fixture_conf_file)
        self.assertTrue(self.config.parse())
        self.assertIn('aws_region', self.config.ec2)
        self.assertIn('zone', self.config.main)
        self.assertNotIn('reverse_zone', self.config.main)
        self.assertNotIn('ttl', self.config.main)
        self.assertNotIn('cache_ttl', self.config.main)
        self.assertNotIn('tag_name_include_domain', self.config.lookup)
        self.assertEqual(self.config.ec2['aws_region'], 'BOGUS_AWS_REGION_FROM_CONF_FILE')
        self.assertEqual(self.config.main['zone'], 'BOGUS_ZONE_FROM_CONF_FILE')
        self.assertEqual(self.config.server['type'], 'BOGUS_SERVER_TYPE_FROM_FILE')
        self.assertEqual(self.config.lookup['type'], 'BOGUS_LOOKUP_TYPE_FROM_FILE')
        self.assertFalse(self.config.lookup_filters)

    def test_parse_partial_with_defaults(self):
        """Keys missing from a partial conf file keep their defaults."""
        fixture_conf_file = os.path.join(os.path.dirname(__file__), 'data', 'unbound_ec2_partial.conf')
        self.config = config.UnboundEc2Conf(fixture_conf_file)
        self.config.set_defaults()
        self.assertTrue(self.config.parse())
        self.assertEqual(self.config.ec2['aws_region'], 'BOGUS_AWS_REGION_FROM_CONF_FILE')
        self.assertEqual(self.config.main['zone'], 'BOGUS_ZONE_FROM_CONF_FILE')
        self.assertEqual(self.config.main['reverse_zone'], config.DEFAULT_REVERSE_ZONE)
        self.assertEqual(self.config.main['ttl'], int(config.DEFAULT_TTL))
        self.assertEqual(self.config.main['cache_ttl'], int(config.DEFAULT_CACHE_TTL))
        self.assertEqual(self.config.server['type'], 'BOGUS_SERVER_TYPE_FROM_FILE')
        self.assertEqual(self.config.lookup['type'], 'BOGUS_LOOKUP_TYPE_FROM_FILE')
        self.assertEqual(self.config.lookup['tag_name_include_domain'],
                         bool(config.DEFAULT_LOOKUP_TAG_NAME_INCLUDE_DOMAIN))
        self.assertEqual(self.config.lookup_filters, ast.literal_eval(config.DEFAULT_LOOKUP_FILTERS))
|
agoose77/hivesystem
|
bee/types.py
|
Python
|
bsd-2-clause
| 16,351
| 0.003241
|
from __future__ import print_function
import functools
# Connection modes — not referenced in this chunk; presumably consumed
# by other modules of the package (TODO confirm).
_modes = ["push", "pull"]
# All type names this module recognises.
_types = set((
    "event", "exception",
    "int", "float", "bool", "str",
    "mstr", "id",
    "object",
    "block", "blockcontrol", "blockmodel",
    "expression",
    "bee",
))
# Subset of _types treated as object-like rather than plain scalars.
_objecttypes = "object", "mstr", "id", "block", "blockcontrol", "blockmodel", "expression", "bee"
# Maps external type names to internal ones; tuple values are
# (base type, qualifier) pairs.
_typemap = {
    "String": "str",
    "Material": "str",
    "Integer": "int",
    "Float": "float",
    "Bool": "bool",
    "FunctionLink": ("str", "function"),
    "ObjectLink": ("str", "object"),
    "ModuleLink": ("str", "module"),
    "Filename": ("str", "filename"),
}
from .event import event
from .mstr import mstr
from .reference import reference
import spyder
def stringtupleparser(*tokens):
    """Parse a comma-separated, possibly parenthesised and quoted string
    into a (nested) tuple of strings.

    A single unparenthesised token is returned as a plain string; an
    empty input returns ""; unbalanced parentheses return the input
    unchanged.
    """
    if len(tokens) == 1:
        tokens = tokens[0]
    string_value = str(tokens).strip()
    if not string_value:
        return ""
    # Strip one level of outer parentheses.
    if string_value[0] == "(" and string_value[-1] == ")":
        string_value = string_value[1:-1]
    parenthesis = 0   # current parenthesis nesting depth
    quote = None      # quote character we are currently inside, if any
    tokens = []
    last = 0          # start index of the token currently being scanned
    found = False     # whether any parenthesis was seen (forces tuple result)
    skip = 0          # trailing characters (a closing quote) to drop from the token
    for index, character in enumerate(string_value):
        if character == "," and quote is None:
            if parenthesis == 0:
                # Top-level comma: close off the current token.
                tokens.append(string_value[last:index - skip])
                last = index + 1
                continue
        skip = 0
        if character == "(":
            parenthesis += 1
            found = True
        elif character == ")":
            parenthesis -= 1
        elif (character == "'" or character == '"') and not parenthesis:
            if quote is None and last == index:
                # Opening quote at the start of a token: exclude it.
                last += 1
                quote = character
            elif quote == character:
                # Matching closing quote: remember to drop it from the token.
                quote = None
                skip = 1
        elif character == " " and quote is None and last == index:
            # Skip leading whitespace before a token.
            last += 1
    if parenthesis > 0:
        # Unbalanced input: give back the raw string.
        return string_value
    if last < len(string_value) - skip:
        tokens.append(string_value[last:len(string_value) - skip])
    if not found and len(tokens) == 1:
        return tokens[0]
    ret = []
    for token in tokens:
        # Recurse so nested parentheses become nested tuples.
        ret.append(stringtupleparser(token))
    return tuple(ret)
# Events are parsed by the event class constructor itself.
eventparser = event
def boolparser(value):
    """Convert *value* to a bool.

    Accepts the canonical false spellings (False, "False", 0, "0", "")
    and true spellings (True, "True", 1, "1"); any other value raises
    ValueError.
    """
    false_values = (False, "False", 0, "0", "")
    true_values = (True, "True", 1, "1")
    if value in false_values:
        return False
    if value in true_values:
        return True
    raise ValueError(value)
def spyderparser(spydertypename, value):
    """Build a Spyder object of type *spydertypename* from *value*.

    String values are first tried as serialized spyder data via
    spyder.core.parse; on any failure, or for non-string values, the
    Spyder class constructor is called directly.
    """
    import spyder, Spyder
    spyclass = getattr(Spyder, spydertypename)
    if not isinstance(value, str):
        return spyclass(value)
    try:
        typ, parsed = spyder.core.parse(value)
        if typ != spydertypename:
            # Parsed type mismatch: fall through to the constructor.
            raise TypeError
        return spyclass.fromdict(parsed)
    except:
        # NOTE(review): bare except deliberately falls back to the plain
        # constructor on any parse failure.
        return spyclass(value)
def generic_constructor(type_=None):
    """Return a converter callable for parameter values.

    Without *type_*, the converter re-creates a value via its own type;
    with *type_*, it coerces the value through type_.  In both variants
    beewrapper instances pass through untouched, reference objects are
    dereferenced, and a TypeError during conversion returns the value
    unchanged.
    """
    if type_ is None:
        def _convert_same_type(value):
            from .beewrapper import beewrapper
            if isinstance(value, beewrapper):
                return value
            if isinstance(value, reference):
                return value.obj
            try:
                # Rebuild the value through its own type.
                return type(value)(value)
            except TypeError:
                return value

        return _convert_same_type

    def _convert_to_type(value):
        from .beewrapper import beewrapper
        if isinstance(value, beewrapper):
            return value
        if isinstance(value, reference):
            return value.obj
        try:
            # Coerce the value through the requested type.
            return type_(value)
        except TypeError:
            return value

    return _convert_to_type
# Maps a parameter type name to a (canonical type name, parser/constructor)
# pair; consumed by get_parameterclass() below.
_parametertypes = {
    "bee": ("bee", generic_constructor()),
    "int": ("int", int),
    "float": ("float", float),
    "bool": ("bool", boolparser),
    "str": ("str", str),
    "mstr": ("mstr", generic_constructor(mstr)),
    "id": ("str", str),
    "object": ("str", generic_constructor()),
    "event": ("event", eventparser),
    "matrix": ("matrix", generic_constructor()),
    "expression": ("str", str),
}
def parse_parameters(unnamedparameters, namedparameters, args, kargs, exactmatch=True):
    """Validate and convert call arguments against parameter specs.

    unnamedparameters: sequence of (name, converter) positional specs.
    namedparameters: sequence of (name, (typename, converter), default)
    specs; a default of "no-defaultvalue" marks a required parameter.
    Returns (positional_values, named_values) or, when exactmatch is
    False, (positional_values, named_values, unmatched).  Raises
    TypeError on arity errors, duplicate or missing parameters.
    """
    kargs = dict(kargs)
    namedparameternames = [n[0] for n in namedparameters]
    # Fill in defaults for named parameters not covered positionally or
    # by keyword.
    for p in namedparameters[len(args):]:
        if p[0] not in kargs and p[2] != "no-defaultvalue": kargs[p[0]] = p[2]
    wrongargnr = False
    if exactmatch:
        if len(args) + len(kargs) != len(unnamedparameters) + len(namedparameters):
            wrongargnr = True
    else:
        if len(args) + len(kargs) < len(unnamedparameters) + len(namedparameters):
            wrongargnr = True
    if wrongargnr:
        raise TypeError("""
    Number of required parameters: %d (keyword: %s)\n
    Number of supplied parameters: %d (%d non-keyword, %d keyword)
    """ % (len(unnamedparameters) + len(namedparameters), namedparameternames, len(args) + len(kargs), len(args),
           len(kargs)))
    if len(args) < len(unnamedparameters):
        # NOTE(review): this format string has 4 placeholders but only 2
        # arguments, so taking this branch raises a formatting TypeError
        # instead of the intended message — worth fixing.
        raise TypeError("""
    Number of required non-keyword parameters: %d\n
    Number of supplied non-keyword parameters: %d (%d non-keyword, %d keyword)
    """ % (len(unnamedparameters), len(args)))
    ret1 = []
    ret2 = {}
    # Convert the purely positional arguments.
    for anr, a in enumerate(args):
        if anr == len(unnamedparameters): break
        pclass = unnamedparameters[anr][1]
        if pclass is object:
            pval = a
        elif a is None:
            # None passes through unconverted.
            pval = None
        else:
            pval = pclass(a)
        ret1.append(pval)
    args = args[len(unnamedparameters):]
    params = {}
    for p in enumerate(namedparameters): params[p[0]] = p[1]
    # NOTE(review): 'params' is built but never read below — apparently
    # dead code.
    # Remaining positional arguments fill named parameters in order.
    for anr, a in enumerate(args):
        if anr >= len(namedparameters):
            break
        pname, pclass, default = namedparameters[anr]
        pclass = pclass[1]
        if pclass is object:
            pval = a
        elif a is None:
            pval = None
        else:
            pval = pclass(a)
        ret2[pname] = pval
    unmatched = {}
    # Process keyword arguments.
    for pname in kargs:
        if pname in ret2:
            raise TypeError("Duplicate definition of parameter %s" % pname)
        ppar = [v[1] for v in namedparameters if v[0] == pname]
        if len(ppar) == 0:
            # Unknown keyword: collect it (only returned when
            # exactmatch is False).
            unmatched[pname] = kargs[pname]
            continue
        ppar = ppar[0]
        pclassname = ppar[0]
        pclass = ppar[1]
        a = kargs[pname]
        if pclassname == "bee":
            # "bee" parameters are passed through and also reported as
            # unmatched.  NOTE(review): they never reach ret2, so the
            # missing-parameter check below would raise for them —
            # confirm intended behaviour with callers.
            pval = a
            unmatched[pname] = a
        else:
            if pclass is object:
                pval = a
            elif a is None:
                pval = None
            else:
                pval = pclass(a)
            ret2[pname] = pval
    for pname, pclass, default in namedparameters:
        if pname not in ret2:
            raise TypeError("Undefined parameter '%s'" % pname)
    if exactmatch:
        return tuple(ret1), ret2
    else:
        return tuple(ret1), ret2, unmatched
def _parameterclass(parametertuple, *args, **kargs):
    # Parser for a composite (tuple) parameter: args[0] carries the
    # positional values for the sub-parameters; only the converted
    # positional results are returned.
    return parse_parameters(parametertuple, [], args[0], kargs)[0]
def get_parameterclass(parclassname):
    """Resolve a parameter class name into a parameter spec tuple.

    Returns (typename(s), parser) or (typename(s), parser,
    "no-defaultvalue").  Tuples of names recurse into composite specs,
    names in _typemap are remapped, valid Spyder type names get a
    spyderparser, and everything else is looked up in _parametertypes.
    """
    t = typeclass(parclassname)
    if t.is_subtype:
        # Subtypes resolve through their base name.
        return get_parameterclass(parclassname[0])
    if isinstance(parclassname, tuple):
        # Composite parameter: build one spec per element, then a parser
        # that parses the whole tuple at once.
        parametertuple = [get_parameterclass(p) for p in parclassname]
        t0 = [t[0] for t in parametertuple]
        t1 = [t[1] for t in parametertuple]
        # NOTE(review): t1 is unused in the live return below (earlier
        # variants used it) — candidate for removal.
        return (tuple(t0), functools.partial(_parameterclass, parametertuple), "no-defaultvalue")
    elif parclassname in _typemap:
        return get_parameterclass(_typemap[parclassname])
    elif spyder.validvar2(parclassname):
        import Spyder
        return (parclassname, functools.partial(spyderparser, parclassname))
    else:
        return tuple(_parametertypes[parclassname]) + ("no-defaultvalue",)
def add_parameterclass(parclassname, parclassexpr, parclass):
global _parametertypes
assert not isinstance(parclassname, tuple)
_pa
|
DavisPoGo/Monocle
|
migrations/versions/f19fc04ba856_added_existing_tables.py
|
Python
|
mit
| 10,836
| 0.01495
|
"""Added existing tables
Revision ID: f19fc04ba856
Revises:
Create Date: 2017-09-24 03:10:27.208231
"""
from alembic import op
import sqlalchemy as sa
import sys
from pathlib import Path
monocle_dir = str(Path(__file__).resolve().parents[2])
if monocle_dir not in sys.path:
sys.path.append(monocle_dir)
from monocle import db as db
# revision identifiers, used by Alembic.
revision = 'f19fc04ba856'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
#if db._engine.dialect.has_table(db._engine, 'sightings'):
# return
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('forts',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('external_id', sa.String(length=35), nullable=True),
sa.Column('lat', db.FLOAT_TYPE, nullable=True),
sa.Column('lon', db.FLOAT_TYPE, nullable=True),
sa.Column('name', sa.String(length=128), nullable=True),
sa.Column('url', sa.String(length=200), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('external_id')
)
op.create_index('ix_coords', 'forts', ['lat', 'lon'], unique=False)
op.create_table('mystery_sightings',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('pokemon_id', db.TINY_TYPE, nullable=True),
sa.Column('spawn_id', sa.BigInteger(), nullable=True),
sa.Column('encounter_id', db.UNSIGNED_HUGE_TYPE, nullable=True),
sa.Column('lat', db.FLOAT_TYPE, nullable=True),
sa.Column('lon', db.FLOAT_TYPE, nullable=True),
sa.Column('first_seen', sa.Integer(), nullable=True),
sa.Column('first_seconds', sa.SmallInteger(), nullable=True),
sa.Column('last_seconds', sa.SmallInteger(), nullable=True),
sa.Column('seen_range', sa.SmallInteger(), nullable=True),
sa.Column('atk_iv', db.TINY_TYPE, nullable=True),
sa.Column('def_iv', db.TINY_TYPE, nullable=True),
sa.Column('sta_iv', db.TINY_TYPE, nullable=True),
sa.Column('move_1', sa.SmallInteger(), nullable=True),
sa.Column('move_2', sa.SmallInteger(), nullable=True),
sa.Column('gender', sa.SmallInteger(), nullable=True),
sa.Column('form', sa.SmallInteger(), nullable=True),
sa.Column('cp', sa.SmallInteger(), nullable=True),
sa.Column('level', sa.SmallInteger(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('encounter_id', 'spawn_id', name='unique_encounter')
)
op.create_index(op.f('ix_mystery_sightings_encounter_id'), 'mystery_sightings', ['encounter_id'], unique=False)
op.create_index(op.f('ix_mystery_sightings_first_seen'), 'mystery_sightings', ['first_seen'], unique=False)
op.create_index(op.f('ix_mystery_sightings_spawn_id'), 'mystery_sightings', ['spawn_id'], unique=False)
op.create_table('pokestops',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('external_id', sa.String(length=35), nullable=True),
sa.Column('lat', db.FLOAT_TYPE, nullable=True),
sa.Column('lon', db.FLOAT_TYPE, nullable=True),
sa.Column('name', sa.String(length=128), nullable=True),
sa.Column('url', sa.String(length=200), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('external_id')
)
op.create_index(op.f('ix_pokestops_lat'), 'pokestops', ['lat'], unique=False)
op.create_index(op.f('ix_pokestops_lon'), 'pokestops', ['lon'], unique=False)
op.create_table('sightings',
sa.Column('id', db.PRIMARY_HUGE_TYPE, nullable=False),
sa.Column('pokemon_id', db.TINY_TYPE, nullable=True),
sa.Column('spawn_id', sa.BigInteger(), nullable=True),
sa.Column('expire_timestamp', sa.Integer(), nullable=True),
sa.Column('encounter_id', db.UNSIGNED_HUGE_TYPE, nullable=True),
sa.Column('lat', db.FLOAT_TYPE, nullable=True),
sa.Column('lon', db.FLOAT_TYPE, nullable=True),
sa.Column('atk_iv', db.TINY_TYPE, nullable=True),
sa.Column('def_iv', db.TINY_TYPE, nullable=True),
sa.Column('sta_iv', db.TINY_TYPE, nullable=True),
sa.Column('move_1', sa.SmallInteger(), nulla
|
ble=True),
sa.Column('move_2', sa.SmallInteger(), nullable=True),
sa.Column('gender', sa.SmallInteger(), nullable=True),
sa.Column('form', sa.SmallInteger(), nullable=True),
sa.Column('cp', sa.SmallInteger(), nullable=True),
sa.Column('level', sa.SmallInteger(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('encounter_id', 'expire_timestamp', name='timestamp_encounter_id_unique')
)
op.create_index(op
|
.f('ix_sightings_encounter_id'), 'sightings', ['encounter_id'], unique=False)
op.create_index(op.f('ix_sightings_expire_timestamp'), 'sightings', ['expire_timestamp'], unique=False)
op.create_table('spawnpoints',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('spawn_id', sa.BigInteger(), nullable=True),
sa.Column('despawn_time', sa.SmallInteger(), nullable=True),
sa.Column('lat', db.FLOAT_TYPE, nullable=True),
sa.Column('lon', db.FLOAT_TYPE, nullable=True),
sa.Column('updated', sa.Integer(), nullable=True),
sa.Column('duration', db.TINY_TYPE, nullable=True),
sa.Column('failures', db.TINY_TYPE, nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_spawnpoints_despawn_time'), 'spawnpoints', ['despawn_time'], unique=False)
op.create_index(op.f('ix_spawnpoints_spawn_id'), 'spawnpoints', ['spawn_id'], unique=True)
op.create_index(op.f('ix_spawnpoints_updated'), 'spawnpoints', ['updated'], unique=False)
op.create_table('fort_sightings',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('fort_id', sa.Integer(), nullable=True),
sa.Column('last_modified', sa.Integer(), nullable=True),
sa.Column('team', db.TINY_TYPE, nullable=True),
sa.Column('guard_pokemon_id', db.TINY_TYPE, nullable=True),
sa.Column('slots_available', sa.SmallInteger(), nullable=True),
sa.Column('is_in_battle', sa.Boolean(), nullable=True),
sa.ForeignKeyConstraint(['fort_id'], ['forts.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('fort_id', 'last_modified', name='fort_id_last_modified_unique')
)
op.create_index(op.f('ix_fort_sightings_last_modified'), 'fort_sightings', ['last_modified'], unique=False)
op.create_table('gym_defenders',
sa.Column('id', db.PRIMARY_HUGE_TYPE, nullable=False),
sa.Column('fort_id', sa.Integer(), nullable=False),
sa.Column('external_id', db.UNSIGNED_HUGE_TYPE, nullable=False),
sa.Column('pokemon_id', sa.Integer(), nullable=True),
sa.Column('owner_name', sa.String(length=128), nullable=True),
sa.Column('nickname', sa.String(length=128), nullable=True),
sa.Column('cp', sa.Integer(), nullable=True),
sa.Column('stamina', sa.Integer(), nullable=True),
sa.Column('stamina_max', sa.Integer(), nullable=True),
sa.Column('atk_iv', sa.SmallInteger(), nullable=True),
sa.Column('def_iv', sa.SmallInteger(), nullable=True),
sa.Column('sta_iv', sa.SmallInteger(), nullable=True),
sa.Column('move_1', sa.SmallInteger(), nullable=True),
sa.Column('move_2', sa.SmallInteger(), nullable=True),
sa.Column('battles_attacked', sa.Integer(), nullable=True),
sa.Column('battles_defended', sa.Integer(), nullable=True),
sa.Column('num_upgrades', sa.SmallInteger(), nullable=True),
sa.Column('created', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['fort_id'], ['forts.id'], onupdate='CASCADE', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_gym_defenders_created'), 'gym_defenders', ['created'], unique=False)
op.create_index(op.f('ix_gym_defenders_fort_id'), 'gym_defenders', ['fort_id'], unique=False)
op.create_table('raids',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('external_id', sa.BigInteger(), nullable=True),
sa.Column('fort_id', sa.Integer(), nullable=True),
sa.Column('level', db.TINY_TYPE, nullable=True),
sa.Column('pokemon_id', db.TINY_TYPE, nullable=True),
sa.Column('move_1', sa.SmallInteger(), nullable=True),
sa.Column('move_2', sa.SmallInteger(), nullable=True),
sa.Column('time_spawn', sa.Integer(), nullable=True),
sa.Colu
|
agaffney/ansible
|
lib/ansible/modules/stat.py
|
Python
|
gpl-3.0
| 19,140
| 0.001776
|
#!/usr/bin/python
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = r'''
---
module: stat
version_added: "1.3"
short_description: Retrieve file or file system status
description:
- Retrieves facts for a file similar to the Linux/Unix 'stat' command.
- For Windows targets, use the M(ansible.windows.win_stat) module instead.
options:
path:
description:
- The full path of the file/object to get the facts of.
type: path
required: true
aliases: [ dest, name ]
follow:
description:
- Whether to follow symlinks.
type: bool
default: no
get_checksum:
description:
- Whether to return a checksum of the file.
type: bool
default: yes
version_added: "1.8"
checksum_algorithm:
description:
- Algorithm to determine checksum of file.
- Will throw an error if the host is unable to use specified algorithm.
- The remote host has to support the hashing method specified, C(md5)
can be unavailable if the host is FIPS-140 compliant.
type: str
choices: [ md5, sha1, sha224, sha256, sha384, sha512 ]
default: sha1
aliases: [ checksum, checksum_algo ]
version_added: "2.0"
get_mime:
description:
- Use file magic and return data about the nature of the file. this uses
the 'file' utility found on most Linux/Unix systems.
- This will add both `mime_type` and 'charset' fields to the return, if possible.
- In Ansible 2.3 this option changed from 'mime' to 'get_mime' and the default changed to 'Yes'.
type: bool
default: yes
aliases: [ mime, mime_type, mime-type ]
version_added: "2.1"
get_attributes:
description:
- Get file attributes using lsattr tool if present.
type: bool
default: yes
aliases: [ attr, attributes ]
version_added: "2.3"
seealso:
- module: ansible.builtin.file
- module: ansible.windows.win_stat
author: Bruce Pennypacker (@bpennypacker)
'''
EXAMPLES = r'''
# Obtain the stats of /etc/foo.conf, and check that the file still belongs
# to 'root'. Fail otherwise.
- name: Get stats of a file
stat:
path: /etc/foo.conf
register: st
- fail:
msg: "Whoops! file ownership has changed"
when: st.stat.pw_name != 'root'
# Determine if a path exists and is a symlink. Note that if the path does
# not exist, and we test sym.stat.islnk, it will fail with an error. So
# therefore, we must test whether it is defined.
# Run this to understand the structure, the skipped ones do not pass the
# check performed by 'when'
- name: Get stats of the FS object
stat:
path: /path/to/something
register: sym
- debug:
msg: "islnk isn't defined (path doesn't exist)"
when: sym.stat.islnk is not defined
- debug:
msg: "islnk is defined (path must exist)"
when: sym.stat.islnk is defined
- debug:
msg: "Path exists and is a symlink"
when: sym.stat.islnk is defined and sym.stat.islnk
- debug:
msg: "Path exists and isn't a symlink"
when: sym.stat.islnk is defined and sym.stat.islnk == False
# Determine if a path exists and is a directory. Note that we need to test
# both that p.stat.isdir actually exists, and also that it's set to true.
- name: Get stats of the FS object
stat:
path: /path/to/something
register: p
- debug:
msg: "Path exists and is a directory"
when: p.stat.isdir is defined and p.stat.isdir
- name: Don't do checksum
stat:
path: /path/to/myhugefile
get_checksum: no
- name: Use sha256 to calculate checksum
stat:
path: /path/to/something
checksum_algorithm: sha256
'''
RETURN = r'''
stat:
description: dictionary containing all the stat data, some platforms might add additional fields
returned: success
type: complex
contains:
exists:
description: If the destination path actually exists or not
returned: success
type: bool
sample: True
path:
description: The full path of the file/object to get the facts of
returned: success and if path exists
type: str
sample: '/path/to/file'
mode:
description: Unix permissions of the file in octal representation as a string
returned: success, path exists and user can read stats
type: str
sample: 1755
isdir:
description: Tells you if the path is a directory
returned: success, path exists and user can read stats
type: bool
sample: False
ischr:
description: Tells you if the path is a character device
returned: success, path exists and user can read stats
type: bool
sample: False
isblk:
description: Tells you if the path is a block device
returned: success, path exists and user can read stats
type: bool
sample: False
isreg:
description: Tells you if the path is a regular file
returned: success, path exists and user can read stats
type: bool
sample: True
isfifo:
description: Tells you if the path is a named pipe
returned: success, path exists and user can read stats
type: bool
sample: False
islnk:
description: Tells you if the path is a symbolic link
returned: success, path exists and user can read stats
type: bool
sample: False
issock:
        description: Tells you if the path is a unix domain socket
returned: success, path exists and user can read stats
type: bool
sample: False
uid:
        description: Numeric id representing the file owner
returned: success, path exists and user can read stats
type: int
sample: 1003
gid:
description: Numeric id representing the group of the owner
returned: success, path exists and user can read stats
type: int
sample: 1003
size:
description: Size in bytes for a plain file, amount of data for some special files
returned: success, path exists and user can read stats
type: int
sample: 203
inode:
description: Inode number of the path
returned: success, path exists and user can read stats
type: int
sample: 12758
dev:
description: Device the inode resides on
returned: success, path exists and user can read stats
type: int
sample: 33
nlink:
description: Number of links to the inode (hard links)
returned: success, path exists and user can read stats
type: int
sample: 1
atime:
description: Time of last access
returned: success, path exists and user can read stats
type: float
sample: 1424348972.575
mtime:
description: Time of last modification
returned: success, path exists and user can read stats
type: float
sample: 1424348972.575
ctime:
description: Time of last metadata update or creation (depends on OS)
returned: success, path exists and user can read stats
type: float
sample: 1424348972.575
wusr:
description: Tells you if the owner has write permission
returned: success, path exists and user can read stats
type: bool
sample: True
rusr:
description: Tells you if the owner has read permission
returned: success, path exists and user can read stats
type: bool
sample: True
xusr:
description: Tells you if the owner has execute permission
returned: success, path exist
|
bhattmansi/Implementation-of-CARED-in-ns3
|
src/stats/bindings/modulegen__gcc_LP64.py
|
Python
|
gpl-2.0
| 256,748
| 0.014244
|
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
    """Downgrade wrapper-generation failures to warnings.

    Returning True tells pybindgen the error was handled, so a single
    unwrappable API does not abort generation of the whole module.
    """
    def handle_error(self, wrapper, exception, traceback_):
        # Report which wrapper failed, then carry on.
        message = "exception %r in wrapper %s" % (exception, wrapper)
        warnings.warn(message)
        return True
pybindgen.settings.error_handler = ErrorHandler()
import sys
def module_init():
    """Create and return the root pybindgen Module for the ns3.stats bindings."""
    # Every wrapped class lives in the C++ namespace ::ns3.
    return Module('ns.stats', cpp_namespace='::ns3')
def register_types(module):
root_module = module.get_root()
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
module.add_class('AttributeConstructionList', import_from_module='ns.core')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
## callback.h (module 'core'): ns3::CallbackBase [class]
module.add_class('CallbackBase', import_from_module='ns.core')
## data-output-interface.h (module 'stats'): ns3::DataOutputCallback [class]
module.add_class('DataOutputCallback', allow_subclassing=True)
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeAccessor> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeChecker> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeChecker'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeValue> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeValue'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::CallbackImplBase> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::EventImpl> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::EventImpl'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::Hash::Implementation> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::TraceSourceAccessor> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor'])
## event-id.h (module 'core'): ns3::EventId [class]
module.add_class('EventId', import_from_module='ns.core')
## file-helper.h (module 'stats'): ns3::FileHelper [class]
module.add_class('FileHelper')
## gnuplot.h (module 'stats'): ns3::Gnuplot [class]
module.add_class('Gnuplot')
## gnuplot.h (module 'stats'): ns3::GnuplotCollection [class]
module.add_class('GnuplotCollection')
## gnuplot.h (module 'stats'): ns3::GnuplotDataset [class]
module.add_class('GnuplotDataset')
## gnuplot-helper.h (module 'stats'): ns3::GnuplotHelper [class]
module.add_class('GnuplotHelper')
## hash.h (module 'core'): ns3::Hasher [class]
module.add_class('Hasher', import_from_module='ns.core')
## object-base.h (module 'core'): ns3::ObjectBase [class]
module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
## object.h (module 'core'): ns3::ObjectDeleter [struct]
module.add_class('ObjectDeleter', import_from_module='ns.core')
## object-factory.h (module 'core'): ns3::ObjectFactory [class]
module.add_class('ObjectFactory', import_from_module='ns.core')
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simulator.h (module 'core'): ns3::Simulator [class]
module.add_class('Simulator', destructor_visibility='private', import_from_module
|
='ns.core')
## simulator.h (module 'core'): ns3::Simulator [enumeration]
module.add_enum('', ['NO_CONTEXT'], outer_class=root_module['ns3::Simulator'], import_from_module='ns.core')
## data-calculator.h (module 'stats'): ns3::StatisticalSummary [class]
module.add_class('StatisticalSummary', allow_subclassing=True)
## nstime.h (module 'core'): ns3::TimeWithUnit [class]
module.add_class('TimeWithUnit', import_from_module='ns.core')
## traced-
|
value.h (module 'core'): ns3::TracedValue<bool> [class]
module.add_class('TracedValue', import_from_module='ns.core', template_parameters=['bool'])
## traced-value.h (module 'core'): ns3::TracedValue<double> [class]
module.add_class('TracedValue', import_from_module='ns.core', template_parameters=['double'])
## traced-value.h (module 'core'): ns3::TracedValue<unsigned char> [class]
module.add_class('TracedValue', import_from_module='ns.core', template_parameters=['unsigned char'])
## traced-value.h (module 'core'): ns3::TracedValue<unsigned int> [class]
module.add_class('TracedValue', import_from_module='ns.core', template_parameters=['unsigned int'])
## traced-value.h (module 'core'): ns3::TracedValue<unsigned short> [class]
module.add_class('TracedValue', import_from_module='ns.core', template_parameters=['unsigned short'])
## type-id.h (module 'core'): ns3::TypeId [class]
module.add_class('TypeId', import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::SupportLevel [enumeration]
module.add_enum('SupportLevel', ['SUPPORTED', 'DEPRECATED', 'OBSOLETE'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## empty.h (module 'core'): ns3::empty [class]
module.add_class('empty', import_from_module='ns.core')
## int64x64-128.h (module 'core'): ns3::int64x64_t [class]
module.add_class('int64x64_t', import_from_module='ns.core')
## int64x64-128.h (module 'core'): ns3::int64x64_t::impl_type [enumeration]
module.add_enum('impl_type', ['int128_impl', 'cairo_impl', 'ld_impl'], outer_class=root_module['ns3::int64x64_t'], import_from_module='ns.core')
## gnuplot.h (module 'stats'): ns3::Gnuplot2dDataset [class]
module.add_class('Gnuplot2dDataset', parent=root_module['ns3::GnuplotDataset'])
## gnuplot.h (module 'stats'): ns3::Gnuplot2dDataset::Style [enumeration]
module.add_enum('Style', ['LINES', 'POINTS', 'LINES_POINTS', 'DOTS', 'IMPULSES', 'STEPS', 'FSTEPS', 'HISTEPS'], outer_class=root_module['ns3::Gnuplot2dDataset'])
## gnuplot.h (module 'stats'): ns3::Gnuplot2dDataset::ErrorBars [enumeration]
module.add_enum('ErrorBars', ['NONE', 'X', 'Y', 'XY'], outer_class=root_module['ns3::Gnuplot2dDataset'])
## gnuplot.h (module 'stats'): ns3::Gnuplot2dFunction [class]
module.add_class('Gnuplot2dFunction', parent=root_module['ns3::GnuplotDataset'])
## gnuplot.h (module 'stats'): ns3::Gnuplot3dDataset [class]
module.add_class('Gnuplot3dDataset', parent=root_module['ns3::GnuplotDataset'])
## gnuplot.h (module 'stats'): ns3::Gnuplot3
|
NeuroTechX/eeg-101
|
python_tools/utilities.py
|
Python
|
isc
| 5,241
| 0.011067
|
"""
Utilities for plotting various figures and animations in EEG101.
"""
# Author: Hubert Banville <hubert@neurotechx.com>
#
# License: TBD
import numpy as np
import matplotlib.pylab as plt
import collections
from scipy import signal
def dot_plot(x, labels, step=1, figsize=(12, 8)):
    """
    Make a 1D dot plot: one mirrored stack of dots per class.

    Inputs
    x : 1D array containing the points to plot
    labels : 1D array containing the label for each point in x
    step : vertical space between two points
    figsize : figure size
    """
    fig, ax = plt.subplots(figsize=figsize)
    # One histogram per class; consecutive classes are drawn on alternating
    # sides of the x axis via the (-1)**idx sign flip below.
    class_values = np.unique(labels)
    for idx, cls in enumerate(class_values):
        counts, edges = np.histogram(x[labels == cls], density=True)
        centers = (edges[1:] + edges[0:-1]) / 2.
        # Each bin contributes as many dots as its (rounded) histogram bar.
        xs = []
        ys = []
        for count, center in zip(np.round(counts).astype(int), centers):
            column = range(0, count, step)
            ys.extend(column)
            xs.extend([center] * len(column))
        ax.plot(xs, (-1) ** idx * np.array(ys), 'o', markersize=10, label=cls)
    ax.legend(scatterpoints=1)
    ax.set_xlabel('Alpha power')
    ax.set_ylabel('Number of points')
    ax.set_yticklabels([])
    ax.set_yticks([])
    ax.legend()
    plt.tight_layout()
def psd_with_bands_plot(f, psd, figsize=(12,8)):
    """
    Plot a static PSD with the classical EEG frequency bands shaded.

    INPUTS
    f : 1D array containing frequencies of the PSD
    psd : 1D array containing the power at each frequency in f
    figsize : figure size
    """
    # Ordered band boundaries (Hz); OrderedDict keeps drawing order stable.
    bands = collections.OrderedDict()
    bands[r'$\delta$'] = (0,4)
    bands[r'$\theta$'] = (4,8)
    bands[r'$\alpha$'] = (8,13)
    bands[r'$\beta$'] = (13, 30)
    bands[r'$\gamma$'] = (30, 120)
    fig, ax = plt.subplots(figsize=figsize)
    ax.plot(f, psd)
    ax.set_xlabel('Frequency (Hz)')
    ax.set_ylabel('Power (dB)')
    ylim = ax.get_ylim()
    # FIX: dict.iteritems() is Python-2-only and raises AttributeError on
    # Python 3; items() behaves identically here.
    for i, [bkey, bfreq] in enumerate(bands.items()):
        ind = (f>=bfreq[0]) & (f<=bfreq[1])
        f1 = f[ind]
        y1 = psd[ind]
        # Shade the band and print its Greek label inside the shaded area.
        ax.fill_between(f1, y1, ylim[0], facecolor=[(0.7, i/5., 0.7)], alpha=0.5)
        ax.text(np.mean(f1), (ylim[0] + ylim[1])/1.22, bkey, fontsize=16, verticalalignment='top', horizontalalignment='center')
    ax.set_xlim([min(f), max(f)])
def sinewave(A, f, phi, t):
    """
    Evaluate A * sin(2*pi*f*t + phi) at the given time point(s).

    INPUTS
    A : Amplitude
    f : Frequency (Hz)
    phi : Phase (rad)
    t : time (in s), scalar or array
    """
    angular = 2 * np.pi * f
    return A * np.sin(angular * t + phi)
def animate_signals(nb_signals, incre, fs=256, refresh_rate=30., anim_dur=10., figsize=(12,8)):
    """
    Draw and update a figure in real-time representing the summation of many
    sine waves, to explain the concept of Fourier decomposition.

    INPUTS
    nb_signals : number of signals to sum together
    incre : increment, in Hz, between each of the signals
    fs : sampling frequency
    refresh_rate : refresh rate of the animation
    anim_dur : approximate duration of the animation, in seconds
    figsize : figure size
    """
    # Initialize values that remain constant throughout the animation
    A = 1
    t = np.linspace(0, 2, fs)
    # One vertical offset per signal plus one extra row for the sum trace,
    # spaced widely enough that the traces never overlap.
    offsets = np.arange(nb_signals+1).reshape((nb_signals+1,1))*(A*(nb_signals+1))
    freqs = np.arange(nb_signals)*incre
    # Initialize the figure
    fig, ax = plt.subplots(figsize=figsize)
    # NOTE(review): Axes.hold() was removed in matplotlib >= 3.0; this call
    # only works on the old matplotlib this file was written for — confirm.
    ax.hold(True)
    plt.xlabel('Time')
    ax.yaxis.set_ticks(offsets)
    ax.set_yticklabels([str(f)+' Hz' for f in freqs] + ['Sum'])
    ax.xaxis.set_ticks([])
    # Initialize the Line2D elements for each signal (last row is the sum).
    sines = np.array([sinewave(A, f, 0, t) for f in freqs])
    sines = np.vstack((sines, np.sum(sines, axis=0))) + offsets
    points = [ax.plot(t, x)[0] for x in sines]
    # Animation refresh loop
    for i in np.arange(anim_dur*refresh_rate):
        # Update time
        t = np.linspace(0, 2, fs) + i*fs/refresh_rate
        # Update signals (this line was split by an extraction artifact in
        # the corrupted source; reconstructed to match the setup above).
        sines = np.array([sinewave(A, f, 0, t) for f in freqs])
        sines = np.vstack((sines, np.sum(sines, axis=0))) + offsets
        # Update figure
        for p, x in zip(points, sines):
            p.set_ydata(x)
        # Wait before starting another cycle
        plt.pause(1./refresh_rate)
if __name__ == '__main__':
    # Demo driver: exercises each plotting utility with synthetic data.
    # 1) DISTRIBUTION OF TRAINING DATA
    # Generate fake data (comment was split by an extraction artifact;
    # reconstructed).
    nb_points = 10*10
    relax_data = np.random.normal(0.01, 0.01, size=(nb_points,))
    focus_data = np.random.normal(0.03, 0.01, size=(nb_points,))
    dot_plot(x=np.concatenate((relax_data, focus_data)),
             labels=np.concatenate((np.zeros((nb_points,)), np.ones((nb_points,)))),
             step=4)
    # 2) PSD PLOT
    # Generate fake data
    # NOTE(review): f starts at 0, so 1./f emits a divide-by-zero warning
    # and psd[0] is inf — presumably intentional for the demo; confirm.
    f = np.arange(0, 110, 1) # one-second windows = 1-Hz bins
    psd = 10*np.log10(1./f)
    psd_with_bands_plot(f, psd)
    # 3) FOURIER DECOMPOSITION ANIMATION
    animate_signals(4, 2)
|
jwilk/anorack
|
lib/articles.py
|
Python
|
mit
| 1,632
| 0.001232
|
# Copyright © 2016 Jakub Wilk <jwilk@jwilk.net>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the “Software”), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
'''
English articles
'''
from lib import phonetics
# All accent/stress marks known to the phonetics module, concatenated into a
# single string suitable for passing to str.strip().
accents = ''.join(phonetics.accents)
def choose_art(phonemes):
    '''
    choose correct article for the phonemes:
    return "a" or "an" or NotImplemented
    '''
    # Drop leading accent/stress marks so the first real sound is examined.
    stripped = phonemes.strip(accents)
    if not stripped:
        # Input was empty or contained only accent marks.
        return NotImplemented
    first = stripped[0]
    if first in phonetics.consonants:
        return 'a'
    if first in phonetics.vowels:
        return 'an'
    # Unrecognized phoneme class.
    return NotImplemented
__all__ = ['choose_art']
# vim:ts=4 sts=4 sw=4 et
|
Azure/azure-sdk-for-python
|
sdk/monitor/azure-mgmt-monitor/azure/mgmt/monitor/v2021_09_01/models/_models.py
|
Python
|
mit
| 38,322
| 0.00321
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.core.exceptions import HttpResponseError
import msrest.serialization
class ActionDetail(msrest.serialization.Model):
    """The action detail.

    :param mechanism_type: The mechanism type.
    :type mechanism_type: str
    :param name: The name of the action.
    :type name: str
    :param status: The status of the action.
    :type status: str
    :param sub_state: The substatus of the action.
    :type sub_state: str
    :param send_time: The send time.
    :type send_time: str
    :param detail: The detail of the friendly error message.
    :type detail: str
    """
    # NOTE: this file is AutoRest-generated (see file header); hand edits
    # will be lost on regeneration.
    # Maps each Python attribute to its wire-format key and msrest type.
    _attribute_map = {
        'mechanism_type': {'key': 'MechanismType', 'type': 'str'},
        'name': {'key': 'Name', 'type': 'str'},
        'status': {'key': 'Status', 'type': 'str'},
        'sub_state': {'key': 'SubState', 'type': 'str'},
        'send_time': {'key': 'SendTime', 'type': 'str'},
        'detail': {'key': 'Detail', 'type': 'str'},
    }
    def __init__(
        self,
        **kwargs
    ):
        super(ActionDetail, self).__init__(**kwargs)
        # All fields are optional; absent keyword arguments default to None.
        self.mechanism_type = kwargs.get('mechanism_type', None)
        self.name = kwargs.get('name', None)
        self.status = kwargs.get('status', None)
        self.sub_state = kwargs.get('sub_state', None)
        self.send_time = kwargs.get('send_time', None)
        self.detail = kwargs.get('detail', None)
class ActionGroupList(msrest.serialization.Model):
    """A list of action groups.

    :param value: The list of action groups.
    :type value: list[~$(python-base-namespace).v2021_09_01.models.ActionGroupResource]
    :param next_link: Provides the link to retrieve the next set of elements.
    :type next_link: str
    """
    # AutoRest-generated model: 'value' holds one page of results and
    # 'nextLink' the continuation URL for server-side paging.
    _attribute_map = {
        'value': {'key': 'value', 'type': '[ActionGroupResource]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }
    def __init__(
        self,
        **kwargs
    ):
        super(ActionGroupList, self).__init__(**kwargs)
        self.value = kwargs.get('value', None)
        self.next_link = kwargs.get('next_link', None)
class ActionGroupPatchBody(msrest.serialization.Model):
    """An action group object for the body of patch operations.

    :param tags: A set of tags. Resource tags.
    :type tags: dict[str, str]
    :param enabled: Indicates whether this action group is enabled. If an action group is not
     enabled, then none of its actions will be activated.
    :type enabled: bool
    """
    # 'enabled' is flattened from the 'properties' envelope on the wire.
    _attribute_map = {
        'tags': {'key': 'tags', 'type': '{str}'},
        'enabled': {'key': 'properties.enabled', 'type': 'bool'},
    }
    def __init__(
        self,
        **kwargs
    ):
        super(ActionGroupPatchBody, self).__init__(**kwargs)
        self.tags = kwargs.get('tags', None)
        # Unlike the other optional fields, 'enabled' defaults to True.
        self.enabled = kwargs.get('enabled', True)
class AzureResource(msrest.serialization.Model):
    """An azure resource object.

    Variables are only populated by the server, and will be ignored when sending a request.
    All required parameters must be populated in order to send to Azure.

    :ivar id: Azure resource Id.
    :vartype id: str
    :ivar name: Azure resource name.
    :vartype name: str
    :ivar type: Azure resource type.
    :vartype type: str
    :ivar kind: Azure resource kind.
    :vartype kind: str
    :ivar identity: Azure resource identity.
    :vartype identity: str
    :param location: Required. Resource location.
    :type location: str
    :param tags: A set of tags. Resource tags.
    :type tags: dict[str, str]
    """
    # msrest enforces these rules during serialization: readonly fields are
    # never sent, and 'location' must be present.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'kind': {'readonly': True},
        'identity': {'readonly': True},
        'location': {'required': True},
    }
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'identity': {'key': 'identity', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }
    def __init__(
        self,
        **kwargs
    ):
        super(AzureResource, self).__init__(**kwargs)
        # Server-populated, read-only fields start as None.
        self.id = None
        self.name = None
        self.type = None
        self.kind = None
        self.identity = None
        # Required: raises KeyError if the caller omits 'location'.
        self.location = kwargs['location']
        self.tags = kwargs.get('tags', None)
class ActionGroupResource(AzureResource):
"""An action group resource.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: Azure resource Id.
:vartype id: str
:ivar name: Azure resource name.
:vartype name: str
:ivar type: Azure resource type.
:vartype type: str
:ivar kind: Azure resource kind.
:vartype kind: str
:ivar identity: Azure resource identity.
:vartype identity: str
:param location: Required. Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param group_short_name: The short name of the action group. This will be used in SMS messages.
:type group_short_name: str
:param enabled: Indicates whether this action group is enabled. If an action group is not
enabled, then none of its receivers will receive communications.
:type enabled: bool
:param email_receivers: The list of email receivers that are part of this action group.
:type email_receivers: list[~$(python-base-namespace).v2021_09_01.models.EmailReceiver]
:param sms_receivers: The list of SMS receivers that are part of this action group.
:type sms_receivers: list[~$(python-base-namespace).v2021_09_01.models.SmsReceiver]
:param webhook_receivers: The list of webhook receivers that are part of this action group.
:type webhook_receivers: list[~$(python-base-namespace).v2021_09_01.models.WebhookReceiver]
:param itsm_receivers: The list of ITSM receivers that are part of this action group.
:type itsm_receivers: list[~$(python-base-namespace).v2021_09_01.models.ItsmReceiver]
:param azure_app_push_receivers: The list of AzureAppPush receivers that are part of this
action group.
:type azure_app_push_receivers:
list[~$(python-base-namespace).v2021_09_01.models.AzureAppPushReceiver]
:param automation_runbook_receivers: The list of AutomationRunbook receivers that are part of
this action group.
:type automation_runbook_receivers:
list[~$(python-base-namespace).v2021_09_01.models.AutomationRunbookReceiver]
:param voice_receivers: The list of voice receivers that are part of this action group.
:type voice_receivers: list[~$(python-base-namespace).v2021_09_01.models.VoiceReceiver]
:param logic_app_receivers: The list of logic app receivers that are part of this action group.
:type logic_app_receivers: list[~$(python-base-namespace).v2021_09_01.models.LogicAppReceiver]
:param azure_function_receivers: The list of azure function receivers that are part of this
action group.
:type azure_function_receivers:
list[~$(python-base-namespace).v2021_09_01.models.AzureFunctionReceiver]
:param arm_role_receivers: The list of ARM role receivers that are part of this action group.
Roles are Azure RBAC roles and only built-in roles are supported.
:type arm_role_receivers: list[~$(python-base-namespace).v2021_09_01.models.ArmRoleReceiver]
:param event_hub_receivers: The list of event hub receivers that are part of this action group.
:type
|
cmgrote/tapiriik
|
tapiriik/urls.py
|
Python
|
apache-2.0
| 7,197
| 0.007781
|
from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
# URL routing table for the tapiriik web app. Uses the Django <1.10
# string-view form of url() together with the (since removed) patterns()
# helper -- keep that style for compatibility with the rest of the project.
urlpatterns = patterns('',
    url(r'^$', 'tapiriik.web.views.dashboard', name='dashboard'),

    # OAuth / authentication flows
    url(r'^auth/redirect/(?P<service>[^/]+)$', 'tapiriik.web.views.oauth.authredirect', {}, name='oauth_redirect', ),
    url(r'^auth/redirect/(?P<service>[^/]+)/(?P<level>.+)$', 'tapiriik.web.views.oauth.authredirect', {}, name='oauth_redirect', ),
    url(r'^auth/return/(?P<service>[^/]+)$', 'tapiriik.web.views.oauth.authreturn', {}, name='oauth_return', ),
    url(r'^auth/return/(?P<service>[^/]+)/(?P<level>.+)$', 'tapiriik.web.views.oauth.authreturn', {}, name='oauth_return', ),  # django's URL magic couldn't handle the equivalent regex
    url(r'^auth/login/(?P<service>.+)$', 'tapiriik.web.views.auth_login', {}, name='auth_simple', ),
    url(r'^auth/login-ajax/(?P<service>.+)$', 'tapiriik.web.views.auth_login_ajax', {}, name='auth_simple_ajax', ),
    url(r'^auth/persist-ajax/(?P<service>.+)$', 'tapiriik.web.views.auth_persist_extended_auth_ajax', {}, name='auth_persist_extended_auth_ajax', ),
    url(r'^auth/disconnect/(?P<service>.+)$', 'tapiriik.web.views.auth_disconnect', {}, name='auth_disconnect', ),
    url(r'^auth/disconnect-ajax/(?P<service>.+)$', 'tapiriik.web.views.auth_disconnect_ajax', {}, name='auth_disconnect_ajax', ),
    url(r'^auth/logout$', 'tapiriik.web.views.auth_logout', {}, name='auth_logout', ),

    # Account management
    url(r'^account/setemail$', 'tapiriik.web.views.account_setemail', {}, name='account_set_email', ),
    url(r'^account/settz$', 'tapiriik.web.views.account_settimezone', {}, name='account_set_timezone', ),
    url(r'^account/configure$', 'tapiriik.web.views.account_setconfig', {}, name='account_set_config', ),
    url(r'^account/rollback/?$', 'tapiriik.web.views.account_rollback_initiate', {}, name='account_rollback_initiate', ),
    url(r'^account/rollback/(?P<task_id>.+)$', 'tapiriik.web.views.account_rollback_status', {}, name='account_rollback_status', ),
    url(r'^rollback$', 'tapiriik.web.views.rollback_dashboard', {}, name='rollback_dashboard', ),

    # Per-service configuration
    url(r'^configure/save/(?P<service>.+)?$', 'tapiriik.web.views.config.config_save', {}, name='config_save', ),
    url(r'^configure/dropbox$', 'tapiriik.web.views.config.dropbox', {}, name='dropbox_config', ),
    url(r'^configure/flow/save/(?P<service>.+)?$', 'tapiriik.web.views.config.config_flow_save', {}, name='config_flow_save', ),
    url(r'^settings/?$', 'tapiriik.web.views.settings.settings', {}, name='settings_panel', ),
    url(r'^dropbox/browse-ajax/?$', 'tapiriik.web.views.dropbox.browse', {}, name='dropbox_browse_ajax', ),
    url(r'^dropbox/browse-ajax/(?P<path>.+)?$', 'tapiriik.web.views.dropbox.browse', {}, name='dropbox_browse_ajax', ),

    # Synchronization status / control
    url(r'^sync/status$', 'tapiriik.web.views.sync_status', {}, name='sync_status'),
    url(r'^sync/activity$', 'tapiriik.web.views.sync_recent_activity', {}, name='sync_recent_activity'),
    url(r'^sync/schedule/now$', 'tapiriik.web.views.sync_schedule_immediate', {}, name='sync_schedule_immediate'),
    url(r'^sync/errors/(?P<service>[^/]+)/clear/(?P<group>.+)$', 'tapiriik.web.views.sync_clear_errorgroup', {}, name='sync_clear_errorgroup'),
    url(r'^activities$', 'tapiriik.web.views.activities_dashboard', {}, name='activities_dashboard'),
    url(r'^activities/fetch$', 'tapiriik.web.views.activities_fetch_json', {}, name='activities_fetch_json'),
    url(r'^sync/remote_callback/trigger_partial_sync/(?P<service>.+)$', 'tapiriik.web.views.sync_trigger_partial_sync_callback', {}, name='sync_trigger_partial_sync_callback'),

    # Diagnostics (internal)
    url(r'^diagnostics/$', 'tapiriik.web.views.diag_dashboard', {}, name='diagnostics_dashboard'),
    url(r'^diagnostics/queue$', 'tapiriik.web.views.diag_queue_dashboard', {}, name='diagnostics_queue_dashboard'),
    url(r'^diagnostics/errors$', 'tapiriik.web.views.diag_errors', {}, name='diagnostics_errors'),
    url(r'^diagnostics/error/(?P<error>.+)$', 'tapiriik.web.views.diag_error', {}, name='diagnostics_error'),
    url(r'^diagnostics/graphs$', 'tapiriik.web.views.diag_graphs', {}, name='diagnostics_graphs'),
    url(r'^diagnostics/user/unsu$', 'tapiriik.web.views.diag_unsu', {}, name='diagnostics_unsu'),
    url(r'^diagnostics/user/(?P<user>.+)$', 'tapiriik.web.views.diag_user', {}, name='diagnostics_user'),
    url(r'^diagnostics/payments/$', 'tapiriik.web.views.diag_payments', {}, name='diagnostics_payments'),
    url(r'^diagnostics/ip$', 'tapiriik.web.views.diag_ip', {}, name='diagnostics_ip'),
    url(r'^diagnostics/login$', 'tapiriik.web.views.diag_login', {}, name='diagnostics_login'),
    url(r'^supported-activities$', 'tapiriik.web.views.supported_activities', {}, name='supported_activities'),
    # url(r'^supported-services-poll$', 'tapiriik.web.views.supported_services_poll', {}, name='supported_services_poll'),

    # Payments
    url(r'^payments/claim$', 'tapiriik.web.views.payments_claim', {}, name='payments_claim'),
    url(r'^payments/claim-ajax$', 'tapiriik.web.views.payments_claim_ajax', {}, name='payments_claim_ajax'),
    url(r'^payments/promo-claim-ajax$', 'tapiriik.web.views.payments_promo_claim_ajax', {}, name='payments_promo_claim_ajax'),
    url(r'^payments/claim-wait-ajax$', 'tapiriik.web.views.payments_claim_wait_ajax', {}, name='payments_claim_wait_ajax'),
    url(r'^payments/claim/(?P<code>[a-f0-9]+)$', 'tapiriik.web.views.payments_claim_return', {}, name='payments_claim_return'),
    url(r'^payments/return$', 'tapiriik.web.views.payments_return', {}, name='payments_return'),
    url(r'^payments/confirmed$', 'tapiriik.web.views.payments_confirmed', {}, name='payments_confirmed'),
    url(r'^payments/ipn$', 'tapiriik.web.views.payments_ipn', {}, name='payments_ipn'),
    url(r'^payments/external/(?P<provider>[^/]+)/refresh$', 'tapiriik.web.views.payments_external_refresh', {}, name='payments_external_refresh'),
    url(r'^ab/begin/(?P<key>[^/]+)$', 'tapiriik.web.views.ab_web_experiment_begin', {}, name='ab_web_experiment_begin'),

    # Static pages
    url(r'^privacy$', 'tapiriik.web.views.privacy.privacy', name='privacy'),
    url(r'^garmin_connect_users$', TemplateView.as_view(template_name='static/garmin_connect_users.html'), name='garmin_connect_users'),
    url(r'^faq$', TemplateView.as_view(template_name='static/faq.html'), name='faq'),
    url(r'^credits$', TemplateView.as_view(template_name='static/credits.html'), name='credits'),
    url(r'^contact$', TemplateView.as_view(template_name='static/contact.html'), name='contact'),
    # Examples:
    # url(r'^$', 'tapiriik.views.home', name='home'),
    # url(r'^tapiriik/', include('tapiriik.foo.urls')),
    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    # url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns()
|
uclaros/QGIS
|
python/PyQt/PyQt5/uic/pyuic.py
|
Python
|
gpl-2.0
| 1,079
| 0
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
pyuic.py
---------------------
Date : March 2016
Copyright : (C) 2016 by Juergen E. Fischer
Email : jef at norbit dot de
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; ei
|
ther version 2 of the License, or *
* (at your option) any later version. *
*
|
*
***************************************************************************
"""
__author__ = 'Juergen E. Fischer'
__date__ = 'March 2016'
__copyright__ = '(C) 2016, Juergen E. Fischer'
from PyQt5.uic import pyuic
# Delegate straight to PyQt5's bundled pyuic entry point when it is runnable.
if callable(pyuic.main):
    pyuic.main()
|
lisawei/api_automate_test
|
base.py
|
Python
|
apache-2.0
| 1,990
| 0.00603
|
#coding=utf-8
import httplib
import urllib, urllib2
import json
import base64
import functools
import logging
import time
class RequestApi(object):
    """HTTP helper that issues GET/POST requests via httplib.

    The plain get()/post() variants return the raw response body; the
    *_json variants additionally decode it as JSON.
    NOTE(review): this module targets Python 2 (httplib/urllib imports at
    the top of the file) -- confirm before running under Python 3.
    """

    TimeOut = 3              # per-connection socket timeout, in seconds
    DEBUG_LEVEL = 1          # httplib debug verbosity (see commented-out call below)
    HOST = "api.douban.com"  # default host used when the caller passes host=''

    @classmethod
    def request(cls, method, path, params, headers=None, host=''):
        """Issue one HTTP request and return the raw response body.

        test --- http://api.douban.com/book/subject/1220562?alt=json

        Arguments:
            method  -- "GET" or "POST"
            path    -- request path, e.g. "/book/subject/1220562"
            params  -- dict of query/form parameters; empty-string and None
                       values are dropped before encoding
            headers -- extra request headers merged over the defaults
                       (default changed from a shared mutable {} to None)
            host    -- overrides cls.HOST when non-empty

        Raises whatever httplib raises on failure, after logging it.
        """
        _headers = {'Accept-Language': 'zh-cn', 'User-Agent': 'Python/Automate', "Accept-Charset": "utf-8"}
        _headers.update(headers or {})
        # Preserve original semantics exactly: only the empty string (not
        # None) falls back to the class-level default host.
        host = cls.HOST if host == '' else host
        conn = httplib.HTTPConnection(host, timeout=cls.TimeOut)
        # Build a filtered dict instead of deleting keys while iterating the
        # same dict (unsafe on Python 3, and it mutated the caller's dict).
        params = dict((k, v) for k, v in params.items()
                      if v != '' and v is not None)
        params = urllib.urlencode(params)
        if method == "GET":
            path = "%s?%s" % (path, params)
            params = ''
        else:
            path = "%s" % path
        logging.debug("*[Requst]* %s %s %s" % (method, host + path, params))
        conn.request(method, path, params, _headers)
        #conn.set_debuglevel(cls.DEBUG_LEVEL)
        try:
            r = conn.getresponse()
            data = r.read()
            return data
        except Exception as e:  # 'as' form is valid on Python 2.6+ and Python 3
            logging.error("*[Requst]* %s %s %s request error:%s" % (method, host + path, params, e))
            # Bare re-raise keeps the original traceback (raise e would not).
            raise
        finally:
            conn.close()

    @classmethod
    def get(cls, path, params, headers=None, host=''):
        """GET request; returns the raw body."""
        return cls.request("GET", path, params, headers, host)

    @classmethod
    def get_json(cls, path, params, headers=None, host=''):
        """GET request; returns the JSON-decoded body."""
        return json.loads(cls.request("GET", path, params, headers, host))

    @classmethod
    def post(cls, path, params, headers=None, host=''):
        """POST request; returns the raw body."""
        return cls.request("POST", path, params, headers, host)

    @classmethod
    def post_json(cls, path, params, headers=None, host=''):
        """POST request; returns the JSON-decoded body."""
        return json.loads(cls.request("POST", path, params, headers, host))
|
nkgilley/home-assistant
|
homeassistant/components/pvpc_hourly_pricing/sensor.py
|
Python
|
apache-2.0
| 5,339
| 0.001499
|
"""Sensor to collect the reference daily prices of electricity ('PVPC') in Spain."""
import logging
from random import randint
from typing import Optional
from aiopvpc import PVPCData
from homeassistant import config_entries
from homeassistant.const import CONF_NAME, ENERGY_KILO_WATT_HOUR
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.event import async_call_later, async_track_time_change
from homeassistant.helpers.restore_state import RestoreEntity
import homeassistant.util.dt as dt_util
from .const import ATTR_TARIFF
_LOGGER = logging.getLogger(__name__)
ATTR_PRICE = "price"
ICON = "mdi:currency-eur"
UNIT = f"€/{ENERGY_KILO_WATT_HOUR}"
_DEFAULT_TIMEOUT = 10
async def async_setup_entry(
    hass: HomeAssistant, config_entry: config_entries.ConfigEntry, async_add_entities
):
    """Set up the electricity price sensor from config_entry."""
    name = config_entry.data[CONF_NAME]
    # One shared PVPCData handler performs the API downloads for the
    # sensor entity created below.
    pvpc_data_handler = PVPCData(
        tariff=config_entry.data[ATTR_TARIFF],
        local_timezone=hass.config.time_zone,
        websession=async_get_clientsession(hass),
        logger=_LOGGER,
        timeout=_DEFAULT_TIMEOUT,
    )
    # Second argument False: no update-before-add; the entity schedules its
    # own first refresh in async_added_to_hass.
    async_add_entities(
        [ElecPriceSensor(name, config_entry.unique_id, pvpc_data_handler)], False
    )
class ElecPriceSensor(RestoreEntity):
    """Class to hold the prices of electricity as a sensor.

    Push-driven entity: state changes are triggered by the two time-change
    trackers registered in async_added_to_hass, so polling is disabled.
    """
    unit_of_measurement = UNIT
    icon = ICON
    # Updates are pushed by time-change callbacks, never polled.
    should_poll = False
    def __init__(self, name, unique_id, pvpc_data_handler):
        """Initialize the sensor object."""
        self._name = name
        self._unique_id = unique_id
        self._pvpc_data = pvpc_data_handler
        # Count of consecutive failed price downloads (reset on success).
        self._num_retries = 0
        # Unsubscribe callbacks returned by async_track_time_change;
        # populated in async_added_to_hass, invoked on removal.
        self._hourly_tracker = None
        self._price_tracker = None
    async def async_will_remove_from_hass(self) -> None:
        """Cancel listeners for sensor updates."""
        # Calling the stored unsubscribe functions detaches both trackers.
        self._hourly_tracker()
        self._price_tracker()
    async def async_added_to_hass(self):
        """Handle entity which will be added."""
        await super().async_added_to_hass()
        # Restore the last known state so the sensor has a value before the
        # first successful download.
        state = await self.async_get_last_state()
        if state:
            self._pvpc_data.state = state.state
        # Update 'state' value in hour changes
        self._hourly_tracker = async_track_time_change(
            self.hass, self.update_current_price, second=[0], minute=[0]
        )
        # Update prices at random time, 2 times/hour (don't want to upset API)
        random_minute = randint(1, 29)
        mins_update = [random_minute, random_minute + 30]
        self._price_tracker = async_track_time_change(
            self.hass, self.async_update_prices, second=[0], minute=mins_update
        )
        _LOGGER.debug(
            "Setup of price sensor %s (%s) with tariff '%s', "
            "updating prices each hour at %s min",
            self.name,
            self.entity_id,
            self._pvpc_data.tariff,
            mins_update,
        )
        # Kick off an immediate first download + state refresh.
        await self.async_update_prices(dt_util.utcnow())
        self.update_current_price(dt_util.utcnow())
    @property
    def unique_id(self) -> Optional[str]:
        """Return a unique ID."""
        return self._unique_id
    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name
    @property
    def state(self):
        """Return the state of the sensor."""
        return self._pvpc_data.state
    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        return self._pvpc_data.state_available
    @property
    def device_state_attributes(self):
        """Return the state attributes."""
        return self._pvpc_data.attributes
    @callback
    def update_current_price(self, now):
        """Update the sensor state, by selecting the current price for this hour."""
        self._pvpc_data.process_state_and_attributes(now)
        self.async_write_ha_state()
    async def async_update_prices(self, now):
        """Update electricity prices from the ESIOS API.

        On repeated failures the data source is marked unavailable after the
        third consecutive bad update; retries back off linearly with the
        retry count.
        """
        prices = await self._pvpc_data.async_update_prices(now)
        if not prices and self._pvpc_data.source_available:
            self._num_retries += 1
            if self._num_retries > 2:
                _LOGGER.warning(
                    "%s: repeated bad data update, mark component as unavailable source",
                    self.entity_id,
                )
                self._pvpc_data.source_available = False
                return
            # Back off: delay grows with each consecutive failed attempt.
            retry_delay = 2 * self._num_retries * self._pvpc_data.timeout
            _LOGGER.debug(
                "%s: Bad update[retry:%d], will try again in %d s",
                self.entity_id,
                self._num_retries,
                retry_delay,
            )
            async_call_later(self.hass, retry_delay, self.async_update_prices)
            return
        if not prices:
            _LOGGER.debug("%s: data source is not yet available", self.entity_id)
            return
        # Successful download: reset the failure counter and, if the source
        # had been marked unavailable, restore it and log the recovery.
        self._num_retries = 0
        if not self._pvpc_data.source_available:
            self._pvpc_data.source_available = True
            _LOGGER.warning("%s: component has recovered data access", self.entity_id)
            self.update_current_price(now)
|
aricaldeira/pyxmlsec
|
examples/sign1.py
|
Python
|
gpl-2.0
| 3,625
| 0.006345
|
#!/usr/bin/env python
#
# $Id: sign1.py 363 2006-01-01 18:03:07Z valos $
#
# PyXMLSec example: Signing a template file.
#
# Signs a template file using a key from PEM file
#
# Usage:
# ./sign1.py <xml-tmpl> <pem-key>
#
# Example:
# ./sign1.py sign1-tmpl.xml rsakey.pem > sign1-res.xml
#
# The result signature could be validated using verify1 example:
# ./verify1.py sign1-res.xml rsapub.pem
#
# This is free software; see COPYING file in the source
# distribution for preciese wording.
#
# Copyright (C) 2003-2004 Valery Febvre <vfebvre@easter-eggs.com>
#
import sys
sys.path.insert(0, '../')
import libxml2
import xmlsec
def main():
    """Drive the signing example: validate args, initialize the libxml2 and
    xmlsec stacks, sign the template, then shut everything down and exit with
    the result of sign_file()."""
    assert(sys.argv)
    if len(sys.argv) < 3:
        print "Error: wrong number of arguments."
        print "Usage: %s <xml-tmpl> <pem-key>" % sys.argv[0]
        return sys.exit(1)
    # Init libxml library
    libxml2.initParser()
    libxml2.substituteEntitiesDefault(1)
    # Init xmlsec library
    if xmlsec.init() < 0:
        print "Error: xmlsec initialization failed."
        return sys.exit(-1)
    # Check loaded library version
    if xmlsec.checkVersion() != 1:
        print "Error: loaded xmlsec library version is not compatible.\n"
        sys.exit(-1)
    # Init crypto library
    # NOTE(review): crypto init failures below are only printed, not fatal --
    # execution continues regardless; confirm this is intended.
    if xmlsec.cryptoAppInit(None) < 0:
        print "Error: crypto initialization failed."
    # Init xmlsec-crypto library
    if xmlsec.cryptoInit() < 0:
        print "Error: xmlsec-crypto initialization failed."
    res = sign_file(sys.argv[1], sys.argv[2])
    # Shutdown xmlsec-crypto library
    xmlsec.cryptoShutdown()
    # Shutdown crypto library
    xmlsec.cryptoAppShutdown()
    # Shutdown xmlsec library
    xmlsec.shutdown()
    # Shutdown LibXML2
    libxml2.cleanupParser()
    sys.exit(res)
# Signs the tmpl_file using private key from key_file.
# Returns 0 on success or a negative value if an error occurs.
def sign_file(tmpl_file, key_file):
assert(tmpl_fi
|
le)
assert(key_file)
# Load template
doc = libxml2.parseFile(tmpl_file)
if doc is None or doc.getRootElement() is None:
print "Error:
|
unable to parse file \"%s\"" % tmpl_file
return -1
# Find start node
node = xmlsec.findNode(doc.getRootElement(), xmlsec.NodeSignature,
xmlsec.DSigNs)
if node is None:
print "Error: start node not found in \"%s\"" % tmpl_file
return cleanup(doc)
# Create signature context, we don't need keys manager in this example
dsig_ctx = xmlsec.DSigCtx()
if dsig_ctx is None:
print "Error: failed to create signature context"
return cleanup(doc)
# Load private key, assuming that there is not password
key = xmlsec.cryptoAppKeyLoad(key_file, xmlsec.KeyDataFormatPem,
None, None, None)
if key is None:
print "Error: failed to load private pem key from \"%s\"" % key_file
return cleanup(doc, dsig_ctx)
dsig_ctx.signKey = key
# Set key name to the file name, this is just an example!
if key.setName(key_file) < 0:
print "Error: failed to set key name for key from \"%s\"" % key_file
return cleanup(doc, dsig_ctx)
# Sign the template
if dsig_ctx.sign(node) < 0:
print "Error: signature failed"
return cleanup(doc, dsig_ctx)
# Print signed document to stdout
doc.dump("-")
# Success
return cleanup(doc, dsig_ctx, 1)
def cleanup(doc=None, dsig_ctx=None, res=-1):
    """Release xmlsec/libxml2 resources and pass *res* straight through.

    Tears down in reverse order of creation -- signature context first,
    then the parsed document -- so callers can write ``return cleanup(...)``.
    """
    for resource, release in ((dsig_ctx, "destroy"), (doc, "freeDoc")):
        if resource is not None:
            getattr(resource, release)()
    return res
if __name__ == "__main__":
main()
|
JuliaLang/pyjulia
|
src/julia/tests/test_juliaoptions.py
|
Python
|
mit
| 1,232
| 0
|
import pytest
from julia.core import JuliaOptions
# fmt: off
@pytest.mark.parametrize("kwargs, args", [
    ({}, []),
    (dict(compiled_modules=None), []),
    (dict(compiled_modules=False), ["--compiled-modules", "no"]),
    (dict(compiled_modules="no"), ["--compiled-modules", "no"]),
    (dict(depwarn="error"), ["--depwarn", "error"]),
    (dict(sysimage="PATH"), ["--sysimage", "PATH"]),
    (dict(bindir="PATH"), ["--home", "PATH"]),
])
# fmt: on
def test_as_args(kwargs, args):
    """JuliaOptions(**kwargs) must serialize to the expected CLI argument list."""
    assert JuliaOptions(**kwargs).as_args() == args
@pytest.mark.parametrize("kwargs", [
    dict(compiled_modules="invalid value"),
    dict(bindir=123456789),
])
def test_valueerror(kwargs):
    """Invalid option values must raise a ValueError describing the option."""
    with pytest.raises(ValueError) as excinfo:
        JuliaOptions(**kwargs)
    message = str(excinfo.value)
    assert "Option" in message
    assert "accept" in message
# fmt: off
@pytest.mark.parametrize("kwargs", [
    dict(invalid_option=None),
    dict(invalid_option_1=None, invalid_option_2=None),
])
# fmt: on
def test_unsupported(kwargs):
    """Unknown keyword options must raise a TypeError naming every bad key."""
    with pytest.raises(TypeError) as excinfo:
        JuliaOptions(**kwargs)
    message = str(excinfo.value)
    assert "Unsupported Julia option(s): " in message
    for key in kwargs:
        assert key in message
|
lubao/UjU_Windows
|
src/GammuSender.py
|
Python
|
mit
| 1,013
| 0.008885
|
'''
Created on Jan 18, 2010
@author: Paul
'''
from SQLEng import SQLEng
class PduSender(object):
    """Queue outgoing PDU messages for Gammu-smsd through the database.

    Gammu-smsd polls the sentbox table in MySQL, so inserting a record is
    enough to send a message; invoking gammu via the command line would
    stall the daemon for a while (per the original author's note).
    """

    def __init__(self):
        """No per-instance state; an SQLEng is created for each send()."""
        pass

    def get_mesg(self, byte_array):
        """Return *byte_array* encoded as a lowercase hex string.

        Each byte (assumed to be in 0..255 -- TODO confirm callers) becomes
        exactly two zero-padded hex digits, e.g. [0, 1, 255] -> "0001ff".
        Replaces the original hand-rolled hex()/lstrip logic with an
        equivalent "%02x" format.
        """
        return "".join("%02x" % byte for byte in byte_array)

    def send(self, to, byte_array):
        """Insert one message addressed to *to* into the sentbox table."""
        sEng = SQLEng()
        sEng.exeSQL(sEng.getInsetSentBox(to, self.get_mesg(byte_array)))
|
i3visio/osrframework
|
osrframework/wrappers/pending/streakgaming.py
|
Python
|
agpl-3.0
| 4,315
| 0.009042
|
# !/usr/bin/python
# -*- coding: cp1252 -*-
#
##################################################################################
#
# Copyright 2016 Félix Brezo and Yaiza Rubio (i3visio, contacto@i3visio.com)
#
# This program is part of OSRFramework. You can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have recei
|
ved a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##################################################################################
__author__ = "Yaiza Rubio and Félix Brezo <contacto@i3visio.com>"
__version__ = "1.1"
import argparse
import json
import re
import sys
import urllib2
import osrframework.utils.browser as browser
from osrframework.utils.platforms import Platform
class Streakgaming(Platform):
    """
    A <Platform> object for Streakgaming.

    Pure configuration subclass: __init__ only fills in the lookup tables
    (valid modes, URL patterns, not-found markers) that the Platform base
    class machinery consumes.
    """
    def __init__(self):
        """
        Constructor...
        """
        self.platformName = "Streakgaming"
        self.tags = ["social", "news", "gaming"]

        ########################
        # Defining valid modes #
        ########################
        self.isValidMode = {}
        self.isValidMode["phonefy"] = False
        self.isValidMode["usufy"] = True
        self.isValidMode["searchfy"] = False

        ######################################
        # Search URL for the different modes #
        ######################################
        # Strings with the URL for each and every mode
        self.url = {}
        #self.url["phonefy"] = "http://anyurl.com//phone/" + "<phonefy>"
        self.url["usufy"] = "http://www.streakgaming.com/forum/members/" + "<usufy>" + ".html"
        #self.url["searchfy"] = "http://anyurl.com/search/" + "<searchfy>"

        ######################################
        # Whether the user needs credentials #
        ######################################
        self.needsCredentials = {}
        #self.needsCredentials["phonefy"] = False
        self.needsCredentials["usufy"] = False
        #self.needsCredentials["searchfy"] = False

        #################
        # Valid queries #
        #################
        # Strings that will imply that the query number is not appearing
        self.validQuery = {}
        # The regular expression '.+' will match any query.
        #self.validQuery["phonefy"] = ".*"
        self.validQuery["usufy"] = ".+"
        #self.validQuery["searchfy"] = ".*"

        ###################
        # Not_found clues #
        ###################
        # Strings that will imply that the query number is not appearing
        self.notFoundText = {}
        #self.notFoundText["phonefy"] = []
        self.notFoundText["usufy"] = ["<title>Streak Gaming Online Gambling Forum</title>"]
        #self.notFoundText["searchfy"] = []

        #########################
        # Fields to be searched #
        #########################
        self.fieldsRegExp = {}
        # Definition of regular expressions to be searched in phonefy mode
        #self.fieldsRegExp["phonefy"] = {}
        # Example of fields:
        #self.fieldsRegExp["phonefy"]["i3visio.location"] = ""
        # Definition of regular expressions to be searched in usufy mode
        self.fieldsRegExp["usufy"] = {}
        # Example of fields:
        #self.fieldsRegExp["usufy"]["i3visio.location"] = ""
        # Definition of regular expressions to be searched in searchfy mode
        #self.fieldsRegExp["searchfy"] = {}
        # Example of fields:
        #self.fieldsRegExp["searchfy"]["i3visio.location"] = ""

        ################
        # Fields found #
        ################
        # This attribute will be feeded when running the program.
        self.foundFields = {}
|
RedhawkSDR/integration-gnuhawk
|
components/sig_source_i/tests/test_sig_source_i.py
|
Python
|
gpl-3.0
| 4,531
| 0.006621
|
#!/usr/bin/env python
#
# This file is protected by Copyright. Please refer to the COPYRIGHT file
# distributed with this source distribution.
#
# This file is part of GNUHAWK.
#
# GNUHAWK is free software: you can redistribute it and/or modify is under the
# terms of the GNU General Public
|
License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# GNUHAWK is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of
|
MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along with
# this program. If not, see http://www.gnu.org/licenses/.
#
import unittest
import ossie.utils.testing
import os
from omniORB import any
class ComponentTests(ossie.utils.testing.ScaComponentTestCase):
    """Test for all component implementations in sig_source_i"""
    def testScaBasicBehavior(self):
        """Smoke-test the SCA component contract: launch with default
        execparams, verify CORBA identity, initialize/configure, query all
        expected properties, check every uses/provides port, then
        start/stop and release. Steps are order-dependent (launch must
        precede everything else)."""
        #######################################################################
        # Launch the component with the default execparams
        execparams = self.getPropertySet(kinds=("execparam",), modes=("readwrite", "writeonly"), includeNil=False)
        execparams = dict([(x.id, any.from_any(x.value)) for x in execparams])
        self.launch(execparams)
        #######################################################################
        # Verify the basic state of the component
        self.assertNotEqual(self.comp, None)
        self.assertEqual(self.comp.ref._non_existent(), False)
        self.assertEqual(self.comp.ref._is_a("IDL:CF/Resource:1.0"), True)
        self.assertEqual(self.spd.get_id(), self.comp.ref._get_identifier())
        #######################################################################
        # Simulate regular component startup
        # Verify that initialize nor configure throw errors
        self.comp.initialize()
        configureProps = self.getPropertySet(kinds=("configure",), modes=("readwrite", "writeonly"), includeNil=False)
        self.comp.configure(configureProps)
        #######################################################################
        # Validate that query returns all expected parameters
        # Query of '[]' should return the following set of properties
        expectedProps = []
        expectedProps.extend(self.getPropertySet(kinds=("configure", "execparam"), modes=("readwrite", "readonly"), includeNil=True))
        expectedProps.extend(self.getPropertySet(kinds=("allocate",), action="external", includeNil=True))
        props = self.comp.query([])
        props = dict((x.id, any.from_any(x.value)) for x in props)
        # Query may return more than expected, but not less
        # NOTE: dict.has_key / assertEquals are Python 2 era APIs.
        for expectedProp in expectedProps:
            self.assertEquals(props.has_key(expectedProp.id), True)
        #######################################################################
        # Verify that all expected ports are available
        for port in self.scd.get_componentfeatures().get_ports().get_uses():
            port_obj = self.comp.getPort(str(port.get_usesname()))
            self.assertNotEqual(port_obj, None)
            self.assertEqual(port_obj._non_existent(), False)
            self.assertEqual(port_obj._is_a("IDL:CF/Port:1.0"), True)
        for port in self.scd.get_componentfeatures().get_ports().get_provides():
            port_obj = self.comp.getPort(str(port.get_providesname()))
            self.assertNotEqual(port_obj, None)
            self.assertEqual(port_obj._non_existent(), False)
            self.assertEqual(port_obj._is_a(port.get_repid()), True)
        #######################################################################
        # Make sure start and stop can be called without throwing exceptions
        self.comp.start()
        self.comp.stop()
        #######################################################################
        # Simulate regular component shutdown
        self.comp.releaseObject()
    # TODO Add additional tests here
    #
    # See:
    #   ossie.utils.bulkio.bulkio_helpers,
    #   ossie.utils.bluefile.bluefile_helpers
    # for modules that will assist with testing components with BULKIO ports
if __name__ == "__main__":
ossie.utils.testing.main("../sig_source_i.spd.xml") # By default tests all implementations
|
wavefrontHQ/wavefront-collector
|
wavefront/awsbilling.py
|
Python
|
apache-2.0
| 17,443
| 0.001433
|
"""
This module handles parsing the AWS Billing Reports (stored on S3 in .zip
or just plain .csv format) and creating metrics to be sent to the WF proxy.
"""
import ConfigParser
import datetime
import io
import os
import sys
import time
import traceback
import zipfile
import logging.config
import dateutil
from wavefront.aws_common import AwsBaseMetricsCommand, AwsBaseMetricsConfiguration
from wavefront import utils
#pylint: disable=too-few-public-methods
#pylint: disable=too-many-instance-attributes
class AwsBillingConfiguration(AwsBaseMetricsConfiguration):
    """Billing-specific view over the collector configuration file.

    Reads the ``aws_billing`` section and builds one
    AwsBillingDetailThreadConfiguration per name listed in
    ``billing_threads`` (each from its own ``billing-<name>`` section).
    """

    def __init__(self, config_file_path):
        super(AwsBillingConfiguration, self).__init__(
            config_file_path=config_file_path)

        section = 'aws_billing'
        self.enabled = self.getboolean(section, 'enabled', False)
        self.role_arn = self.get(section, 'role_arn', None)
        self.role_external_id = self.get(section, 'external_id', None)
        self.billing_thread_names = self.getlist(section, 'billing_threads', [])
        self.ec2_tag_keys = self.getlist(section, 'ec2_tag_keys', [])
        # One detail-thread configuration per "billing-<name>" section.
        self.billing_threads = [
            AwsBillingDetailThreadConfiguration(self, 'billing-' + name)
            for name in self.billing_thread_names
        ]

    def validate(self):
        """No billing-specific validation is required."""
        pass

    def get_region_config(self, _):
        """Return this object itself, regardless of the region name given."""
        return self
#pylint: disable=too-few-public-methods
#pylint: disable=too-many-instance-attributes
class AwsBillingDetailThreadConfiguration(object):
    """
    Configuration for a billing detail section in the configuration file.

    Parses one ``billing-<name>`` section: S3 location of the CSV report,
    column-name mappings (dimensions, metrics, dates, etc.) and throttling /
    checkpointing knobs used by the billing processing thread.
    """

    def __init__(self, config, section_name):
        """
        Arguments:
        config - the parent configuration object (provides get/getlist/
                 getboolean and the persisted output store)
        section_name - the billing-detail section to read
        """
        super(AwsBillingDetailThreadConfiguration, self).__init__()

        self.config = config
        self.section_name = section_name
        self.last_run_time_section = section_name
        self.tmp_dir = self.config.get(section_name, 'tmp_dir', '/tmp/')
        self.namespace = self.config.get(section_name, 'namespace', None)
        self.enabled = self.config.getboolean(section_name, 'enabled', False)
        self.region = self.config.get(section_name, 's3_region', None)
        self.bucket = self.config.get(section_name, 's3_bucket', None)
        self.prefix = self.config.get(section_name, 's3_prefix', None)
        self.header_row_index = int(
            self.config.get(section_name, 'header_row_index', 1))
        self.dimensions = self._build_table(
            self.config.getlist(section_name, 'dimension_column_names', []))
        self.metrics = self._build_table(
            self.config.getlist(section_name, 'metric_column_names', []))
        self.source_names = self.config.getlist(section_name, 'source_names', [])
        # Date columns use '|' as the key/value separator because the value
        # part (a date format string) may itself contain ':'.
        self.dates = self._build_table(
            self.config.getlist(section_name, 'date_column_names', []), '|')
        self.duration = self.config.getlist(section_name, 'duration_column_names', [])
        self.instance_id_columns = self.config.getlist(
            section_name, 'instance_id_column_names', [])
        self.delay = int(self.config.get(section_name, 'delay', 3600))
        self.record_id_column = self.config.get(
            section_name, 'record_id_column_name', None)
        self.maximum_number_of_rows = int(self.config.get(
            section_name, 'maximum_number_of_rows', 0))
        self.sleep_after_rows = int(self.config.get(
            section_name, 'sleep_after_rows', 0))
        self.sleep_ms = float(self.config.get(
            section_name, 'sleep_ms', 0.0)) / 1000

    @staticmethod
    def _build_table(lst, delimiter=':'):
        """
        Build a dictionary from a list of delimiter-separated key-value pairs
        Arguments:
        lst - list of strings
        delimiter - delimiter between components of each string in the lst
        Returns:
        dictionary with the key being the string on the left side of
        the delimiter and the value of the dictionary key being the string
        on the right side. An item without the delimiter maps to itself;
        items with more than one delimiter are silently ignored.
        """
        rtn = {}
        if lst:
            for item in lst:
                parts = item.split(delimiter)
                if len(parts) == 1:
                    rtn[parts[0]] = parts[0]
                elif len(parts) == 2:
                    rtn[parts[0]] = parts[1]
        return rtn

    def get_last_record_id(self, curr_month):
        """
        Gets the last record id for the given month
        """
        return self.config.output.get(
            self.section_name, 'last_record_id_' + curr_month, None)

    def set_last_record_id(self, curr_month, record_id):
        """
        Sets the last record id read (no-op when record_id is falsy)
        Arguments:
        record_id - last record id
        """
        if not record_id:
            return
        self.config.output.set(
            self.section_name, 'last_record_id_' + curr_month, record_id)
        self.config.output.save()
class AwsBillingMetricsCommand(AwsBaseMetricsCommand):
"""
Billing metrics command object. Grabs metrics from billing CSV files.
"""
    def __init__(self, **kwargs):
        # No billing-specific state at construction time; all configuration
        # is loaded later in _initialize().
        super(AwsBillingMetricsCommand, self).__init__(**kwargs)
    def _initialize(self, args):
        """
        Initialize this command: load and validate the billing
        configuration, then configure logging from the same file.
        Arguments:
        args - the argparse namespace returned from the argument parser
               (only config_file_path is used)
        """
        self.config = AwsBillingConfiguration(args.config_file_path)
        self.config.validate()
        try:
            logging.config.fileConfig(args.config_file_path)
        except ConfigParser.NoSectionError:
            # The config file may legitimately omit logging sections;
            # fall back to the root logger's defaults.
            pass
        self.logger = logging.getLogger()
def _process(self):
"""
Processes the latest billing details CSV file. A few helpful sites:
http://www.dowdandassociates.com/products/cloud-billing/documentation/1.0/schema/
http://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/detailed-billing-reports.html#reportstagsresources
"""
utcnow = (datetime.datetime.utcnow()
.replace(microsecond=0, tzinfo=dateutil.tz.tzutc()))
if utils.CANCEL_WORKERS_EVENT.is_set():
return
if not self.config.enabled:
self.logger.info('Billing is disabled')
return
for config in self.config.billing_threads:
if utils.CANCEL_WORKERS_EVENT.is_set():
break
try:
if config.enabled:
last_run_time = config.config.get_last_run_time()
if last_run_time:
diff = utcnow - last_run_time
if diff.total_seconds() <= config.delay:
self.logger.info('Not ready to run %s (last run at '
'%s; expected delay interval is %ds)',
config.section_name,
str(last_run_time),
config.delay)
continue
if config.bucket == 'local':
self.logger.info('Running in local mode ...')
self._get_csv_from_local(config)
else:
self._get_csv_from_s3(config)
config.config.set_last_run_time(utcnow, None, True)
else:
self.logger.info('Billing thread %s is disabled',
config.section_name)
#pylint: disable=bare-except
except:
self.logger.error('%s failed: %s', config.section_name,
sys.exc_info()[1])
traceback.print_exc()
def _get_csv_from_local(self, config):
"""
Opens a CSV file on the local machine
Arguments:
config - the AwsBillingD
|
dbrattli/RxPY
|
tests/test_observable/test_withlatestfrom.py
|
Python
|
apache-2.0
| 14,723
| 0.001019
|
import unittest
from rx import Observable
from rx.testing import TestScheduler, ReactiveTest, is_prime, MockDisposable
from rx.disposables import Disposable, SerialDisposable
# Shorthand aliases for the ReactiveTest message/record factories used by the
# tests below.
on_next = ReactiveTest.on_next
on_completed = ReactiveTest.on_completed
on_error = ReactiveTest.on_error
subscribe = ReactiveTest.subscribe
subscribed = ReactiveTest.subscribed
disposed = ReactiveTest.disposed
created = ReactiveTest.created
class RxException(Exception):
    """Marker exception raised deliberately inside test observables."""
    pass
# Helper function for raising exceptions within lambdas
def _raise(ex):
    """Raise *ex* wrapped in RxException (callable from inside a lambda)."""
    raise RxException(ex)
class TestWithLatestFrom(unittest.TestCase):
def test_with_latest_from_never_never(self):
scheduler = TestScheduler()
e1 = Observable.never()
e2 = Observable.never()
def create():
return e1.with_latest_from(e2, lambda x, y: x + y)
results = scheduler.start(create)
results.messages.assert_equal()
def test_with_latest_from_never_empty(self):
scheduler = TestScheduler()
msgs = [on_next(150, 1), on_completed(210)]
e1 = Observable.never()
e2 = scheduler.create_hot_observable(msgs)
def create():
return e1.with_latest_from(e2, lambda x, y: x + y)
results = scheduler.start(create)
results.messages.assert_equal()
def test_with_latest_from_empty_never(self):
scheduler = TestScheduler()
msgs = [on_next(150, 1), on_completed(210)]
e1 = Observable.never()
e2 = scheduler.create_hot_observable(msgs)
def create():
return e2.with_latest_from(e1, lambda x, y: x + y)
results = scheduler.start(create)
results.messages.assert_equal(on_completed(210))
def test_with_latest_from_empty_empty(self):
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_completed(210)]
msgs2 = [on_next(150, 1), on_completed(210)]
e1 = scheduler.create_hot_observable(msgs1)
e2 = scheduler.create_hot_observable(msgs2)
def create():
return e2.with_latest_from(e1, lambda x, y: x + y)
results = scheduler.start(create)
results.messages.assert_equal(on_completed(210))
def test_with_latest_from_empty_return(self):
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_completed(210)]
msgs2 = [on_next(150, 1), on_next(215, 2), on_completed(220)]
e1 = scheduler.create_hot_observable(msgs1)
e2 = scheduler.create_hot_observable(msgs2)
def create():
return e1.with_latest_from(e2, lambda x, y: x + y)
results = scheduler.start(create)
results.messages.assert_equal(on_completed(210))
def test_with_latest_from_return_empty(self):
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_completed(210)]
msgs2 = [on_next(150, 1), on_next(215, 2), on_completed(220)]
e1 = scheduler.create_hot_observable(msgs1)
e2 = scheduler.create_hot_observable(msgs2)
def create():
return e2.with_latest_from(e1, lambda x, y: x + y)
results = scheduler.start(create)
results.messages.assert_equal(on_completed(220))
def test_with_latest_from_never_return(self):
scheduler = TestScheduler()
msgs = [on_next(150, 1), on_next(215, 2), on_completed(220)]
e1 = scheduler.create_hot_observable(msgs)
e2 = Observable.never()
def create():
return e1.with_latest_from(e2, lambda x, y: x + y)
results = scheduler.start(create)
results.messages.assert_equal(on_completed(220))
def test_with_latest_from_return_never(self):
scheduler = TestScheduler()
msgs = [on_next(150, 1), on_next(215, 2), on_completed(210)]
e1 = scheduler.create_hot_observable(msgs)
e2 = Observable.never()
def create():
return e2.with_latest_from(e1, lambda x, y: x + y)
results = scheduler.start(create)
results.messages.assert_equal()
def test_with_latest_from_return_return(self):
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_next(215, 2), on_completed(230)]
msgs2 = [on_next(150, 1), on_next(220, 3), on_completed(240)]
e1 = scheduler.create_hot_observable(msgs1)
e2 = scheduler.create_hot_observable(msgs2)
def create():
return e1.with_latest_from(e2, lambda x, y: x + y)
results = scheduler.start(create)
results.messages.assert_equal(on_completed(230))
def test_with_latest_from_empty_error(self):
ex = 'ex'
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_completed(230)]
msgs2 = [on_next(150, 1), on_error(220, ex)]
e1 = scheduler.create_hot_observable(msgs1)
e2 = scheduler.create_hot_observable(msgs2)
def create():
return e1.with_latest_from(e2, lambda x, y: x + y)
results = scheduler.start(create)
results.messages.assert_equal(on_error(220, ex))
def test_with_latest_from_error_empty(self):
ex = 'ex'
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_completed(230)]
msgs2 = [on_next(150, 1), on_error(220, ex)]
e1 = scheduler.create_hot_observable(msgs1)
e2 = scheduler.create_hot_observable(msgs2)
def create():
return e2.with_latest_from(e1, lambda x, y: x + y)
results = scheduler.start(create)
results.messages.assert_equal(on_error(220, ex))
def test_with_latest_from_return_throw(self):
ex = 'ex'
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_next(210, 2), on_completed(230)]
msgs2 = [on_next(150, 1), on_error(220, ex)]
e1 = scheduler.create_hot_observable(msgs1)
e2 = scheduler.create_hot_observable(msgs2)
def create():
return e1.with_latest_from(e2, lambda x, y: x + y)
results = scheduler.start(create)
results.messages.assert_equal(on_error(220, ex))
def test_with_latest_from_throw_return(self):
ex = 'ex'
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_next(210, 2), on_completed(230)]
msgs2 = [on_next(150, 1), on_error(220, ex)]
e1 = scheduler.create_hot_observable(msgs1)
e2 = scheduler.create_hot_observable(msgs2)
def create():
return e2.with_latest_from(e1, lambda x, y: x + y)
results = scheduler.start(create)
results.messages.assert_equal(on_error(220, ex))
def test_with_latest_from_throw_throw(self):
ex1 = 'ex1'
ex2 = 'ex2'
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_error(220, ex1)]
msgs2 = [on_next(150, 1), on_error(230, ex2)]
e1 = scheduler.create_hot_observable(msgs1)
e2 = scheduler.create_hot_observable(msgs2)
def create():
return e1.with_latest_from(e2, lambda x, y: x + y)
results = scheduler.start(create)
results.messages.assert_equal(on_error(220, ex1))
def test_with_latest_from_error_throw(self):
ex1 = 'ex1'
ex2 = 'ex2'
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_next(210, 2), on_error(220, ex1)]
msgs2 = [on_next(150, 1), on_error(230, ex2)]
e1 = scheduler.create_hot_observable(msgs1)
e2 = scheduler.create_hot_observable(msgs2)
def create():
return e1.with_latest_from(e2, lambda x, y: x + y)
results = scheduler.start(create)
results.messages.assert_equal(on_error(220, ex1))
def test_with_latest_from_throw_error
|
(self):
ex1 = 'ex1'
ex2 = 'ex2'
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_next(210, 2), on_error(220, ex1)]
msgs2 = [on_next(150, 1), on_error(230, ex2)]
e1 = scheduler.create_hot_observ
|
able(msgs1)
e2 = scheduler.create_hot_observable(msgs2)
def create():
return e2.with_latest_from(e1, lambda x, y: x + y)
results = scheduler.start(create)
results.messages.assert_equal(on_
|
xiandiancloud/edxplaltfom-xusong
|
lms/djangoapps/instructor/views/coupons.py
|
Python
|
agpl-3.0
| 6,252
| 0.003039
|
"""
E-commerce Tab Instructor Dashboard Coupons Operations views
"""
from django.contrib.auth.decorators import login_required
from django.core.exceptions import ObjectDoesNotExist
from django.db.models import Q
from django.views.decorators.http import require_POST
from django.utils.translation import ugettext as _
from util.json_request import JsonResponse
from django.http import HttpResponse, HttpResponseNotFound
from shoppingcart.models import Coupon, CourseRegistrationCode
import logging
log = logging.getLogger(__name__)
@require_POST
@login_required
def remove_coupon(request, course_id):  # pylint: disable=W0613
    """
    Deactivate the coupon identified by the POSTed 'id'.

    The coupon row is kept; only its is_active flag is cleared.
    Returns HTTP 400 for a missing id, an unknown coupon, or a coupon that
    is already inactive.
    """
    coupon_id = request.POST.get('id', None)
    if not coupon_id:
        return JsonResponse(
            {'message': _('coupon id is None')},
            status=400)  # status code 400: Bad Request
    try:
        coupon = Coupon.objects.get(id=coupon_id)
    except ObjectDoesNotExist:
        return JsonResponse(
            {'message': _('coupon with the coupon id ({coupon_id}) DoesNotExist').format(coupon_id=coupon_id)},
            status=400)  # status code 400: Bad Request
    if not coupon.is_active:
        return JsonResponse(
            {'message': _('coupon with the coupon id ({coupon_id}) is already inactive').format(coupon_id=coupon_id)},
            status=400)  # status code 400: Bad Request
    coupon.is_active = False
    coupon.save()
    return JsonResponse(
        {'message': _('coupon with the coupon id ({coupon_id}) updated successfully').format(coupon_id=coupon_id)}
    )  # status code 200: OK by default
@require_POST
@login_required
def add_coupon(request, course_id):  # pylint: disable=W0613
    """
    Create a new coupon from the POSTed form fields.

    Rejects codes that already exist as active coupons or as course
    registration codes, and discounts that are non-integer or above 100.
    """
    code = request.POST.get('code')
    # an active coupon with this code must not already exist
    active_with_code = Coupon.objects.filter(is_active=True, code=code)
    if active_with_code:
        return HttpResponseNotFound(_("coupon with the coupon code ({code}) already exist").format(code=code))
    # the code must not clash with a course registration code
    registration_codes = CourseRegistrationCode.objects.filter(code=code)
    if registration_codes:
        return HttpResponseNotFound(_(
            "The code ({code}) that you have tried to define is already in use as a registration code").format(code=code)
        )
    description = request.POST.get('description')
    course_id = request.POST.get('course_id')
    try:
        discount = int(request.POST.get('discount'))
    except ValueError:
        return HttpResponseNotFound(_("Please Enter the Integer Value for Coupon Discount"))
    if discount > 100:
        return HttpResponseNotFound(_("Please Enter the Coupon Discount Value Less than or Equal to 100"))
    new_coupon = Coupon(
        code=code, description=description, course_id=course_id,
        percentage_discount=discount, created_by_id=request.user.id
    )
    new_coupon.save()
    return HttpResponse(_("coupon with the coupon code ({code}) added successfully").format(code=code))
@require_POST
@login_required
def update_coupon(request, course_id):  # pylint: disable=W0613
    """
    Update an existing coupon from the POSTed form fields.

    Validates that the coupon exists, that the (possibly changed) code does
    not collide with another active coupon or a registration code, and that
    the discount is an integer no greater than 100.
    """
    coupon_id = request.POST.get('coupon_id', None)
    if not coupon_id:
        return HttpResponseNotFound(_("coupon id not found"))
    try:
        coupon = Coupon.objects.get(pk=coupon_id)
    except ObjectDoesNotExist:
        return HttpResponseNotFound(_("coupon with the coupon id ({coupon_id}) DoesNotExist").format(coupon_id=coupon_id))
    code = request.POST.get('code')
    # another active coupon (different id) must not already use this code
    filtered_coupons = Coupon.objects.filter(~Q(id=coupon_id), code=code, is_active=True)
    if filtered_coupons:
        return HttpResponseNotFound(_("coupon with the coupon id ({coupon_id}) already exists").format(coupon_id=coupon_id))
    # check if the coupon code is in the CourseRegistrationCode Table
    course_registration_code = CourseRegistrationCode.objects.filter(code=code)
    if course_registration_code:
        return HttpResponseNotFound(_(
            "The code ({code}) that you have tried to define is already in use as a registration code").format(code=code)
        )
    description = request.POST.get('description')
    course_id = request.POST.get('course_id')
    try:
        discount = int(request.POST.get('discount'))
    except ValueError:
        return HttpResponseNotFound(_("Please Enter the Integer Value for Coupon Discount"))
    if discount > 100:
        return HttpResponseNotFound(_("Please Enter the Coupon Discount Value Less than or Equal to 100"))
    coupon.code = code
    coupon.description = description
    coupon.course_id = course_id
    coupon.percentage_discount = discount
    coupon.save()
    return HttpResponse(_("coupon with the coupon id ({coupon_id}) updated Successfully").format(coupon_id=coupon_id))
@require_POST
@login_required
def get_coupon_info(request, course_id):  # pylint: disable=W0613
    """
    get the coupon information to display in the pop up form

    Returns the coupon's code, description, course id and discount, or an
    HTTP 400 JSON error for a missing id, unknown coupon, or inactive coupon.
    """
    coupon_id = request.POST.get('id', None)
    if not coupon_id:
        return JsonResponse({
            'message': _("coupon id not found")
        }, status=400)  # status code 400: Bad Request
    try:
        coupon = Coupon.objects.get(id=coupon_id)
    except ObjectDoesNotExist:
        return JsonResponse({
            'message': _("coupon with the coupon id ({coupon_id}) DoesNotExist").format(coupon_id=coupon_id)
        }, status=400)  # status code 400: Bad Request
    if not coupon.is_active:
        return JsonResponse({
            'message': _("coupon with the coupon id ({coupon_id}) is already inactive").format(coupon_id=coupon_id)
        }, status=400)  # status code 400: Bad Request
    return JsonResponse({
        'coupon_code': coupon.code,
        'coupon_description': coupon.description,
        'coupon_course_id': coupon.course_id.to_deprecated_string(),
        'coupon_discount': coupon.percentage_discount,
        # Bug fix: the message previously said "updated successfully",
        # copy-pasted from update_coupon; this endpoint only reads.
        'message': _('coupon with the coupon id ({coupon_id}) fetched successfully').format(coupon_id=coupon_id)
    })  # status code 200: OK by default
|
tiancj/emesene
|
emesene/e3/common/utils.py
|
Python
|
gpl-3.0
| 2,941
| 0.00306
|
# -*- coding: utf-8 -*-
# This file is part of emesene.
#
# emesene is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# emesene is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with emesene; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import MessageFormatter
import threading
def add_style_to_message(text, stl, escape=True):
    '''Wrap *text* in xhtml-like tags describing the style *stl*.

    When *escape* is True the text is escaped via MessageFormatter first.
    A None style returns the (possibly escaped) text unchanged. Tag nesting
    order is bold, italic, underline, strike, then an inner <span> carrying
    the colour (and optionally the font family).
    '''
    if escape:
        text = MessageFormatter.escape(text)
    if stl is None:
        return text
    open_tags = ''
    close_tags = ''
    css = 'color: #' + stl.color.to_hex() + '; '
    if stl.bold:
        open_tags += '<b>'
        close_tags = '</b>' + close_tags
    if stl.italic:
        open_tags += '<i>'
        close_tags = '</i>' + close_tags
    if stl.underline:
        open_tags += '<u>'
        close_tags = '</u>' + close_tags
    if stl.strike:
        open_tags += '<s>'
        close_tags = '</s>' + close_tags
    if stl.font:
        css += "font-family: %s; " % (stl.font, )
    open_tags += '<span style="%s">' % (css, )
    close_tags = '</span>' + close_tags
    return open_tags + text + close_tags
class PercentDone(object):
    '''Track progress as an integer percentage of a total amount.'''

    def __init__(self, total):
        self.__total = total
        self.__current = 0

    @property
    def current(self):
        '''Last percentage computed by notify() (integer).'''
        return self.__current

    @property
    def total(self):
        '''Amount that corresponds to 100%.'''
        return self.__total

    @total.setter
    def total(self, total):
        self.__total = total

    def notify(self, q):
        '''Update the percentage for quantity *q*; return True if it changed.'''
        pct = (int)((q / self.__total) * 100.0)
        changed = pct != self.__current
        self.__current = pct
        return changed
class AsyncAction(threading.Thread):
    """Run *func* on a background thread and hand the result to *callback*.

    The thread starts immediately on construction.
    """

    def __init__(self, callback, func, *args, **kwargs):
        threading.Thread.__init__(self)
        self._result = None
        self._callback = callback
        self._func = func
        self._args = args
        self._kwargs = kwargs
        self.start()

    def run(self):
        result = self._func(*self._args, **self._kwargs)
        # Bug fix: the original discarded the result (only passing it to the
        # callback), so get_result() always returned None.
        self._result = result
        self._callback(result)

    def is_finished(self):
        """Return True once run() has produced a (non-None) result.

        Bug fix: the original returned ``self._result is None`` with
        _result never assigned, so it always reported finished. Note that a
        func legitimately returning None still reads as unfinished.
        """
        return self._result is not None

    def get_result(self):
        """Return the function's result, or None if not finished yet."""
        return self._result
|
dials/dials
|
tests/algorithms/indexing/test_symmetry.py
|
Python
|
bsd-3-clause
| 7,130
| 0.001823
|
from __future__ import annotations
import pytest
import scitbx.matrix
from cctbx import crystal, sgtbx, uctbx
from cctbx.sgtbx import bravais_types
from dxtbx.model import Crystal
from dials.algorithms.indexing import symmetry
@pytest.mark.parametrize("space_group_symbol", bravais_types.acentric)
def test_SymmetryHandler(space_group_symbol):
    """Exercise SymmetryHandler construction and apply_symmetry for every
    acentric Bravais-type space group, with and without a target unit cell."""
    sgi = sgtbx.space_group_info(symbol=space_group_symbol)
    sg = sgi.group()
    cs = sgi.any_compatible_crystal_symmetry(volume=10000)
    uc = cs.unit_cell()

    handler = symmetry.SymmetryHandler(unit_cell=uc, space_group=sg)
    patterson_info = sg.build_derived_patterson_group().info()
    assert (
        handler.target_symmetry_primitive.space_group()
        == patterson_info.primitive_setting().group()
    )
    assert (
        handler.target_symmetry_reference_setting.space_group()
        == patterson_info.reference_setting().group()
    )

    # test apply_symmetry on the primitive setting
    cs_primitive = cs.primitive_setting()
    B = scitbx.matrix.sqr(
        cs_primitive.unit_cell().fractionalization_matrix()
    ).transpose()
    xtal = Crystal(B, sgtbx.space_group())
    new_xtal, cb_op = handler.apply_symmetry(xtal)
    new_xtal.get_crystal_symmetry(assert_is_compatible_unit_cell=True)

    # test apply_symmetry on the minimum cell setting
    cs_min_cell = cs.minimum_cell()
    B = scitbx.matrix.sqr(
        cs_min_cell.unit_cell().fractionalization_matrix()
    ).transpose()
    xtal = Crystal(B, sgtbx.space_group())
    new_xtal, cb_op = handler.apply_symmetry(xtal)
    new_xtal.get_crystal_symmetry(assert_is_compatible_unit_cell=True)

    # A handler built without a unit cell carries space-group targets only.
    handler = symmetry.SymmetryHandler(space_group=sg)
    assert handler.target_symmetry_primitive.unit_cell() is None
    assert (
        handler.target_symmetry_primitive.space_group()
        == patterson_info.primitive_setting().group()
    )
    assert handler.target_symmetry_reference_setting.unit_cell() is None
    assert (
        handler.target_symmetry_reference_setting.space_group()
        == patterson_info.reference_setting().group()
    )

    # test apply_symmetry on the primitive setting
    cs_primitive = cs.primitive_setting()
    B = scitbx.matrix.sqr(
        cs_primitive.unit_cell().fractionalization_matrix()
    ).transpose()
    xtal = Crystal(B, sgtbx.space_group())
    new_xtal, cb_op = handler.apply_symmetry(xtal)
    new_xtal.get_crystal_symmetry(assert_is_compatible_unit_cell=True)

    # With only a unit cell, the handler falls back to P-1 targets.
    handler = symmetry.SymmetryHandler(
        unit_cell=cs_min_cell.unit_cell(),
        space_group=sgtbx.space_group(),
    )
    assert handler.target_symmetry_primitive.unit_cell().volume() == pytest.approx(
        cs_min_cell.unit_cell().volume()
    )
    assert handler.target_symmetry_primitive.space_group() == sgtbx.space_group("P-1")
    assert (
        handler.target_symmetry_reference_setting.unit_cell().volume()
        == pytest.approx(cs_min_cell.unit_cell().volume())
    )
    assert handler.target_symmetry_reference_setting.space_group() == sgtbx.space_group(
        "P-1"
    )
# https://github.com/dials/dials/issues/1254
def test_SymmetryHandler_no_match():
    """apply_symmetry returns (None, None) when the crystal cannot be mapped
    onto the target space group (P422 cell vs I23 target)."""
    sgi = sgtbx.space_group_info(symbol="P422")
    cs = sgi.any_compatible_crystal_symmetry(volume=10000)
    B = scitbx.matrix.sqr(cs.unit_cell().fractionalization_matrix()).transpose()
    xtal = Crystal(B, sgtbx.space_group())
    handler = symmetry.SymmetryHandler(
        unit_cell=None, space_group=sgtbx.space_group_info("I23").group()
    )
    assert handler.apply_symmetry(xtal) == (None, None)
# https://github.com/dials/dials/issues/1217
@pytest.mark.parametrize(
    "crystal_symmetry",
    [
        crystal.symmetry(
            unit_cell=(
                44.66208171,
                53.12629403,
                62.53397661,
                64.86329707,
                78.27343894,
                90,
            ),
            space_group_symbol="C 1 2/m 1 (z,x+y,-2*x)",
        ),
        crystal.symmetry(
            unit_cell=(44.3761, 52.5042, 61.88555952, 115.1002877, 101.697107, 90),
            space_group_symbol="C 1 2/m 1 (-z,x+y,2*x)",
        ),
    ],
)
def test_symmetry_handler_c2_i2(crystal_symmetry):
    """apply_symmetry must map a P1 crystal onto each of several equivalent
    C2/I2 settings of the same symmetry (regression test for issue #1217)."""
    cs_ref = crystal_symmetry.as_reference_setting()
    cs_ref = cs_ref.change_basis(
        cs_ref.change_of_basis_op_to_best_cell(best_monoclinic_beta=False)
    )
    cs_best = cs_ref.best_cell()
    # best -> ref is different to cs_ref above
    cs_best_ref = cs_best.as_reference_setting()
    assert not cs_ref.is_similar_symmetry(cs_best_ref)
    B = scitbx.matrix.sqr(
        crystal_symmetry.unit_cell().fractionalization_matrix()
    ).transpose()
    cryst = Crystal(B, sgtbx.space_group())
    # Handlers constrained by space group only.
    for cs in (crystal_symmetry, cs_ref, cs_best):
        print(cs)
        handler = symmetry.SymmetryHandler(space_group=cs.space_group())
        new_cryst, cb_op = handler.apply_symmetry(cryst)
        assert (
            new_cryst.change_basis(cb_op).get_crystal_symmetry().is_similar_symmetry(cs)
        )
    # Handlers constrained by both unit cell and space group.
    for cs in (crystal_symmetry, cs_ref, cs_best, cs_best_ref):
        print(cs)
        handler = symmetry.SymmetryHandler(
            unit_cell=cs.unit_cell(), space_group=cs.space_group()
        )
        new_cryst, cb_op = handler.apply_symmetry(cryst)
        assert (
            new_cryst.change_basis(cb_op).get_crystal_symmetry().is_similar_symmetry(cs)
        )
# Module-level fixture data: crystal symmetries used to parametrize
# test_find_matching_symmetry below.
crystal_symmetries = []
# An I2 cell reduced to its minimum (triclinic) setting.
cs = crystal.symmetry(
    unit_cell=uctbx.unit_cell("76, 115, 134, 90, 99.07, 90"),
    space_group_info=sgtbx.space_group_info(symbol="I2"),
)
crystal_symmetries.append(
    crystal.symmetry(
        unit_cell=cs.minimum_cell().unit_cell(), space_group=sgtbx.space_group()
    )
)
# A tetragonal P41212 cell in a permuted (c,a,b) basis.
cs = crystal.symmetry(
    unit_cell=uctbx.unit_cell("42,42,40,90,90,90"),
    space_group_info=sgtbx.space_group_info(symbol="P41212"),
)
crystal_symmetries.append(cs.change_basis(sgtbx.change_of_basis_op("c,a,b")))
# One primitive-setting symmetry for every acentric Bravais type.
for symbol in bravais_types.acentric:
    sgi = sgtbx.space_group_info(symbol=symbol)
    cs = crystal.symmetry(
        unit_cell=sgi.any_compatible_unit_cell(volume=1000), space_group_info=sgi
    )
    cs = cs.niggli_cell().as_reference_setting().primitive_setting()
    crystal_symmetries.append(cs)
@pytest.mark.parametrize("crystal_symmetry", crystal_symmetries)
def test_find_matching_symmetry(crystal_symmetry):
    """find_matching_symmetry should recover the reference 'best' cell from a
    unit cell expressed in various permuted/inverted bases."""
    cs = crystal_symmetry
    cs.show_summary()
    # Try several basis changes, including handedness-flipping ones.
    for op in ("x,y,z", "z,x,y", "y,z,x", "-x,z,y", "y,x,-z", "z,-y,x")[:]:
        cb_op = sgtbx.change_of_basis_op(op)
        uc_inp = cs.unit_cell().change_basis(cb_op)
        # With and without an explicit reference unit cell.
        for ref_uc, ref_sg in [
            (cs.unit_cell(), cs.space_group()),
            (None, cs.space_group()),
        ][:]:
            best_subgroup = symmetry.find_matching_symmetry(
                uc_inp, target_space_group=ref_sg
            )
            cb_op_inp_best = best_subgroup["cb_op_inp_best"]
            assert uc_inp.change_basis(cb_op_inp_best).is_similar_to(
                cs.as_reference_setting().best_cell().unit_cell()
            )
|
tux-00/ansible
|
test/units/module_utils/facts/test_collectors.py
|
Python
|
gpl-3.0
| 12,595
| 0.001032
|
# unit tests for ansible fact collectors
# -*- coding: utf-8 -*-
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Make coding more python3-ish
from __future__ import (absolute_import, division)
__metaclass__ = type
from ansible.compat.tests.mock import Mock, patch
from . base import BaseFactsTest
from ansible.module_utils.facts import collector
from ansible.module_utils.facts.system.apparmor import ApparmorFactCollector
from ansible.module_utils.facts.system.caps import SystemCapabilitiesFactCollector
from ansible.module_utils.facts.system.cmdline import CmdLineFactCollector
from ansible.module_utils.facts.system.distribution import DistributionFactCollector
from ansible.module_utils.facts.system.dns import DnsFactCollector
from ansible.module_utils.facts.system.env import EnvFactCollector
from ansible.module_utils.facts.system.fips import FipsFactCollector
from ansible.module_utils.facts.system.pkg_mgr import PkgMgrFactCollector
from ansible.module_utils.facts.system.platform import PlatformFactCollector
from ansible.module_utils.facts.system.python import PythonFactCollector
from ansible.module_utils.facts.system.selinux import SelinuxFactCollector
from ansible.module_utils.facts.system.service_mgr import ServiceMgrFactCollector
from ansible.module_utils.facts.system.ssh_pub_keys import SshPubKeyFactCollector
from ansible.module_utils.facts.system.user import UserFactCollector
from ansible.module_utils.facts.virtual.base import VirtualCollector
from ansible.module_utils.facts.network.base import NetworkCollector
from ansible.module_utils.facts.hardware.base import HardwareCollector
class CollectorException(Exception):
    """Raised by ExceptionThrowingCollector to exercise error paths."""
    pass
class ExceptionThrowingCollector(collector.BaseFactCollector):
    """Fact collector whose collect() always raises, for failure testing."""
    name = 'exc_throwing'

    def __init__(self, collectors=None, namespace=None, exception=None):
        super(ExceptionThrowingCollector, self).__init__(collectors, namespace)
        # Default to a generic CollectorException when none is supplied.
        self._exception = exception or CollectorException('collection failed')

    def collect(self, module=None, collected_facts=None):
        raise self._exception
class TestExceptionThrowingCollector(BaseFactsTest):
    """Collector exceptions must propagate out of both collect entry points."""
    __test__ = True
    gather_subset = ['exc_throwing']
    valid_subsets = ['exc_throwing']
    collector_class = ExceptionThrowingCollector

    def test_collect(self):
        module = self._mock_module()
        fact_collector = self.collector_class()
        self.assertRaises(CollectorException,
                          fact_collector.collect,
                          module=module,
                          collected_facts=self.collected_facts)

    def test_collect_with_namespace(self):
        module = self._mock_module()
        fact_collector = self.collector_class()
        self.assertRaises(CollectorException,
                          fact_collector.collect_with_namespace,
                          module=module,
                          collected_facts=self.collected_facts)
class TestApparmorFacts(BaseFactsTest):
    """apparmor facts should always include a 'status' key."""
    __test__ = True
    gather_subset = ['!all', 'apparmor']
    valid_subsets = ['apparmor']
    fact_namespace = 'ansible_apparmor'
    collector_class = ApparmorFactCollector

    def test_collect(self):
        facts_dict = super(TestApparmorFacts, self).test_collect()
        self.assertIn('status', facts_dict['apparmor'])
class TestCapsFacts(BaseFactsTest):
    """System capabilities facts; run_command is mocked so no real capsh runs."""
    __test__ = True
    gather_subset = ['!all', 'caps']
    valid_subsets = ['caps']
    fact_namespace = 'ansible_system_capabilities'
    collector_class = SystemCapabilitiesFactCollector

    def _mock_module(self):
        # Pretend capsh exists and reports 'Current: =ep'.
        mock_module = Mock()
        mock_module.params = {'gather_subset': self.gather_subset,
                              'gather_timeout': 10,
                              'filter': '*'}
        mock_module.get_bin_path = Mock(return_value='/usr/sbin/capsh')
        mock_module.run_command = Mock(return_value=(0, 'Current: =ep', ''))
        return mock_module
class TestCmdLineFacts(BaseFactsTest):
    """Smoke-test collection of kernel cmdline facts."""
    __test__ = True
    gather_subset = ['!all', 'cmdline']
    valid_subsets = ['cmdline']
    fact_namespace = 'ansible_cmdline'
    collector_class = CmdLineFactCollector
class TestDistributionFacts(BaseFactsTest):
    """Smoke-test collection of OS distribution facts."""
    __test__ = True
    gather_subset = ['!all', 'distribution']
    valid_subsets = ['distribution']
    fact_namespace = 'ansible_distribution'
    collector_class = DistributionFactCollector
class TestDnsFacts(BaseFactsTest):
    """Smoke-test collection of DNS resolver facts."""
    __test__ = True
    gather_subset = ['!all', 'dns']
    valid_subsets = ['dns']
    fact_namespace = 'ansible_dns'
    collector_class = DnsFactCollector
class TestEnvFacts(BaseFactsTest):
    """Environment facts must include HOME."""
    __test__ = True
    gather_subset = ['!all', 'env']
    valid_subsets = ['env']
    fact_namespace = 'ansible_env'
    collector_class = EnvFactCollector

    def test_collect(self):
        facts_dict = super(TestEnvFacts, self).test_collect()
        self.assertIn('HOME', facts_dict['env'])
class TestFipsFacts(BaseFactsTest):
    """Smoke-test collection of the FIPS-mode fact."""
    __test__ = True
    gather_subset = ['!all', 'fips']
    valid_subsets = ['fips']
    fact_namespace = 'ansible_fips'
    collector_class = FipsFactCollector
class TestHardwareCollector(BaseFactsTest):
    """Hardware facts; the architecture fact is pre-seeded because some
    hardware collectors consult previously collected facts."""
    __test__ = True
    gather_subset = ['!all', 'hardware']
    valid_subsets = ['hardware']
    fact_namespace = 'ansible_hardware'
    collector_class = HardwareCollector
    collected_facts = {'ansible_architecture': 'x86_64'}
class TestNetworkCollector(BaseFactsTest):
    """Smoke-test collection of network facts."""
    __test__ = True
    gather_subset = ['!all', 'network']
    valid_subsets = ['network']
    fact_namespace = 'ansible_network'
    collector_class = NetworkCollector
class TestPkgMgrFacts(BaseFactsTest):
    """Smoke-test collection of the package-manager fact."""
    __test__ = True
    gather_subset = ['!all', 'pkg_mgr']
    valid_subsets = ['pkg_mgr']
    fact_namespace = 'ansible_pkgmgr'
    collector_class = PkgMgrFactCollector
class TestPlatformFactCollector(BaseFactsTest):
    """Smoke-test collection of platform facts."""
    __test__ = True
    gather_subset = ['!all', 'platform']
    valid_subsets = ['platform']
    fact_namespace = 'ansible_platform'
    collector_class = PlatformFactCollector
class TestPythonFactCollector(BaseFactsTest):
    """Smoke-test collection of python interpreter facts."""
    __test__ = True
    gather_subset = ['!all', 'python']
    valid_subsets = ['python']
    fact_namespace = 'ansible_python'
    collector_class = PythonFactCollector
class TestSelinuxFacts(BaseFactsTest):
    """selinux facts, including the no-libselinux fallback path."""
    __test__ = True
    gather_subset = ['!all', 'selinux']
    valid_subsets = ['selinux']
    fact_namespace = 'ansible_selinux'
    collector_class = SelinuxFactCollector

    def test_no_selinux(self):
        # Without python-libselinux the collector must report falsy facts.
        with patch('ansible.module_utils.facts.system.selinux.HAVE_SELINUX', False):
            module = self._mock_module()
            fact_collector = self.collector_class()
            facts_dict = fact_collector.collect(module=module)
            self.assertIsInstance(facts_dict, dict)
            self.assertFalse(facts_dict['selinux'])
            return facts_dict
class TestServiceMgrFacts(BaseFactsTest):
__test__ = True
gather_subset = ['!all', 'service_mgr']
valid_subsets = ['service_mgr']
fact_namespace = 'ansible_service_mgr'
collector_class = ServiceMgrFactCollector
# TODO: dedupe some of this test code
@patch('ansible.module_utils.facts.system.service_mgr.get_file_content', return_value=None)
def test_no_proc1(self, mock_gfc):
# no /proc/1/comm, ps returns non-0
# should fallback to 'service'
module = self._mock_module()
module.run_command = Mock(return_value=(1, '', 'wat'))
fact_collector = self.collector_class()
facts_dict = fact_collector.collect(module=module)
|
ikben/troposphere
|
troposphere/fms.py
|
Python
|
bsd-2-clause
| 1,072
| 0
|
# Copyright (c) 2012-2020, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSProperty, AWSObject, Tags
from .validators import json_checker, boolean
class IEMap(AWSProperty):
    """Include/Exclude map of AWS account ids scoping an FMS policy."""
    props = {
        'ACCOUNT': ([basestring], False),
    }
class Policy(AWSObject):
    """AWS::FMS::Policy resource (Firewall Manager policy)."""
    resource_type = "AWS::FMS::Policy"

    props = {
        'DeleteAllPolicyResources': (boolean, False),
        'ExcludeMap': (IEMap, False),
        'ExcludeResourceTags': (boolean, True),
        'IncludeMap': (IEMap, False),
        'PolicyName': (basestring, True),
        'RemediationEnabled': (boolean, True),
        'ResourceTags': (Tags, False),
        'ResourceType': (basestring, True),
        'ResourceTypeList': ([basestring], True),
        'SecurityServicePolicyData': (json_checker, True),
        'Tags': (Tags, False),
    }
class NotificationChannel(AWSObject):
    """AWS::FMS::NotificationChannel resource (SNS channel for FMS logs)."""
    resource_type = "AWS::FMS::NotificationChannel"
    props = {
        'SnsRoleName': (basestring, True),
        'SnsTopicArn': (basestring, True),
    }
|
christianurich/VIBe2UrbanSim
|
3rdparty/opus/src/paris/household_x_neighborhood/age_lnprice.py
|
Python
|
gpl-2.0
| 1,876
| 0.032516
|
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
# This is a simple test variable for the interaction of gridcells and households.
from opus_core.variables.variable import Variable
from urbansim.functions import attribute_label
class age_lnprice(Variable):
    """Interaction variable for neighborhoods and households.

    Computes household.agetrans * neighborhood.ln_price (the original
    docstring incorrectly referred to lhhincpc)."""

    def dependencies(self):
        return [attribute_label("neighborhood", "ln_price"),
                "paris.household.agetrans"]

    def compute(self, dataset_pool):
        return self.get_dataset().multiply("agetrans", "ln_price")
#if __name_
|
_=='__main__':
#from opus_core.tests import opus_unittest
#from urbansim.variable_test_toolbox import VariableTestToolbox
#from numpy import array
#from numpy import ma
#class Tests(opus_unittest.OpusTestCase):
#variable_name = "urbansim.household_x_neighborhood.hhrich_nbpoor"
#def test_full_tree(self):
#dept = array([10, 20, 30])
#prev_dept = array([10, 4, 20, 30])
#values = VariableTestToolbox().compute_variable(self.variable_name,
#{"neighborhood":{
#"dept":dept},
#"household":{
#"prev_dept":prev_dept}},
#dataset = "household_x_neighborhood")
#should_be = array([[1, 0, 0],
#[0, 0, 0],
#[0, 1, 0],
#[0, 0, 1]])
#self.assertEqual(ma.allclose(values, should_be, rtol=1e-20),
#True, msg = "Error in " + self.variable_name)
#opus_unittest.main()
|
rshk/ardomino-api
|
ardomino/tests/test_configuration.py
|
Python
|
bsd-3-clause
| 3,008
| 0
|
"""
Tests for configuration file parsers, ...
"""
from ConfigParser import RawConfigParser
import io
import os
import textwrap
import pytest
from ardomino.conf import (process_conf_files,
find_configuration_files,
create_conf_parser)
@pytest.fixture
def conf_dir(tmpdir):
    """Fixture: a temporary directory with three ``*.ini`` files plus one
    ``.txt`` file that the configuration loaders must ignore."""
    with open(str(tmpdir.join('food.ini')), 'w') as f:
        f.write(textwrap.dedent("""
        [food:Egg]
        taste = Great
        [food:Bacon]
        taste = Delicious
        [food:Spam]
        taste = Sublime
        """))
    with open(str(tmpdir.join('beverages.ini')), 'w') as f:
        f.write(textwrap.dedent("""
        [beverage:Coffee]
        color = black
        [beverage:Milk]
        color = white
        [beverage:Tea]
        color = brown
        """))
    with open(str(tmpdir.join('pets.ini')), 'w') as f:
        # NOTE(review): the Snake 'says' value appears empty in the
        # original source — confirm that is intentional.
        f.write(textwrap.dedent("""
        [pet:Cat]
        says = Meow
        [pet:Dog]
        says = Bark
        [pet:Snake]
        says =
        """))
    with open(str(tmpdir.join('not-a-conf-file.txt')), 'w') as f:
        f.write(textwrap.dedent("""
        [this-is:not]
        what = a configuration file!
        """))
    return tmpdir
def test_create_conf_parser(conf_dir):
    """The merged parser sees one sample section from each .ini file and
    none from the stray .txt file."""
    parser = create_conf_parser(str(conf_dir))
    sections = parser.sections()
    for expected in ('pet:Cat', 'food:Bacon', 'beverage:Coffee'):
        assert expected in sections
    assert 'this-is:not' not in sections
def test_find_configuration_files(conf_dir):
    """Exactly the three .ini files are discovered (sorted for stability)."""
    expected = [str(conf_dir.join(name))
                for name in ('beverages.ini', 'food.ini', 'pets.ini')]
    assert sorted(find_configuration_files(str(conf_dir))) == expected
def test_process_conf_files():
    """process_conf_files() groups [type:name] sections into a nested
    {type: {name: {option: value}}} dict.

    Fix: removed a stray ``pass`` statement that followed the final
    assertion in the original.
    """
    example_conf = textwrap.dedent("""
    [food:Egg]
    description = A nice round egg
    [food:Spam]
    description = Spam Spam Spam Spam Spam!
    [person:JohnDoe]
    first_name = John
    last_name = Doe
    """)
    expected_result = {
        'food': {
            'Egg': {'description': 'A nice round egg'},
            'Spam': {'description': 'Spam Spam Spam Spam Spam!'},
        },
        'person': {
            'JohnDoe': {
                'first_name': 'John',
                'last_name': 'Doe',
            }
        }
    }
    conf_parser = RawConfigParser()
    # Python 2 API: readfp() consumes a file-like object of bytes.
    conf_parser.readfp(io.BytesIO(example_conf))
    result = process_conf_files(conf_parser)
    assert result == expected_result
def test_process_conf_dir(conf_dir):
    """End-to-end: parse the fixture directory and verify the nested dict."""
    obj = process_conf_files(create_conf_parser(str(conf_dir)))

    assert sorted(obj) == ['beverage', 'food', 'pet']
    assert sorted(obj['beverage']) == ['Coffee', 'Milk', 'Tea']
    assert sorted(obj['food']) == ['Bacon', 'Egg', 'Spam']
    assert sorted(obj['pet']) == ['Cat', 'Dog', 'Snake']
    assert obj['food']['Bacon'] == {'taste': 'Delicious'}
    assert obj['food']['Spam'] == {'taste': 'Sublime'}
|
xasos/crowdsource-platform
|
crowdsourcing/migrations/0041_auto_20150825_0240.py
|
Python
|
mit
| 1,146
| 0.001745
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Renames TaskComment.module -> task, orders comments by creation
    # time, and updates the choices on Module.feedback_permissions and
    # TaskWorker.task_status.

    dependencies = [
        ('crowdsourcing', '0040_auto_20150824_2013'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='comment',
            options={'ordering': ['created_timestamp']},
        ),
        migrations.RenameField(
            model_name='taskcomment',
            old_name='module',
            new_name='task',
        ),
        migrations.AlterField(
            model_name='module',
            name='feedback_permissions',
            field=models.IntegerField(default=1, choices=[(1, b'Others:Read+Write::Workers:Read+Write'), (2, b'Others:Read::Workers:Read+Write'), (3, b'Others:Read::Workers:Read'), (4, b'Others:None::Workers:Read')]),
        ),
        migrations.AlterField(
            model_name='taskworker',
            name='task_status',
            field=models.IntegerField(default=1, choices=[(1, b'In Progress'), (2, b'Submitted'), (3, b'Accepted'), (4, b'Rejected'), (5, b'Returned'), (6, b'Skipped')]),
        ),
    ]
|
kcsry/django-form-designer
|
form_designer/apps.py
|
Python
|
bsd-3-clause
| 193
| 0
|
from django.apps import AppConfig
from django.utils.translation import gettext_lazy as _


class FormDesignerConfig(AppConfig):
    """Django AppConfig for the form_designer package."""
    name = 'form_designer'
    verbose_name = _("Form Designer")
|
OnFTA/scrapy-training
|
crawler_film/crawler_film/pipelines.py
|
Python
|
mit
| 1,062
| 0.002825
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import pymongo
class CrawlerPipeline(object):
    """Scrapy item pipeline that stores scraped items in MongoDB,
    upserting on the item's ``url`` field."""

    def __init__(self, mongo_uri, mongo_db):
        self.mongo_uri = mongo_uri
        self.mongo_db = mongo_db

    @classmethod
    def from_crawler(cls, crawler):
        # Connection settings come from the Scrapy settings object; the
        # database name defaults to 'crawler_film'.
        return cls(
            mongo_uri=crawler.settings.get('MONGO_URI'),
            mongo_db=crawler.settings.get('MONGO_DATABASE', 'crawler_film')
        )

    def open_spider(self, spider):
        # One collection per spider name.
        self.collection_name = spider.name
        self.client = pymongo.MongoClient(self.mongo_uri)
        self.db = self.client[self.mongo_db]
        # disable drop by default
        # NOTE(review): despite the comment above, the collection IS
        # dropped on every spider start — confirm this is intended.
        self.db.drop_collection(self.collection_name)

    def close_spider(self, spider):
        self.client.close()

    def process_item(self, item, spider):
        # Upsert keyed on 'url' so re-crawled pages overwrite old data.
        self.db[self.collection_name].update({'url': item['url']}, dict(item), upsert=True)
        return item
|
jetspace/jetlibs
|
docs/source/conf.py
|
Python
|
mit
| 10,239
| 0.006739
|
# -*- coding: utf-8 -*-
#
# Jetlibs documentation build configuration file, created by
# sphinx-quickstart on Wed Dec 23 16:22:13 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Jetlibs'
copyright = u'2015, Marius Messerschmidt'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Jetlibsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'Jetlibs.tex', u'Jetlibs Documentation',
u'Marius Messerschmidt', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true,
|
show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
|
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'jetlibs', u'Jetlibs Documentation',
[u'Marius Messerschmidt'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Jetlibs', u'Jetlibs Documentation',
u'Marius Messerschmidt', 'Jetlibs', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options fo
|
seishei/multiprocess
|
py2.5/examples/ex_synchronize.py
|
Python
|
bsd-3-clause
| 6,159
| 0.004221
|
#
# A test file for the `processing` package
#
import time, sys, random
from Queue import Empty
import processing # may get overwritten
#### TEST_VALUE
def value_func(running, mutex):
    # Worker: sleep a random 0-4 s, then decrement the shared 'running'
    # counter under the mutex and announce completion.
    random.seed()
    time.sleep(random.random()*4)
    mutex.acquire()
    print '\n\t\t\t' + str(processing.currentProcess()) + ' has finished'
    running.value -= 1
    mutex.release()
def test_value():
    # Spawn TASKS workers sharing a Value counter; poll and print the
    # counter (under the mutex) until every worker has decremented it.
    TASKS = 10
    running = processing.Value('i', TASKS)
    mutex = processing.Lock()

    for i in range(TASKS):
        processing.Process(target=value_func, args=(running, mutex)).start()

    while running.value > 0:
        time.sleep(0.08)
        mutex.acquire()
        print running.value,
        sys.stdout.flush()
        mutex.release()

    print
    print 'No more running processes'
#### TEST_QUEUE
def queue_func(queue):
    # Producer: push 30 squares at random intervals, then a 'STOP' sentinel.
    for i in range(30):
        time.sleep(0.5 * random.random())
        queue.put(i*i)
    queue.put('STOP')
def test_queue():
    # Consume with a 0.3 s timeout, printing 'TIMEOUT' whenever the
    # producer is slower, until the 'STOP' sentinel arrives.
    q = processing.Queue()

    p = processing.Process(target=queue_func, args=(q,))
    p.start()

    o = None
    while o != 'STOP':
        try:
            o = q.get(timeout=0.3)
            print o,
            sys.stdout.flush()
        except Empty:
            print 'TIMEOUT'

    print
#### TEST_CONDITION
def condition_func(cond):
    # Child: take the condition lock, wait 2 s, then notify the parent.
    cond.acquire()
    print '\t' + str(cond)
    time.sleep(2)
    print '\tchild is notifying'
    print '\t' + str(cond)
    cond.notify()
    cond.release()
def test_condition():
    # Acquire the (recursive) condition twice in the parent, start the
    # child, then wait() — wait releases the lock so the child can notify.
    cond = processing.Condition()

    p = processing.Process(target=condition_func, args=(cond,))
    print cond

    cond.acquire()
    print cond
    cond.acquire()
    print cond

    p.start()
    print 'main is waiting'
    cond.wait()
    print 'main has woken up'

    print cond
    cond.release()
    print cond
    cond.release()

    p.join()
    print cond
#### TEST_SEMAPHORE
def semaphore_func(sema, mutex, running):
    # At most 3 workers run concurrently (bounded by the semaphore); the
    # mutex guards the shared 'running' counter and the prints.
    sema.acquire()

    mutex.acquire()
    running.value += 1
    print running.value, 'tasks are running'
    mutex.release()

    random.seed()
    time.sleep(random.random()*2)

    mutex.acquire()
    running.value -= 1
    print '%s has finished' % processing.currentProcess()
    mutex.release()

    sema.release()
def test_semaphore():
    # Start 10 workers gated by a Semaphore(3) and wait for all of them.
    sema = processing.Semaphore(3)
    mutex = processing.RLock()
    running = processing.Value('i', 0)

    processes = [
        processing.Process(target=semaphore_func, args=(sema, mutex, running))
        for i in range(10)
        ]

    for p in processes:
        p.start()

    for p in processes:
        p.join()
#### TEST_JOIN_TIMEOUT
def join_timeout_func():
    # Child that outlives several 1 s join() timeouts in the parent.
    print '\tchild sleeping'
    time.sleep(5.5)
    print '\n\tchild terminating'
def test_join_timeout():
    # Repeatedly join() with a 1 s timeout, printing a dot per timeout,
    # until the child actually exits.
    p = processing.Process(target=join_timeout_func)
    p.start()

    print 'waiting for process to finish'

    while 1:
        p.join(timeout=1)
        if not p.isAlive():
            break
        print '.',
        sys.stdout.flush()
#### TEST_EVENT
def event_func(event):
    # Worker: block until the shared event is set.
    print '\t%r is waiting' % processing.currentProcess()
    event.wait()
    print '\t%r has woken up' % processing.currentProcess()
def test_event():
    # Five workers wait on one Event; setting it wakes them all at once.
    event = processing.Event()

    processes = [processing.Process(target=event_func, args=(event,))
                 for i in range(5)]

    for p in processes:
        p.start()

    print 'main is sleeping'
    time.sleep(2)

    print 'main is setting event'
    event.set()

    for p in processes:
        p.join()
#### TEST_SHAREDVALUES
def sharedvalues_func(values, arrays, shared_values, shared_arrays):
    # Child: check each shared Value/Array matches the plain data it was
    # created from (proves the shared memory is visible across processes).
    for i in range(len(values)):
        v = values[i][1]
        sv = shared_values[i].value
        assert v == sv

    # NOTE(review): this loop iterates len(values) but indexes 'arrays' —
    # it only works because both lists happen to have 3 entries.
    for i in range(len(values)):
        a = arrays[i][1]
        sa = list(shared_arrays[i][:])
        assert a == sa

    print 'Tests passed'
def test_sharedvalues():
    # Build shared Values/Arrays from (typecode, data) pairs and verify
    # them from a child process; exit code 0 means all asserts passed.
    values = [
        ('i', 10),
        ('h', -2),
        ('d', 1.25)
        ]
    arrays = [
        ('i', range(100)),
        ('d', [0.25 * i for i in range(100)]),
        ('H', range(1000))
        ]

    # NOTE: 'id' shadows the builtin here (kept as-is; doc-only change).
    shared_values = [processing.Value(id, v) for id, v in values]
    shared_arrays = [processing.Array(id, a) for id, a in arrays]

    p = processing.Process(
        target=sharedvalues_func,
        args=(values, arrays, shared_values, shared_arrays)
        )
    p.start()
    p.join()

    assert p.getExitCode() == 0
####
def test(namespace=processing):
global processing
processing = namespace
for func in [ test_value, test_queue, test_condition,
test_semaphore, test_join_timeout, test_event,
test_sharedvalues ]:
print '\n\t######## %s\n' % func.__name__
func()
ignore = processing.activeChildren() # cleanup any old processes
if hasattr(processing, '_debugInfo'):
info = processing._debugInfo()
if info:
print info
raise ValueError, 'there should be no pos
|
itive refcou
|
nts left'
# Entry point: choose the backend from argv — real processes (default),
# processes behind a Manager, or threads via processing.dummy.
if __name__ == '__main__':
    processing.freezeSupport()

    assert len(sys.argv) in (1, 2)

    if len(sys.argv) == 1 or sys.argv[1] == 'processes':
        print ' Using processes '.center(79, '-')
        namespace = processing
    elif sys.argv[1] == 'manager':
        print ' Using processes and a manager '.center(79, '-')
        namespace = processing.Manager()
        # The manager namespace lacks these helpers; borrow them.
        namespace.Process = processing.Process
        namespace.currentProcess = processing.currentProcess
        namespace.activeChildren = processing.activeChildren
    elif sys.argv[1] == 'threads':
        print ' Using threads '.center(79, '-')
        import processing.dummy as namespace
    else:
        print 'Usage:\n\t%s [processes | manager | threads]' % sys.argv[0]
        raise SystemExit, 2

    test(namespace)
|
aacebedo/raspbian-docker-images
|
seafile/files/seafile-installer.py
|
Python
|
gpl-3.0
| 9,507
| 0.019565
|
#!/usr/bin/env python3
import pexpect
import sys
import argparse
import logging
from logging import StreamHandler
import traceback
import os
import quik
from quik import Template
import fileinput
import re
import tarfile
import fnmatch
ROOTLOGGER = logging.getLogger("seafileinstaller")
class SeafileInstaller:
    @staticmethod
    def init_loggers():
        """
        Initialize loggers of the program: attach a plain-message
        stdout handler to the module-level ROOTLOGGER.
        """
        formatter = logging.Formatter('%(message)s')
        hdlr = StreamHandler(sys.stdout)
        hdlr.setLevel(1)    # level 1: pass essentially every record
        hdlr.setFormatter(formatter)
        ROOTLOGGER.addHandler(hdlr)
@staticmethod
def parse_args(raw_args):
"""
Function to parse the command line arguments
"""
# Create main parser
parser = argparse.ArgumentParser(
prog="Seafile installer",
description='Seafile installer
|
')
parser.add_argument(
'--install-dir',
required=True,
help='Install directory',
type=str)
parser.add_argument(
'--server-name',
required=True,
help='Serve
|
r name',
type=str)
parser.add_argument(
'--server-host',
required=True,
help="Server's host ip or domain",
type=str)
parser.add_argument(
'--data-dir',
required=True,
help="Directory where data will be stored",
type=str)
parser.add_argument(
'--nginx-dir',
required=True,
help="Directory where nginx is installed",
type=str)
parser.add_argument(
'--admin-email',
required=True,
help="Administator email",
type=str)
parser.add_argument(
'--admin-password',
required=True,
help="Administator password",
type=str)
parser.add_argument(
'archive',
help="Seafile archive path",
type=str)
parser.add_argument(
'--server-port',
default=8082,
type=int)
return parser.parse_args(raw_args)
    @staticmethod
    def install(install_dir, archive_path, host, server_name, port, data_dir):
        """Unpack the Seafile archive and drive its interactive setup script.

        Answers the setup-seafile.sh prompts via pexpect, then creates the
        seafile-server-latest symlink plus /usr/local/bin shortcuts.
        Raises Exception when no setup script is found in the archive.
        """
        os.makedirs(install_dir,exist_ok=True)
        tar = tarfile.open(archive_path)
        tar.extractall(path=install_dir)
        tar.close()
        # Locate the setup script inside the unpacked tree.
        setup_file_path = None
        for root, dirnames, filenames in os.walk(install_dir):
            for filename in fnmatch.filter(filenames, 'setup-seafile.sh'):
                setup_file_path = os.path.join(root, filename)
        if setup_file_path != None:
            child = pexpect.spawnu(setup_file_path, timeout=180)
            child.logfile = sys.stdout
            # Running as root triggers an extra confirmation prompt.
            # NOTE(review): nesting reconstructed from a whitespace-mangled
            # source — confirm only the ROOT confirmation is conditional.
            if os.getuid() == 0:
                child.expect_exact("You are running this script as ROOT. Are you sure to continue?")
                child.sendline("yes")
            child.expect_exact("Press [ENTER] to continue")
            child.sendline()
            child.expect_exact("[server name]:")
            child.sendline(server_name)
            child.expect_exact("[This server's ip or domain]:")
            child.sendline(host)
            child.expect_exact("[default: /opt/seafile/seafile-data ]")
            child.sendline(data_dir)
            child.expect_exact("[default: 8082 ]")
            child.sendline(str(port))
            child.expect_exact("If you are OK with the configuration, press [ENTER] to continue.")
            child.sendline()
            child.expect_exact("Now let's setup seahub configuration. Press [ENTER] to continue")
            child.sendline()
            setup_dir = os.path.dirname(setup_file_path)
            # Convenience links: versioned dir -> seafile-server-latest,
            # plus seafile/seahub launchers on the PATH.
            os.symlink(setup_dir,os.path.join(os.path.dirname(setup_dir),"seafile-server-latest"))
            os.symlink(os.path.join(os.path.dirname(setup_dir),"seafile-server-latest","seafile.sh"),os.path.join("/","usr","local","bin","seafile"))
            os.symlink(os.path.join(os.path.dirname(setup_dir),"seafile-server-latest","seahub.sh"),os.path.join("/","usr","local","bin","seahub"))
        else:
            raise Exception("Unable to find installer")
    @staticmethod
    def generate_nginx_config(nginx_dir, host, install_dir):
        """Write the Seafile nginx site config and force 'daemon off;'.

        Renders an HTTP->HTTPS redirect, a fastcgi proxy for seahub and a
        /seafhttp passthrough (quik template placeholders: @seafile_host,
        @seafile_install_dir), then edits nginx.conf in place so nginx
        stays in the foreground.
        """
        template = Template("""
server {
    listen 80 default;
    server_name _;
    return 301 https://$host$request_uri;
}
server {
    listen 443;
    server_name @seafile_host;
    proxy_set_header X-Forwarded-For $remote_addr;
    location / {
        fastcgi_pass 127.0.0.1:8000;
        fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
        fastcgi_param PATH_INFO $fastcgi_script_name;
        fastcgi_param SERVER_PROTOCOL $server_protocol;
        fastcgi_param QUERY_STRING $query_string;
        fastcgi_param REQUEST_METHOD $request_method;
        fastcgi_param CONTENT_TYPE $content_type;
        fastcgi_param CONTENT_LENGTH $content_length;
        fastcgi_param SERVER_ADDR $server_addr;
        fastcgi_param SERVER_PORT $server_port;
        fastcgi_param SERVER_NAME $server_name;
        fastcgi_param REMOTE_ADDR $remote_addr;
        access_log /var/log/nginx/seahub.access.log;
        error_log /var/log/nginx/seahub.error.log;
        fastcgi_read_timeout 36000;
    }
    location /seafhttp {
        rewrite ^/seafhttp(.*)$ $1 break;
        proxy_pass http://127.0.0.1:8082;
        client_max_body_size 0;
        proxy_connect_timeout 36000s;
        proxy_read_timeout 36000s;
        proxy_send_timeout 36000s;
        send_timeout 36000s;
    }
    location /media {
        root @seafile_install_dir/seafile-server-latest/seahub;
    }
}
""")
        output_file_path = os.path.join(nginx_dir,"etc","nginx","sites-enabled","seafile")
        os.makedirs(os.path.dirname(output_file_path),exist_ok=True)
        output_file = open(output_file_path,'w')
        output_file.write(template.render({"seafile_host":host, "seafile_install_dir":install_dir}))
        output_file.close()
        # Ensure nginx runs in the foreground: rewrite an existing
        # 'daemon ...;' directive, or append 'daemon off;' if none exists.
        found = False
        output_file_path = os.path.join(nginx_dir,"etc","nginx","nginx.conf")
        with fileinput.FileInput(output_file_path, inplace=True, backup='.bak') as file:
            for line in file:
                # inplace=True redirects print() back into the file.
                if (re.match("daemon .*;",line) != None):
                    found = True
                    print(re.sub("daemon .*;", "daemon off;", line),end='')
                else:
                    print(line,end='')
        if found == False:
            output_file = open(output_file_path,"a")
            output_file.write("daemon off;")
            output_file.close()
    @staticmethod
    def generate_seahub_settings(install_dir, host):
        """Point FILE_SERVER_ROOT in seahub_settings.py at this host.

        Rewrites an existing FILE_SERVER_ROOT line in place (keeping a
        .bak backup), or appends one when the file does not define it.
        """
        found = False
        output_file_path = os.path.join(install_dir,"conf","seahub_settings.py")
        new_root = "FILE_SERVER_ROOT = 'http://{}/seafhttp'".format(host)
        with fileinput.FileInput(output_file_path, inplace=True, backup='.bak') as file:
            for line in file:
                # inplace=True redirects print() back into the file.
                if (re.match("FILE_SERVER_ROOT = '.*'",line) != None):
                    found = True
                    print(re.sub("FILE_SERVER_ROOT = '.*'", new_root, line),end='')
                else:
                    print(line,end='')
        if found == False:
            output_file = open(output_file_path,"a")
            output_file.write(new_root)
            output_file.close()
@staticmethod
def generate_ccnet(install_dir, host):
found = False
output_file_path = os.path.join(install_dir,"conf","ccnet.conf"),
new_service = "SERVICE_URL = http://{}".format(host)
with fileinput.FileInput(output_file_path, inplace=True, backup='.bak') as file:
for line in file:
if (re.match("SERVICE_URL = .*",line) != None):
found = True
print(re.sub("SERVICE_URL = .*", new_service, line),end='')
else:
print(line,end='')
if found == False:
output_file = open(output_file_path,"a")
output_file.write(new_service)
output_file.close()
@staticmethod
def configure_seahub(install_dir,admin_email, admin_password):
child = pexpect.spawnu("{}".format(os.path.join(install_dir,"seafile-server-latest","seafile.sh")),["start"],timeout=180)
child.expect(pexpect.EOF)
child = pexpect.spawnu("{}".format(os.path.join(install_dir,"seafile-server-latest","seahub.sh")),["start-fastcgi"],timeout=180)
child.logfile = sys.stdout
child.expect_exact("[ admin
|
dotsonlab/AWSC-Toilet
|
flow.py
|
Python
|
mit
| 2,391
| 0.013802
|
'''
David Rodriguez
Goal: Continuously looping while to perform valve actions at specified times,
introduce substance at a specific ratio based on flow data, recording
and saving flow data, and actuating a flush at a specified time.
Inputs: A schedule of events based on entered times.
Outputs: Sequence of events to a screen as they happen.
daily flow rate data
'''
#logic testing Below is the functional portion
import datetime
import time
import Adafruit_BBIO.GPIO as GPIO
import Adafruit_BBIO.PWM as PWM
class Flow:
GPIO.setup("P8_7", GPIO.OUT) #stepper direction
GPIO.setup("P9_16", GPIO.OUT) #step
GPIO.setup("P8_11", GPIO.OUT) #(0: enabled, 1: disabled)
GPIO.setup("P8_15", GPIO.OUT) #Direction for actuator 1
GPIO.setup("P8_17", GPIO.OUT) #PWM for actuator 1
GPIO.setup("P8_16", GPIO.OUT) #Direction for actuator 2
GPIO.setup("P8_18", GPIO.OUT) #PWM for actuator 2
GPIO.output("P8_7", GPIO.LOW) #set stepper motor direction
tag = ""
def enableStepper(self):
GPIO.output("P8_11", GPIO.LOW)
def disableStepper(self):
GPIO.output("P8_11", GPIO.HIGH)
def trigg
|
erStepper(self):
time.sleep(0.5)
PWM.start("P9_16", 25, 100, 1)
time.sleep(2)
PWM.set_frequency("P9_16", 250)
time.sleep(90)
PWM.stop("P9_16")
PWM.cleanup()
def toiletTrigger(self, flushType):
if flushType == "Full":
self.toil
|
etFull()
else:
self.toiletUrine()
def toiletUrine(self):
print "Toilet Urine Triggered"
self.enableStepper()
self.triggerStepper()
GPIO.output("P8_17", GPIO.HIGH) #pwm on
GPIO.output("P8_15", GPIO.LOW) #extend actuator
time.sleep(.65)
GPIO.output("P8_15", GPIO.HIGH) #retract actuator
time.sleep(2)
GPIO.output("P8_17", GPIO.LOW) #pwn off
self.disableStepper()
def toiletFull(self):
print "Toilet Full Triggered"
self.enableStepper()
self.triggerStepper()
GPIO.output("P8_18", GPIO.HIGH) #pwm on
GPIO.output("P8_16", GPIO.LOW) #extend actuator
time.sleep(.65)
GPIO.output("P8_16", GPIO.HIGH) #retract actuator
time.sleep(1)
GPIO.output("P8_18", GPIO.LOW) #pwm on
self.disableStepper()
|
bauhaus93/webcrawler
|
ui_infopanel.py
|
Python
|
gpl-2.0
| 1,369
| 0.045289
|
import wx
import functions
# (display label, formatter) pairs shown by InfoPanel; a formatter of
# None means the raw value is rendered with str().
infoItems=[ ("active time", functions.FormatTime),
            ("active workers", None),
            ("active tasks", None),
            ("tasks done", None),
            ("pending urls", None),
            ("unique urls found", None),
            ("bytes read", functions.FormatByte),
            ("processing speed", functions.FormatByteSpeed),
            ("current processing speed", functions.FormatByteSpeed),
            ("work time", functions.FormatTime),
            ("errors", None),
            ("invalid data", None),
            ("http 1xx", None),
            ("http 2xx", None),
            ("http 3xx", None),
            ("http 4xx", None),
            ("http 5xx", None)]
class InfoPanel(wx.Panel):
    """Two-column grid of crawler statistics: static title labels on the
    left, live values (formatted per infoItems) on the right."""

    def __init__(self, parent):
        wx.Panel.__init__(self, parent)
        self.sizer=wx.GridSizer(rows=len(infoItems), cols=2)
        # Maps label -> value StaticText and label+"_TITLE" -> title text.
        self.text={}
        for key, f in infoItems:
            self.text[key+"_TITLE"]=wx.StaticText(self, label=key+":")
            self.text[key]=wx.StaticText(self)
            self.text[key].SetMaxSize((-1, 20))
            self.text[key+"_TITLE"].SetMaxSize((-1, 20))
            self.sizer.Add(self.text[key+"_TITLE"], 1, wx.EXPAND)
            self.sizer.Add(self.text[key], 1, wx.EXPAND)
        self.sizer.SetVGap(1)
        self.SetAutoLayout(True)
        self.SetSizer(self.sizer)
        self.Layout()

    def Update(self, info):
        # Refresh only labels whose value actually changed, then re-layout.
        for key, f in infoItems:
            if f:
                val=f(info[key])
            else:
                val=str(info[key])
            if self.text[key].GetLabel()!=val:
                self.text[key].SetLabel(val)
        self.Layout()
|
gillesdegottex/dfasma
|
test/synth_grid.py
|
Python
|
gpl-3.0
| 1,049
| 0.014299
|
import numpy as np
#import scipy.io.wavfile
#import scipy.signal
import pysndfile
import matplotlib.pyplot as plt
plt.ion()
def db2mag(d):
    """Convert a level in decibels to linear magnitude (20*log10 convention)."""
    return 10.0**(d/20.0)
if __name__ == "__main__" :
    # Synthesise a 4 s test signal containing sinusoids at grid
    # frequencies plus clicks at whole seconds; write it as float32 wav.
    print('Synthesise clicks and sinusoids at regular time and frequencies')

    fs = 16000
    syn = np.zeros(4*fs)
    ts = np.arange(len(syn))/float(fs)

    # Add some frequencies
    freqs = [0, fs/16.0, fs/2-fs/16.0, fs/2]
    amps = -32
    for freq in freqs:
        amp = db2mag(amps)
        print('Synthesise: {:8.2f}Hz at {}dB'.format(freq, amps))
        # Halve amplitude at DC and Nyquist — presumably to compensate
        # for the doubled cosine at those frequencies (confirm).
        if freq==0.0 or freq==fs/2: amp/=2
        syn += amp*2.0*np.cos((2*np.pi*freq)*ts)

    # Add some clicks
    clicks = np.array([0.0, 1.0, 2.0, 3.0, (len(syn)-1)/float(fs)])
    # np.int was removed in NumPy 1.24; builtin int is the equivalent.
    syn[(clicks*fs).astype(int)] = 0.5

    #print(pysndfile.get_sndfile_encodings('wav'))
    pysndfile.sndio.write('synth_grid_fs'+str(fs)+'.wav', syn, rate=fs, format='wav', enc='float32')

    if 0:
        plt.plot(ts, syn, 'k')
        from IPython.core.debugger import Pdb; Pdb().set_trace()
|
theia-log/theia
|
theia/cli/tau.py
|
Python
|
apache-2.0
| 100
| 0
|
"""
-------------
theia.cli.tau
-------------

Tau is a Text User Interface frontend for Theia.
"""
|
frozenjava/RobotSimulator
|
examples/robotFunctionality.py
|
Python
|
gpl-2.0
| 704
| 0.002841
|
from jbot import simulator
def diagonal_moving(robot):
    """Clear messages, announce, then move 100 steps diagonally (up-left),
    one unit per axis per step."""
    robot.clear_messages()
    robot.send_message("moving diagonally")
    for _ in range(0, 100):
        robot.move_left(1)
        robot.move_up(1)
def directional_moving(robot):
    """Announce and perform one move in each of the four directions."""
    moves = (
        ("moving down", robot.move_down, 30),
        ("moving up", robot.move_up, 60),
        ("moving left", robot.move_left, 25),
        ("moving right", robot.move_right, 600),
    )
    for message, move, distance in moves:
        robot.send_message(message)
        move(distance)
def main():
    """Simulator entry point: run both movement demos on one robot."""
    my_robot = simulator.get_robot()
    directional_moving(my_robot)
    diagonal_moving(my_robot)
    my_robot.send_message("All Done!")
# Hand main() to the simulator so it drives the robot event loop.
if __name__ == "__main__":
    simulator.simulate(main)
|
dede67/FillBD2
|
Database.py
|
Python
|
gpl-3.0
| 7,228
| 0.019676
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sqlite3
import os
HOMEDIR=os.path.expanduser('~')
DATABASENAME=os.path.join(HOMEDIR, ".fillBD.conf.sqlite")
# ###########################################################
# DB-Zugriff für die Profile
class Database():
    def __init__(self):
        """Open (and on first use create) the profile database.

        When the sqlite file does not exist yet, creates three tables —
        destProfile (target-medium profiles), baseProfile (source
        profiles) and folders (folder names per base profile) — and
        seeds destProfile with defaults; otherwise just connects.
        """
        self.dbname=DATABASENAME
        if os.path.exists(self.dbname)==False:
            self.connection=sqlite3.connect(self.dbname)
            self.cursor=self.connection.cursor()
            self.cursor.execute('CREATE TABLE destProfile' \
                                ' (ID INTEGER NOT NULL PRIMARY KEY,' \
                                ' name VARCHAR NOT NULL UNIQUE,' \
                                ' comment VARCHAR,' \
                                ' size INTEGER,' \
                                ' blocksize INTEGER,' \
                                ' addblock INTEGER,' \
                                ' fldrname VARCHAR,' \
                                ' final INTEGER)')
            self.cursor.execute('CREATE TABLE baseProfile' \
                                ' (ID INTEGER NOT NULL PRIMARY KEY,' \
                                ' name VARCHAR NOT NULL UNIQUE,' \
                                ' comment VARCHAR,' \
                                ' hiddenfiles INTEGER)')
            self.cursor.execute('CREATE TABLE folders' \
                                ' (ID INTEGER NOT NULL PRIMARY KEY,' \
                                ' baseProID INTEGER NOT NULL,' \
                                ' name VARCHAR)')
            self.connection.commit()
            self.__fillDefaults()
        else:
            self.connection=sqlite3.connect(self.dbname)
            self.cursor=self.connection.cursor()
    # ###########################################################
    # Seeds the destProfile table with a few default media sizes.
    def __fillDefaults(self):
        # def insertOrUpdateDest(self, name, comment, size, blocksize, addblock, folder, final):
        self.insertOrUpdateDest("RealCrypt BluRay", "FAT32 auf UDF1.02 (Nero4)", 24931450880, 8192, 0, HOMEDIR, 0)
        self.insertOrUpdateDest("DVD+R JolietRR", "genisoimage ISO9660+JolietRR", 4700002304, 2048, 1, HOMEDIR, 1)
        self.insertOrUpdateDest("DVD+R UDF1.02", "genisoimage ISO9660+UDF1.02", 4699518976, 2048, 2, HOMEDIR, 1)
        self.insertOrUpdateDest("DVD+R JolietRR+UDF1.02", "genisoimage ISO9660+JolietRR+UDF1.02", 4699506688, 2048, 3, HOMEDIR, 1)
        self.insertOrUpdateDest("DVD+R UDF2.01", "truncate -s 4700372992 + mkudffs", 4698988544, 2048, 4, "/mnt", 1)
# ###########################################################
# Fügt einen Satz in "baseProfile" ein, die einzelnen
# Ordnernamen werden in "folders" eingefügt und dem frisch
# angelegten Satz in "baseProfile" zugeordnet.
def insertOrUpdateBase(self, name, comment, inclHidden, folderList):
self.cursor.execute('SELECT ID FROM baseProfile WHERE name LIKE ?', (name, ))
c=self.cursor.fetchone() # c[0]=ID
if c!=None:
# Satz exitiert schon
self.cursor.execute('UPDATE baseProfile SET comment=?, hiddenfiles=?' \
' WHERE ID=?', (comment, inclHidden, c[0]))
self.cursor.execute('DELETE FROM folders WHERE baseProID=?', (c[0], ))
for folder in folderList:
self.cursor.execute('INSERT INTO folders (baseProID, name) VALUES (?, ?)', (c[0], folder))
else:
# Satz neu anlegen
self.cursor.execute('INSERT INTO baseProfile (name, comment, hiddenfiles)' \
' VALUES (?, ?, ?)', (name, comment, inclHidden))
self.connection.commit()
self.cursor.execute('SELECT ID FROM baseProfile WHERE name=?', (name, ))
c=self.cursor.fetchone() # c[0]=ID
if c!=None:
for folder in folderList:
self.cursor.execute('INSERT INTO folders (baseProID, name) VALUES (?, ?)', (c[0], folder))
self.connection.commit()
# ###########################################################
# Löscht den Satz, bei dem die Spalte(name) LIKE "name" ist.
def deleteBase(self, name):
self.cursor.execute('SELECT ID FROM baseProfile WHERE name LIKE ?', (name, ))
c=self.cursor.fetchone()
if c!=None:
self.cursor.execute('DELETE FROM folders WHERE baseProID=?', (c[0], ))
self.cursor.execute('DELETE FROM baseProfile WHERE ID=?', (c[0], ))
self.connection.commit()
# ###########################################################
# Liefert alle Sätze aus "baseProfile" als Liste bzw. den
# einen Satz, bei dem die Spalte(name) LIKE "name" ist.
def getBaseProfiles(self, name="%"):
self.cursor.execute('SELECT ID, name, comment, hiddenfiles FROM baseProfile' \
' WHERE name LIKE ?', (name, ))
rows1=self.cursor.fetchall()
retlst=[]
for r1 in rows1:
self.cursor.execute('SELECT name FROM folders WHERE baseProID=?', (r1[0],))
rows2=self.cur
|
sor.fetchall()
fldrs=[]
for r2 in rows2:
fldrs.append(r2[0])
retlst.append((r1[1], r1[2], r1[3], fldrs))
return(retlst)
# ###########################################################
# Fügt einen Satz in "destProfile" ein.
def insertOrUpdateDest(self, name, comment, size, blocksize, addblock, folder, final):
self.cursor.execute(
|
'SELECT ID FROM destProfile WHERE name LIKE ?', (name, ))
c=self.cursor.fetchone() # c[0]=ID
if c!=None:
# Satz exitiert schon
self.cursor.execute('UPDATE destProfile' \
' SET comment=?, size=?, blocksize=?, addblock=?, fldrname=?, final=?' \
' WHERE ID=?', (comment, size, blocksize, addblock, folder, final, c[0]))
else:
self.cursor.execute('INSERT INTO destProfile (name, comment, size, blocksize, addblock, fldrname, final)' \
' VALUES (?, ?, ?, ?, ?, ?, ?)', (name, comment, size, blocksize, addblock, folder, final))
self.connection.commit()
# ###########################################################
# Löscht den Satz, bei dem die Spalte(name) LIKE "name" ist.
def deleteDest(self, name):
self.cursor.execute('DELETE FROM destProfile WHERE name like ?', (name, ))
self.connection.commit()
# ###########################################################
# Liefert alle Sätze aus "destProfile" als Liste bzw. den
# einen Satz, bei dem die Spalte(name) LIKE "name" ist.
def getDestProfiles(self, name="%"):
self.cursor.execute('SELECT name, comment, size, blocksize, addblock, fldrname, final FROM destProfile' \
' WHERE name LIKE ?', (name, ))
r=self.cursor.fetchall()
return(r)
# ###########################################################
# Liefert alle Destination-Profile-Namen.
def getDestProfileNames(self):
self.cursor.execute('SELECT name FROM destProfile')
r=self.cursor.fetchall()
dpn=[]
for r2 in r:
dpn.append(r2[0])
return(dpn)
# ###########################################################
# Liefert alle Source-Profile-Namen.
def getBaseProfileNames(self):
self.cursor.execute('SELECT name FROM baseProfile')
r=self.cursor.fetchall()
dpn=[]
for r2 in r:
dpn.append(r2[0])
return(dpn)
|
chadversary/deqp
|
scripts/caselist_diff.py
|
Python
|
apache-2.0
| 15,192
| 0.016522
|
# -*- coding: utf-8 -*-
#-------------------------------------------------------------------------
# drawElements Quality Program utilities
# --------------------------------------
#
# Copyright 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#-------------------------------------------------------------------------
import sys
RENAME_LIST_2011_1_2011_2 = [
("dEQP-GLES2.functional.shaders.random.basic_expressions.*", "dEQP-GLES2.functional.shaders.random.basic_expression."),
("dEQP-GLES2.functional.shaders.random.scalar_conversions.*", "dEQP-GLES2.functional.shaders.random.scalar_conversion."),
("dEQP-GLES2.functional.fbo.render.color_clears_*", "dEQP-GLES2.functional.fbo.render.color_clear."),
("dEQP-GLES2.functional.fbo.render.intersecting_quads_*", "dEQP-GLES2.functional.fbo.render.depth."),
("dEQP-GLES2.functional.fbo.render.mix_*", "dEQP-GLES2.functional.fbo.render.color.mix_"),
("dEQP-GLES2.functional.fbo.render.blend_*", "dEQP-GLES2.functional.fbo.render.color.blend_"),
("dEQP-GLES2.functional.fbo.render.shared_colorbuffer_clears_*", "dEQP-GLES2.functional.fbo.render.shared_colorbuffer_clear."),
("dEQP-GLES2.functional.fbo.render.shared_colorbuffer_*", "dEQP-GLES2.functional.fbo.render.shared_colorbuffer."),
("dEQP-GLES2.functional.fbo.render.shared_depthbuffer_*", "dEQP-GLES2.functional.fbo.render.shared_depthbuffer."),
("dEQP-GLES2.functional.fbo.render.texsubimage_*", "dEQP-GLES2.functional.fbo.render.texsubimage."),
("dEQP-GLES2.functional.fbo.render.recreate_colorbuffer_*", "dEQP-GLES2.functional.fbo.render.recreate_colorbuffer.no_rebind_"),
("dEQP-GLES2.functional.fbo.render.recreate_depthbuffer_*", "dEQP-GLES2.functional.fbo.render.recreate_depthbuffer.no_rebind_"),
("dEQP-GLES2.functional.fbo.render.resize_*", "dEQP-GLES2.functional.fbo.render.resize.")
]
RENAME_LIST_2011_2_2011_3 = [
("dEQP-GLES2.usecases.ui.src_over_linear_1_batched", "dEQP-GLES2.usecases.ui.src_over_linear_batched_1"),
("dEQP-GLES2.usecases.ui.src_over_linear_2_batched", "dEQP-GLES2.usecases.ui.src_over_linear_batched_2"),
("dEQP-GLES2.usecases.ui.src_over_linear_4_batched", "dEQP-GLES2.usecases.ui.src_over_linear_batched_4"),
("dEQP-GLES2.usecases.ui.src_over_nearest_1_batched", "dEQP-GLES2.usecases.ui.src_over_nearest_batched_1"),
("dEQP-GLES2.usecases.ui.src_over_nearest_2_batched", "dEQP-GLES2.usecases.ui.src_over_nearest_batched_2"),
("dEQP-GLES2.usecases.ui.src_over_nearest_4_batched", "dEQP-GLES2.usecases.ui.src_over_nearest_batched_4"),
("dEQP-GLES2.usecases.ui.premultiplied_src_over_linear_1_batched", "dEQP-GLES2.usecases.ui.premultiplied_src_over_linear_batched_1"),
("dEQP-GLES2.usecases.ui.premultiplied_src_over_linear_2_batched", "dEQP-GLES2.usecases.ui.premultiplied_src_over_linear_batched_2"),
("dEQP-GLES2.usecases.ui.premultiplied_src_over_linear_4_batched", "dEQP-GLES2.usecases.ui.premultiplied_src_over_linear_batched_4"),
("dEQP-GLES2.usecases.ui.premultiplied_src_over_nearest_1_batched", "dEQP-GLES2.usecases.ui.premultiplied_src_over_nearest_batched_1"),
("dEQP-GLES2.usecases.ui.premultiplied_src_over_nearest_2_batched", "dEQP-GLES2.usecases.ui.premultiplied_src_over_nearest_batched_2"),
("dEQP-GLES2.usecases.ui.premultiplied_src_over_nearest_4_batched", "dEQP-GLES2.usecases.ui.premultiplied_src_over_nearest_batched_4"),
("dEQP-GLES2.usecases.ui.no_blend_linear_1_batched", "dEQP-GLES2.usecases.ui.no_blend_linear_batched_1"),
("dEQP-GLES2.usecases.ui.no_blend_linear_2_batched", "dEQP-GLES2.usecases.ui.no_blend_linear_batched_2"),
("dEQP-GLES2.usecases.ui.no_blend_linear_4_batched", "dEQP-GLES2.usecases.ui.no_blend_linear_batched_4"),
("dEQP-GLES2.usecases.ui.no_blend_nearest_1_batched", "dEQP-GLES2.usecases.ui.no_blend_nearest_batched_1"),
("dEQP-GLES2.usecases.ui.no_blend_nearest_2_batched", "dEQP-GLES2.usecases.ui.no_blend_nearest_batched_2"),
("dEQP-GLES2.usecases.ui.no_blend_nearest_4_batched",
|
"dEQP-GLES2.usecases.ui.no_blend_nearest_batched_4")
]
RENAME_LIST_2011_3_2011_4 = []
RENAME_LIST_2011_4_2012_1 = [
("dEQP-GLES2.functional.vertex_arrays.multiple_attributes.output_types.*", "dEQP-GLES2.functional.vertex_arrays.multiple_attributes.input_types."),
]
RENAME_LIST_2012_2_2012_3 = [
("dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_float_vertex",
|
"dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_float_float_vertex"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_float_fragment", "dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_float_float_fragment"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_float_vertex", "dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_float_float_vertex"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_float_fragment", "dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_float_float_fragment"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec2_vertex", "dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec2_float_vertex"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec2_fragment", "dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec2_float_fragment"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec2_vertex", "dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec2_float_vertex"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec2_fragment", "dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec2_float_fragment"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec3_vertex", "dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec3_float_vertex"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec3_fragment", "dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec3_float_fragment"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec3_vertex", "dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec3_float_vertex"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec3_fragment", "dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec3_float_fragment"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec4_vertex", "dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec4_float_vertex"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec4_fragment", "dEQP-GLES2.functional.shaders.operator.geometric.refract.mediump_vec4_float_fragment"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec4_vertex", "dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec4_float_vertex"),
("dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec4_fragment", "dEQP-GLES2.functional.shaders.operator.geometric.refract.highp_vec4_float_fragment"),
("dEQP-GLES2.functional.negative_api.texture.copyteximage2d_unequal_width_height_cube", "dEQP-GLES2.functional.negative_api.texture.copyteximage2d_inequal_width_height_cube"),
("dEQP-GLES2.functional.negative_api.texture.teximage2d_unequal_width_height_cube", "dEQP-GLES2.functional.negative_api.texture.teximage2d_inequal_width_height_cube"),
("dEQP-GLES2.functional.negative_api.vertex_array.draw_arra
|
h2oai/h2o-3
|
h2o-py/tests/testdir_utils/pyunit_typechecks.py
|
Python
|
apache-2.0
| 6,106
| 0.001965
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""Pyunit for h2o.utils.typechecks."""
from __future__ import absolute_import, division, print_function
import math
from h2o import H2OFrame
from h2o.exceptions import H2OTypeError, H2OValueError
from h2o.utils.typechecks import (U, I, NOT, Tuple, Dict, numeric, h2oframe, pandas_dataframe, numpy_ndarray,
assert_is_type, assert_matches, assert_satisfies)
# noinspection PyUnresolvedReferences,PyClassHasNoInit
def test_asserts():
    """Test type-checking functionality."""
    def assert_error(*args, **kwargs):
        """Check that assert_is_type() with given arguments throws an error."""
        try:
            assert_is_type(*args, **kwargs)
            raise RuntimeError("Failed to throw an exception")
        except H2OTypeError as exc:
            # Check whether the message can stringify properly
            message = str(exc)
            assert len(message) < 1000
            return
    # Small class hierarchy (D inherits from both B and C) used to exercise
    # the isinstance-style checks below.
    class A(object):
        """Dummy A."""
    class B(A):
        """Dummy B."""
    class C(A):
        """Dummy C."""
    class D(B, C):
        """Dummy D."""
    # --- positive checks: these must all be accepted ---
    assert_is_type(3, int)
    assert_is_type(2**100, int)
    assert_is_type("3", str)
    assert_is_type(u"3", str)
    assert_is_type("foo", u"foo")
    assert_is_type(u"foo", "foo")
    assert_is_type("I", *list("ABCDEFGHIJKL"))
    assert_is_type(False, bool)
    assert_is_type(43, str, bool, int)
    assert_is_type(4 / 3, int, float)
    assert_is_type(None, None)
    assert_is_type(None, A, str, None)
    # Container types: [T] = list of T, {T} = set of T, {K: V} = dict.
    assert_is_type([], [float])
    assert_is_type([1, 4, 5], [int])
    assert_is_type([1.0, 2, 5], [int, float])
    assert_is_type([[2.0, 3.1, 0], [2, 4.4, 1.1], [-1, 0]], [[int, float]])
    assert_is_type([1, None, 2], [int, float, None])
    assert_is_type({1, 5, 1, 1, 3}, {int})
    assert_is_type({1, "hello", 3}, {int, str})
    assert_is_type({"foo": 1, "bar": 2}, {str: int})
    assert_is_type({"foo": 3, "bar": [5], "baz": None}, {str: U(int, None, [int])})
    assert_is_type({"foo": 1, "bar": 2}, {"foo": int, "bar": U(int, float, None), "baz": bool})
    # Dict schemas: every listed key is optional, extra keys are rejected.
    assert_is_type({}, {"spam": int, "egg": int})
    assert_is_type({"spam": 10}, {"spam": int, "egg": int})
    assert_is_type({"egg": 1}, {"spam": int, "egg": int})
    assert_is_type({"egg": 1, "spam": 10}, {"spam": int, "egg": int})
    assert_is_type({"egg": 1, "spam": 10}, Dict(egg=int, spam=int))
    assert_is_type({"egg": 1, "spam": 10}, Dict(egg=int, spam=int, ham=U(int, None)))
    # Tuple(T) matches a homogeneous tuple of any length.
    assert_is_type((1, 3), (int, int))
    assert_is_type(("a", "b", "c"), (int, int, int), (str, str, str))
    assert_is_type((1, 3, 4, 7, 11, 18), Tuple(int))
    assert_is_type((1, 3, "spam", 3, "egg"), Tuple(int, str))
    assert_is_type([1, [2], [{3}]], [int, [int], [{3}]])
    # Class hierarchy, lambdas, and combinators U (union) / I (intersect) / NOT.
    assert_is_type(A(), None, A)
    assert_is_type(B(), None, A)
    assert_is_type(C(), A, B)
    assert_is_type(D(), I(A, B, C))
    assert_is_type(A, type)
    assert_is_type(B, lambda aa: issubclass(aa, A))
    for a in range(-2, 5):
        assert_is_type(a, -2, -1, 0, 1, 2, 3, 4)
    assert_is_type(1, numeric)
    assert_is_type(2.2, numeric)
    assert_is_type(1, I(numeric, object))
    assert_is_type(34, I(int, NOT(0)))
    assert_is_type(["foo", "egg", "spaam"], [I(str, NOT("spam"))])
    assert_is_type(H2OFrame(), h2oframe)
    assert_is_type([[2.0, 3.1, 0], [2, 4.4, 1.1], [-1, 0, 0]],
                   I([[numeric]], lambda v: all(len(vi) == len(v[0]) for vi in v)))
    assert_is_type([None, None, float('nan'), None, "N/A"], [None, "N/A", I(float, math.isnan)])
    # --- negative checks: these must all raise H2OTypeError ---
    assert_error(3, str)
    assert_error(0, float)
    assert_error("Z", *list("ABCDEFGHIJKL"))
    assert_error(u"Z", "a", "...", "z")
    assert_error("X", u"x")
    assert_error(0, bool)
    assert_error(0, float, str, bool, None)
    assert_error([1, 5], [float])
    assert_error((1, 3), (int, str), (str, int), (float, float))
    assert_error(A(), None, B)
    assert_error(A, A)
    assert_error(A, lambda aa: issubclass(aa, B))
    assert_error(135, I(int, lambda x: 0 <= x <= 100))
    assert_error({"foo": 1, "bar": "2"}, {"foo": int, "bar": U(int, float, None)})
    assert_error(3, 0, 2, 4)
    assert_error(None, numeric)
    assert_error("sss", numeric)
    assert_error(B(), I(A, B, C))
    assert_error(2, I(int, str))
    assert_error(0, I(int, NOT(0)))
    assert_error(None, NOT(None))
    assert_error((1, 3, "2", 3), Tuple(int))
    assert_error({"spam": 10}, Dict(spam=int, egg=int))
    assert_error({"egg": 5}, Dict(spam=int, egg=int))
    assert_error(False, h2oframe, pandas_dataframe, numpy_ndarray)
    assert_error([[2.0, 3.1, 0], [2, 4.4, 1.1], [-1, 0]],
                 I([[numeric]], lambda v: all(len(vi) == len(v[0]) for vi in v)))
    try:
        # Cannot use `assert_error` here because typechecks module cannot detect args in (*args, *kwargs)
        assert_is_type(10000000, I(int, lambda port: 1 <= port <= 65535))
        assert False, "Failed to throw an exception"
    except H2OTypeError as e:
        assert "integer & 1 <= port <= 65535" in str(e), "Bad error message: '%s'" % e
    # --- regex matching and predicate helpers ---
    url_regex = r"^(https?)://((?:[\w-]+\.)*[\w-]+):(\d+)/?$"
    assert_matches("Hello, world!", r"^(\w+), (\w*)!$")
    assert_matches("http://127.0.0.1:3233/", url_regex)
    m = assert_matches("https://localhost:54321", url_regex)
    assert m.group(1) == "https"
    assert m.group(2) == "localhost"
    assert m.group(3) == "54321"
    x = 5
    assert_satisfies(x, x < 1000)
    assert_satisfies(x, x ** x > 1000)
    assert_satisfies(url_regex, url_regex.lower() == url_regex)
    try:
        assert_satisfies(url_regex, url_regex.upper() == url_regex)
    except H2OValueError as e:
        # The failing expression's source text must appear in the message.
        assert "url_regex.upper() == url_regex" in str(e), "Error message is bad: " + str(e)
    # pandas/numpy are optional dependencies; skip silently when absent.
    try:
        import pandas
        import numpy
        assert_is_type(pandas.DataFrame(), pandas_dataframe)
        assert_is_type(numpy.ndarray(shape=(5,)), numpy_ndarray)
    except ImportError:
        pass
# This test doesn't really need a connection to H2O cluster.
test_asserts()
|
m4773rcl0ud/launchpaddings
|
launchpaddings.py
|
Python
|
gpl-3.0
| 4,694
| 0.003409
|
from mididings import *
from launchpad_utils import *
config(
backend='jack-rt',
client_name='launchpad',
in_ports=[
'Pad Keys',
],
out_ports=[
'To Pad',
'To PC',
]
)
# FROM PAD TO PC
# First the controls
active = 0
muted = 127
UpperRow = (Filter(CTRL) >> CtrlValueFilter(127) >>
NoteOn(EVENT_CTRL, EVENT_CTRL) >> Velocity(-FirstCtrl))
# Use controls on upper row, what matters is event VALUE
dMap = {
0: (g
|
reen, 0, active),
1: (red, -16, muted),
2: (green, -16, active),
3: (red, -32, muted),
4: (green, -32, active),
5: (red, -48, muted),
6: (green, -48, active),
7: (red, -64, muted),
}
# The even rows activate (green), the odd rows mute (red) patterns 0-31
# Moreover, the Pad keys light up with the right colors
MapCtrl32 = [RowFilter(k) >> [Veloci
|
ty(fixed=v[0]) >> Port(1), # color to Pad
~OnlyRight >> Transpose(v[1]) >> Ctrl(EVENT_NOTE, v[2])
>> Port(2), # ctrl to PC
] for k, v in list(dMap.items())]
# I would like the right keys to activate/mute entire groups (rows) of patterns:
def EntireRow(row):
    """Expand to the note events of every key in *row*.

    Relies on SquareKey and longside from launchpad_utils (star-imported
    at the top of the file).
    """
    return [SquareKey(row, i) for i in longside]
ToEntireRow = [RowFilter(i) % EntireRow(i) for i in side]
FullControl = (OnlyRight % ToEntireRow >> MapCtrl32 >>
(CtrlFilter(7) % Ctrl(8, EVENT_VALUE)))
# this last thing as ctrl 7 mutes channel!
# This is our final function Pad -> PC.
# KEYBOARD MODE
cVec = [ # row colors for full keyboard (useful also in EQ, mixer...)
color(0, 2),
red,
color(1, 3),
color(2, 3),
yellow,
color(3, 2),
color(3, 1),
green,
]
hVec = [ # row colors for half keyboard (useful also in EQ, mixer...)
red,
orange,
yellow,
green,
red,
orange,
yellow,
green,
]
def Keyboard(tonic, scale, lower, upper, octaves, coloring):
    """Build a mididings keyboard layer for pad rows [lower, upper).

    Each row is lit with coloring[i] (sent to the Pad on port 1) while the
    non-rightmost keys are transposed into the given scale, shifted by
    *octaves*, and forwarded to the PC on port 2.
    """
    return [RowFilter(i) >> [Velocity(fixed=coloring[i]) >> Port(1), ~OnlyRight
                             >> Transpose(tonic + (octaves * Octave) - (4 * i))
                             >> MakeScale(tonic, scale)
                             >> Port(2), ] for i in range(lower, upper)]
HalfHalf = KeySplit(64, FullControl, Keyboard(A, Minor, 4, 8, -2, hVec))
# FROM PC TO PAD
# I send notes from my DAW in a more understandable way, both for velocities
# and notes. This translates them into the right language for the Pad.
PlaceMap = [KeyFilter(C + (i * Octave), C + (i * Octave) + 8)
>> Transpose(88 - (i * 28)) for i in side]
# This maps octaves 8-1 to rows 0-7, semitones to columns
LightMap = [VelocityFilter(i * 8, (i * 8) + 8)
>> Velocity(fixed=color(i // 4, i % 4)) for i in range(0, 16)]
# This maps velocity intervals to colors - an 8-range of velocities is
# mapped to a color
PCToPad = Pass() >> LightMap >> PlaceMap
# This is our final function PC -> Pad
# pass is to overcome some kind of bug
# PORT ROUTING (see ports defined above)
PadFilter = PortFilter(1)
ControlFilter = PortFilter(2)
BeatFilter = PortFilter(3)
CandyFilter = PortFilter(4)
# SCENE STRUCTURE
# The control is handled by upper row
control = UpperRow >> SceneSwitch()
CtrlNoCandy = [PadFilter >> FullControl, # from PAD goes to the PC
BeatFilter >> Port(1) >> PCToPad, # The beat goes the other
# way around
]
CtrlCandy = [PadFilter >> FullControl,
CandyFilter >> Port(1) >> PCToPad,
]
HalfNoCandy = [PadFilter >> HalfHalf,
BeatFilter >> Port(1) >> KeyFilter(lower='c4') >> PCToPad,
]
HalfCandy = [PadFilter >> HalfHalf,
CandyFilter >> Port(1) >> PCToPad,
]
KeyNoCandy = PadFilter >> Keyboard(A, Minor, 0, 8, 0, cVec)
KeyCandy = [PadFilter >> Keyboard(A, Minor, 0, 8, 0, cVec),
CandyFilter >> Port(1) >> PCToPad,
]
# The post is dummy, I just make sure everything is sent on Channel 1
post = Channel(1) # Only channel where Pad receives (receives also on 3,
# but in a different way)
# Scenes I have implemented so far: only beat, both, only candy
# It's important to filter out what you don't use (or it goes
# to a wrong channel)
scenes = {
#1: Scene("Control", CtrlNoCandy),
1: Scene("Control and Keyboard", HalfNoCandy),
#3: Scene("Keyboard", KeyNoCandy),
#4: Scene("Empty", KeyNoCandy),
#5: Scene("Control Candy", CtrlCandy),
#6: Scene("Control Keyboard Candy", HalfCandy),
#7: Scene("Keyboard Candy", KeyCandy),
#8: Scene("Empty", CtrlNoCandy), #Put here what you want!
}
# And...go!
run(
control=None,
pre=Pass(),
post=post,
scenes=scenes,
)
|
pombredanne/django-narcissus
|
narcissus/garden/__init__.py
|
Python
|
bsd-3-clause
| 1,221
| 0.001638
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.datastructures import SortedDict
from django.utils.importlib import import_module
from narcissus.settings import FLOWERS
# Cache of actual flower classes.
_narcissus_flowers = None
def _get_flowers():
    """Resolve the dotted paths in FLOWERS into a SortedDict of classes.

    The result is memoized in the module-level ``_narcissus_flowers`` cache,
    so the imports run only once per process.  Raises ImproperlyConfigured
    when a path cannot be imported or the named class is missing.
    (Python 2 codebase: note the ``except ImportError, e`` syntax.)
    """
    global _narcissus_flowers
    if _narcissus_flowers is None:
        flowers = []
        for path in FLOWERS:
            # Split "pkg.module.ClassName" into module path and attribute name.
            i = path.rfind('.')
            module, attr = path[:i], path[i+1:]
            try:
                mod = import_module(module)
            except ImportError, e:
                raise ImproperlyConfigured('Error importing narcissus flower module %s: "%s"' % (module, e))
            try:
                flower = getattr(mod, attr)
            except AttributeError:
                raise ImproperlyConfigured('Module "%s" does not define a "%s" flower class' % (module, attr))
            # Structure it in a tuple that will be converted to a dict using
            # the verbose names of the petals as keys.
            flowers.append((flower.get_verbose_name(), flower))
        _narcissus_flowers = SortedDict(flowers)
    return _narcissus_flowers
# Resolved at import time so configuration errors surface early.
flowers = _get_flowers()
|
GNS3/gns3-server
|
gns3server/compute/dynamips/nodes/device.py
|
Python
|
gpl-3.0
| 2,538
| 0
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 GNS3 Technologies Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
class Device:
    """
    Common base for Dynamips-backed switch and hub nodes.

    :param name: name for this device
    :param node_id: Node instance identifier
    :param project: Project instance
    :param manager: Parent manager
    :param hypervisor: Dynamips hypervisor instance
    """

    def __init__(self, name, node_id, project, manager, hypervisor=None):
        self._name = name
        self._id = node_id
        self._project = project
        self._manager = manager
        self._hypervisor = hypervisor

    @property
    def name(self):
        """Current device name."""
        return self._name

    @name.setter
    def name(self, new_name):
        """Rename the device to *new_name*."""
        self._name = new_name

    @property
    def id(self):
        """Node identifier."""
        return self._id

    @property
    def project(self):
        """Owning Project instance."""
        return self._project

    @property
    def manager(self):
        """Parent manager instance."""
        return self._manager

    @property
    def hypervisor(self):
        """Dynamips hypervisor instance (or None)."""
        return self._hypervisor

    def updated(self):
        """Emit a node.updated event for this device."""
        self.project.emit("node.updated", self)

    def create(self):
        """Create the device; concrete subclasses must override."""
        raise NotImplementedError

    @property
    def hw_virtualization(self):
        """Base devices never use hardware virtualization."""
        return False
|
CiscoSystems/fabric_enabler
|
dfa/server/services/firewall/native/drivers/phy_asa.py
|
Python
|
apache-2.0
| 4,614
| 0
|
# Copyright 2015 Cisco Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Ap
|
ache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless requi
|
red by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from dfa.common import dfa_logger as logging
from dfa.server.services.firewall.native import fabric_setup_base as FP
from dfa.server.services.firewall.native.drivers import base
from dfa.server.services.firewall.native.drivers import asa_rest as asa
LOG = logging.getLogger(__name__)
class PhyAsa(base.BaseDrvr, FP.FabricApi):
    '''Physical ASA Driver

    Drives a physical ASA 5585 firewall through its REST interface
    (asa_rest.Asa5585).  Tenant network parameters (subnets, gateways,
    segments, VLANs) come from the FabricApi mixin's get_* helpers.
    '''
    def __init__(self):
        LOG.debug("Initializing physical ASA")
        super(PhyAsa, self).__init__()
    def initialize(self, cfg_dict):
        # NOTE(review): all cfg_dict values are assumed to be strings
        # (.strip() would raise on None) -- confirm with the caller.
        LOG.debug("Initialize for PhyAsa")
        self.mgmt_ip_addr = cfg_dict.get('mgmt_ip_addr').strip()
        self.user = cfg_dict.get('user').strip()
        self.pwd = cfg_dict.get('pwd').strip()
        self.interface_in = cfg_dict.get('interface_in').strip()
        self.interface_out = cfg_dict.get('interface_out').strip()
        self.asa5585 = asa.Asa5585(self.mgmt_ip_addr, self.user, self.pwd)
    def pop_evnt_que(self, que_obj):
        # Event queue is not used by this driver.
        LOG.debug("Pop Event for PhyAsa")
        pass
    def pop_dcnm_obj(self, dcnm_obj):
        # DCNM handle is not used by this driver.
        LOG.debug("Pop Event for DCNM obj")
        pass
    def nwk_create_notif(self, tenant_id, tenant_name, cidr):
        ''' Network Create Notification (no-op for the physical ASA) '''
        LOG.debug("Nwk Create Notif PhyAsa")
        pass
    def nwk_delete_notif(self, tenant_id, tenant_name, nwk_id):
        ''' Network Delete Notification (no-op for the physical ASA) '''
        LOG.debug("Nwk Delete Notif PhyAsa")
        pass
    def is_device_virtual(self):
        # Physical appliance, hence False.
        return False
    def get_name(self):
        # Put it in a constant TODO(padkrish)
        return 'phy_asa'
    def get_max_quota(self):
        # Delegates to the appliance's own quota reporting.
        return self.asa5585.get_quota()
    def create_fw(self, tenant_id, data):
        """Configure the ASA for a tenant and apply its firewall policy."""
        LOG.debug("In creating phy ASA FW data is %s", data)
        tenant_name = data.get('tenant_name')
        # Gather in/out addressing and VLANs from the fabric (FabricApi).
        in_subnet, in_ip_start, in_ip_end, in_gw, in_sec_gw = (
            self.get_in_ip_addr(tenant_id))
        in_serv_node = self.get_in_srvc_node_ip_addr(tenant_id)
        out_subnet, out_ip_start, out_ip_end, out_ip_gw, out_sec_gw = (
            self.get_out_ip_addr(tenant_id))
        out_serv_node = self.get_out_srvc_node_ip_addr(tenant_id)
        in_seg, in_vlan = self.get_in_seg_vlan(tenant_id)
        out_seg, out_vlan = self.get_out_seg_vlan(tenant_id)
        # NOTE(review): netmask 255.255.255.0 is hard-coded -- presumably all
        # tenant service subnets are /24; confirm before reuse.
        status = self.asa5585.setup(tenant_name, in_vlan, out_vlan,
                                    in_serv_node, '255.255.255.0', in_gw,
                                    in_sec_gw, out_serv_node, '255.255.255.0',
                                    out_ip_gw, out_sec_gw, self.interface_in,
                                    self.interface_out)
        if status is False:
            LOG.error("Physical FW instance creation failure.")
            return False
        status = self.asa5585.apply_policy(data)
        if status is False:
            LOG.error("Applying FW policy failure.")
        return status
    def delete_fw(self, tenant_id, data):
        """Tear down the tenant's configuration on the ASA."""
        LOG.debug("In Delete fw data is %s", data)
        tenant_name = data.get('tenant_name')
        in_subnet, in_ip_start, in_ip_end, in_gw, in_sec_gw = (
            self.get_in_ip_addr(tenant_id))
        in_serv_node = self.get_in_srvc_node_ip_addr(tenant_id)
        out_subnet, out_ip_start, out_ip_end, out_ip_gw, out_sec_gw = (
            self.get_out_ip_addr(tenant_id))
        out_serv_node = self.get_out_srvc_node_ip_addr(tenant_id)
        in_seg, in_vlan = self.get_in_seg_vlan(tenant_id)
        out_seg, out_vlan = self.get_out_seg_vlan(tenant_id)
        status = self.asa5585.cleanup(tenant_name, in_vlan, out_vlan,
                                      in_serv_node, '255.255.255.0',
                                      out_serv_node, '255.255.255.0',
                                      self.interface_in, self.interface_out)
        return status
    def modify_fw(self, tenant_id, data):
        """Re-apply the (changed) firewall policy for a tenant."""
        LOG.debug("In Modify fw data is %s", data)
        return self.asa5585.apply_policy(data)
|
VictorRodriguez/personal
|
ec-ea/practices/pract2/sga.py
|
Python
|
apache-2.0
| 2,812
| 0.011024
|
#!/usr/bin/env python3
import argparse
import random
import time
def bin(number):
    """Return *number* as a fixed-width 5-bit binary string, e.g. 5 -> "00101".

    Intentionally shadows the builtin ``bin`` within this module: the GA code
    needs a prefix-free, zero-padded chromosome of exactly 5 bits.  Uses the
    '05b' format spec directly instead of formatting with spaces and
    replacing them with zeros.  Assumes 0 <= number <= 31.
    """
    return format(number, "05b")
def initialize(population):
    """Create *population* random 5-bit chromosome strings."""
    return [bin(random.randint(0, 31)) for _ in range(population)]
def evaluate(population):
    """Score each binary chromosome with fitness y = x**2.

    Returns (scored, total, done): *scored* is a list of
    (value, fitness, 0) triples, *total* is the fitness sum, and *done*
    is True when the optimum value 31 appears in the population.
    """
    values = [int(genome, 2) for genome in population]
    scored = [(v, v ** 2, 0) for v in values]
    total = sum(fitness for _, fitness, _ in scored)
    done = any(v == 31 for v in values)
    return scored, total, done
def generate_prob(population, suma):
    """Attach to each (value, fitness, _) triple its share of the total
    fitness *suma*, rounded to two decimal places."""
    return [(value, fitness, round(fitness / suma, 2))
            for value, fitness, _ in population]
def ruleta(population):
    """Roulette-wheel selection over (value, fitness, probability) triples.

    Shuffles the wheel, then walks it accumulating the per-individual
    probabilities until the running total reaches the random threshold
    1/randint(1, 100); if the threshold is never reached the last
    individual examined wins.  Mutates *population* order in place.

    Cleanups vs. the original: the ZeroDivisionError handler was
    unreachable (randint's lower bound is 1), the enumerate index was
    unused, and a commented-out debug print was removed.  The RNG call
    sequence (shuffle, shuffle, randint) is unchanged.
    """
    random.shuffle(population)
    random.shuffle(population)  # second shuffle kept for parity with the original RNG stream
    threshold = 1 / random.randint(1, 100)  # randint >= 1, so never divides by zero
    running = 0
    chosen = population[-1]  # fallback when probabilities sum below threshold
    for chosen in population:
        running += chosen[2]
        if threshold <= running:
            break
    return chosen
def crossover(mom, dad):
    """Single-point crossover of two (value, ...) parents at a random cut.

    Returns two child chromosome strings: mom's head + dad's tail, and
    dad's head + mom's tail, with the cut point drawn from randint(0, 4).
    """
    cut = random.randint(0, 4)
    mom_genome = bin(mom[0])
    dad_genome = bin(dad[0])
    first_child = mom_genome[:cut] + dad_genome[cut:]
    second_child = dad_genome[:cut] + mom_genome[cut:]
    return first_child, second_child
def main(ngenerations):
    """Run the genetic algorithm for at most *ngenerations* iterations.

    Stops early once an individual reaches the optimum value 31 (all five
    bits set).  Prints the last generation number, the final evaluated
    population, and a histogram of the populations seen.
    """
    initial = initialize(4)
    evaluated,suma,end = evaluate(initial)
    evaluated_with_p = generate_prob(evaluated,suma)
    generations = {}
    last_generation = 0
    for x in range(0, ngenerations):
        last_generation += 1
        # Count how often each exact population (stringified) occurred.
        try:
            generations[str(initial)] += 1
        except KeyError:
            generations[str(initial)] = 1
        # Two roulette-selected pairs breed the next 4-individual population.
        child_1,child_2 = crossover(ruleta(evaluated_with_p),ruleta(evaluated_with_p))
        child_3,child_4 = crossover(ruleta(evaluated_with_p),ruleta(evaluated_with_p))
        initial = [child_1, child_2,
                   child_3, child_4]
        evaluated,suma,end = evaluate(initial)
        evaluated_with_p = generate_prob(evaluated,suma)
        if end:
            # Optimum chromosome (31) found -- stop evolving.
            break
    print("Last Generation: #%d" % last_generation)
    for child in evaluated_with_p:
        print(child)
    for generation in generations.items():
        print(generation)
if __name__ == '__main__':
    # CLI entry point: -g/--generations caps the number of GA iterations.
    parser = argparse.ArgumentParser()
    parser.add_argument('-g', "--generations", help="Generations", type=int)
    args = parser.parse_args()
    if args.generations:
        main(args.generations)
    else:
        parser.print_help()
|
Rihorama/dia2code
|
src/dia2code/classd/cls_attribute.py
|
Python
|
gpl-3.0
| 1,404
| 0.019231
|
#!/usr/bin/python3
class ClsAttribute:
    """A single UML class attribute parsed from a dia XML diagram."""

    # Maps dia's numeric visibility codes to their names.  Code 3 stands for
    # "implementation", which is not implemented, so public is the default.
    visibility_dict = {0: "public",
                       1: "private",
                       2: "protected",
                       3: "public"}

    def __init__(self, cls, attr_dict):
        """Build the attribute from a dia attribute dictionary.

        Args:
            cls: the owning class object.
            attr_dict: dict whose keys mirror the element names in the dia
                XML ("name", "type", "visibility", "abstract",
                "class_scope", "comment", "value").
        """
        self.my_class = cls
        self.name = attr_dict["name"]
        self.d_type = attr_dict["type"]
        self.visibility = self.visibility_dict[attr_dict["visibility"]]
        self.abstract_flag = attr_dict["abstract"]
        # static is marked as "class_scope" in dia
        self.static_flag = attr_dict["class_scope"]
        self.comment = attr_dict["comment"]
        # Default value, or None when the diagram left it empty.
        self.value = None
        if attr_dict["value"] != "":
            self.value = attr_dict["value"]

    def printMe(self):
        """A simple print method for debugging purposes.
        """
        print("          ATTRIBUTE {}".format(self))
        print("          Name: {}".format(self.name))
        print("          Data type: {}".format(self.d_type))
        print("          Visibility: {}".format(self.visibility))
        print("          Value: {}".format(self.value))
        print("          ###############")
|
ihmpdcc/cutlass
|
tests/test_sample.py
|
Python
|
mit
| 13,059
| 0.000689
|
#!/usr/bin/env python
""" A unittest script for the Sample module. """
import unittest
import json
from cutlass import Sample
from cutlass import MIXS, MixsException
from CutlassTestConfig import CutlassTestConfig
from CutlassTestUtil import CutlassTestUtil
# pylint: disable=W0703, C1801
class SampleTest(unittest.TestCase):
"""" A unit test class for the Sample module. """
session = None
util = None
    @classmethod
    def setUpClass(cls):
        """ Setup for the unittest. """
        # Establish the session for each test method.  Both the session and
        # the util helper are class-level, so they are shared by every test
        # in this class.
        cls.session = CutlassTestConfig.get_session()
        cls.util = CutlassTestUtil()
def testImport(self):
""" Test the importation of the Sample module. """
success = False
try:
from cutlass import Sample
success = True
except Exception:
pass
self.failUnless(success)
self.failIf(Sample is None)
def testSessionCreate(self):
""" Test the creation of a Sample via the session. """
success = False
sample = None
try:
sample = self.session.create_sample()
success = True
except Exception:
pass
self.failUnless(success)
self.failIf(sample is None)
    def testIntSampleId(self):
        """ Test the int_sample_id property. """
        sample = self.session.create_sample()

        # The shared util runs both the type check and the get/set
        # round-trip for string-valued properties.
        self.util.stringTypeTest(self, sample, "int_sample_id")

        self.util.stringPropertyTest(self, sample, "int_sample_id")

    def testFmaBodySite(self):
        """ Test the fma_body_site property. """
        sample = self.session.create_sample()

        self.util.stringTypeTest(self, sample, "fma_body_site")

        self.util.stringPropertyTest(self, sample, "fma_body_site")

    def testIllegalBodySite(self):
        """ Test the body_site property with an illegal value. """
        sample = self.session.create_sample()

        # Values outside the controlled vocabulary must be rejected.
        with self.assertRaises(Exception):
            sample.body_site = "random"

    def testLegalBodySite(self):
        """ Test the body_site property with a legal value. """
        sample = self.session.create_sample()

        success = False
        body_site = "wound"

        try:
            sample.body_site = body_site
            success = True
        except Exception:
            pass

        self.assertTrue(success, "Able to use the body_site setter")

        self.assertEqual(
            sample.body_site,
            body_site,
            "Property getter for 'body_site' works."
        )

    def testName(self):
        """ Test the name property. """
        sample = self.session.create_sample()

        self.util.stringTypeTest(self, sample, "name")

        self.util.stringPropertyTest(self, sample, "name")

    def testIllegalSupersite(self):
        """ Test the supersite property with an illegal value. """
        sample = self.session.create_sample()

        # "hear" is a deliberate near-miss for the legal value "heart".
        with self.assertRaises(Exception):
            sample.supersite = "hear"

    def testLegalSupersite(self):
        """ Test the supersite property with a legal value. """
        sample = self.session.create_sample()

        success = False
        supersite = "heart"

        try:
            sample.supersite = supersite
            success = True
        except Exception:
            pass

        self.assertTrue(success, "Able to use the supersite setter")

        self.assertEqual(
            sample.supersite,
            supersite,
            "Property getter for 'supersite' works."
        )
def testToJson(self):
""" Test the generation of JSON from a Sample instance. """
sample = self.session.create_sample()
success = False
fma_body_site = "test_fma_body_site"
sample.fma_body_site = fma_body_site
sample_json = None
try:
sample_json = sample.to_json()
success = True
except Exception:
pass
self.assertTrue(success, "Able to use 'to_json'.")
self.assertTrue(sample_json is not None, "to_json() returned dat
|
a.")
parse_success = False
try:
sample_data = json.loads(sample_json)
parse_success = True
except Exception:
pass
self.assertTrue(parse_success,
"to_json() did not throw an exception.")
self.assertTrue(sample_data is not None,
"to_json() returned parsable JSON.")
self.assertTrue('meta' in sample_data, "JSON has 'meta' key in it.")
self.assertEqu
|
al(sample_data['meta']['fma_body_site'],
fma_body_site,
"'fma_body_site' in JSON had expected value."
)
    def testDataInJson(self):
        """ Test if the correct data is in the generated JSON. """
        sample = self.session.create_sample()
        success = False

        fma_body_site = "test_fma_body_site"
        name = "test_name"

        sample.fma_body_site = fma_body_site
        sample.name = name

        sample_json = None

        try:
            sample_json = sample.to_json()
            success = True
        except Exception:
            pass

        self.assertTrue(success, "Able to use 'to_json'.")
        self.assertTrue(sample_json is not None, "to_json() returned data.")

        parse_success = False

        try:
            sample_data = json.loads(sample_json)
            parse_success = True
        except Exception:
            pass

        self.assertTrue(parse_success,
                        "to_json() did not throw an exception.")
        self.assertTrue(sample_data is not None,
                        "to_json() returned parsable JSON.")

        # Serialized user data lives under the 'meta' key.
        self.assertTrue('meta' in sample_data, "JSON has 'meta' key in it.")

        self.assertEqual(sample_data['meta']['name'],
                         name,
                         "'name' in JSON had expected value."
                         )

    def testId(self):
        """ Test the id property. """
        sample = self.session.create_sample()

        # A freshly created template has no server-assigned ID yet...
        self.assertTrue(sample.id is None,
                        "New template sample has no ID.")

        # ...and the ID is read-only from the client side.
        with self.assertRaises(AttributeError):
            sample.id = "test"

    def testVersion(self):
        """ Test the version property. """
        sample = self.session.create_sample()

        # A freshly created template has no version yet.
        self.assertTrue(sample.version is None,
                        "New template sample has no version.")

        # Arbitrary version strings are rejected.
        with self.assertRaises(ValueError):
            sample.version = "test"
def testMixs(self):
""" Test the mixs property. """
sample = self.session.create_sample()
self.assertTrue(sample.mixs is None,
"New template sample has no MIXS data.")
invalid_test_mixs = {
"a": 1,
"b": 2
}
with self.assertRaises(MixsException):
sample.mixs = invalid_test_mixs
self.assertTrue(sample.mixs is None,
"Template sample has no MIXS after invalid set attempt.")
valid_mixs = {
"biome": "biome",
"body_product": "body_product",
"collection_date": "2000-01-01",
"env_package": "env_package",
"feature": "feature",
"geo_loc_name": "geo_loc_name",
"lat_lon": "lat_lon",
"material": "material",
"project_name": "project_name",
"rel_to_oxygen": "rel_to_oxygen",
"samp_collect_device": "samp_collect_device",
"samp_mat_process": "samp_mat_process",
"samp_size": "samp_size",
"source_mat_id": ["a", "b", "c"]
}
# Assume failure
success = False
try:
sample.mixs = valid_mixs
success = True
except Exception:
pass
self.assertTrue(success, "Valid MIXS data does not raise exception.")
self.assertTrue(sample.mixs is not None, "mixs getter retrieves data.")
biome = sample.mixs['biome']
self.assertEqual(biom
|
debian-live/live-magic
|
tests/test_sources_list.py
|
Python
|
gpl-3.0
| 4,591
| 0.004356
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# live-magic - GUI frontend to create Debian LiveCDs, etc.
# Copyright (C) 2007-2010 Chris Lamb <lamby@debian.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from DebianLive.utils import get_m
|
irror
class TestSourcesList(unittest.TestCase):
    """Shared fixture: a throw-away file standing in for sources.list."""

    def setUp(self):
        # Create an empty temp file; each test overwrites it via f_w().
        import tempfile
        fd, self.filename = tempfile.mkstemp('live-magic')
        os.close(fd)

    def tearDown(self):
        # The file may already be gone; ignore that.
        try:
            os.unlink(self.filename)
        except OSError:
            pass

    def f_w(self, contents, filename=None):
        """Write `contents` to `filename` (default: the fixture file)."""
        if filename is None:
            f = open(self.filename, 'w+')
        else:
            f = open(filename, 'w+')
        f.write(contents)
        f.close()
class TestMatch(TestSourcesList):
    """sources.list lines that should yield a usable mirror."""

    def assertMatchLine(self, line):
        self.f_w(line)
        # assert_ is a deprecated TestCase alias (removed in Python 3.12);
        # use assertTrue instead.
        self.assertTrue(get_mirror(None, sources_list=self.filename, defaults=None))

    def testCountryDebianMirror(self):
        self.assertMatchLine('deb http://ftp.uk.debian.org/debian stable main')

    def testDebianMirror(self):
        self.assertMatchLine('deb http://ftp.debian.org/debian stable main')

    def testLocalhost(self):
        self.assertMatchLine('deb http://localhost/debian stable main')

    def testOtherURL(self):
        self.assertMatchLine('deb http://the.earth.li/debian stable main')


class TestNoMatch(TestSourcesList):
    """sources.list lines that must NOT be picked as a mirror."""

    def assertNoMatchLine(self, line):
        self.f_w(line)
        # failIf is a deprecated TestCase alias (removed in Python 3.12).
        self.assertFalse(get_mirror(None, sources_list=self.filename, defaults=None))

    def testComments(self):
        self.assertNoMatchLine('# comment')

    def testBogus(self):
        self.assertNoMatchLine('bogus')

    def testSecurity(self):
        self.assertNoMatchLine('deb http://security.debian.org/debian stable main')

    def testBackports(self):
        self.assertNoMatchLine('deb http://backports.debian.org/debian stable main')

    def testVolatile(self):
        self.assertNoMatchLine('deb http://volatile.debian.org/debian stable main')

    def testMultimedia(self):
        self.assertNoMatchLine('deb http://www.debian-multimedia.org/debian stable main')


class TestErrors(TestSourcesList):
    def testFileNotFound(self):
        # A missing sources.list must be handled gracefully, not raise.
        self.assertFalse(get_mirror(None, sources_list='/proc/invisible-file', defaults=None))


class TestDefaults(TestSourcesList):
    """Fallback to the live-build defaults file when sources.list is unusable."""

    def setUp(self):
        TestSourcesList.setUp(self)
        import tempfile
        fd, self.defaults = tempfile.mkstemp('live-magic')
        os.close(fd)

    def testDefaults(self):
        # With no usable sources.list, LB_MIRROR_BOOTSTRAP from the defaults
        # file wins.
        mirror = 'http://test.com/debian'
        self.f_w("bogus", self.filename)
        self.f_w("LB_MIRROR_BOOTSTRAP=\"%s\"" % mirror, self.defaults)
        ret = get_mirror(None, sources_list=self.filename, defaults=self.defaults)
        self.assertEqual(ret, mirror)

    def testDefaultsIOError(self):
        self.f_w("bogus", self.filename)
        ret = get_mirror('fallback', sources_list=self.filename, defaults='/proc/nosuchfile')
        self.assertEqual(ret, 'fallback')
"""
# Not implemented yet
class Prefer(TestSourcesList):
def assertPrefer(self, *ordering):
def test():
self.f_w(""""""
deb %s stable main
deb %s stable main
"""""" % (ordering[0], ordering[1]))
self.assertEqual(self.s.get_mirror(), ordering[0])
test()
ordering.reverse()
self.setUp()
test()
def testPreferLocalhost(self):
self.assertPrefer('http://localhost/debian', 'http://ftp.uk.debian.org/debian')
def testPreferCountry(self):
self.assertPrefer('http://ftp.uk.debian.org/debian', 'http://ftp.debian.org/debian')
def testPreferNonOfficial(self):
self.assertPrefer('http://ftp.uk.debian.org/debian', 'http://backports.debian.org/debian')
"""
if __name__ == "__main__":
unittest.main()
|
mcgill-cpslab/MonkeyHelper
|
examples/DroidReplayer.py
|
Python
|
apache-2.0
| 2,426
| 0.004534
|
#
# Copyright 2014 Mingyuan Xia (http://mxia.me) and others
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the Li
|
cense.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Contributors:
# Mingyuan Xia
#
"""
This script demonstrates how to repla
|
y a trace to a Android device. You need
monkeyrunner to run scripts once including this module
"""
import os, sys, inspect
def module_path():
    """Return the absolute directory of this module without using __file__.

    Technique from
    http://stackoverflow.com/questions/729583/getting-file-path-of-imported-module
    """
    source_file = inspect.getsourcefile(module_path)
    directory = os.path.dirname(source_file)
    return os.path.abspath(directory)
sys.path.append(module_path())
sys.path.append(os.path.join(module_path(), '..', 'src'))
from Pipeline import Pipeline
import TraceManipulation as dtm
from TroubleMaker import TroubleReplayer, TroubleInjector
from MonkeyHelper import GestureReplayEventWrapper, EMonkeyDevice, MonkeyHelperReplayer
from Replayer import CompositeReplayer
def main():
    # Replay a previously captured `getevent -lt` trace on an attached
    # Android device by assembling a processing pipeline (this file is
    # Python 2, run under monkeyrunner).
    if len(sys.argv) <= 1:
        print "Usage: monkeyrunner DroidReplayer.py TRACE_PATH"
        print "The trace must be generated from getevent -lt [EVDEV]"
        return 1
    print "Replay started"
    pl = Pipeline()
    # Stage 1: read and parse the raw kernel event trace.
    pl.addStep(dtm.TextFileLineReader(sys.argv[1]))
    pl.addStep(dtm.RawTraceParser())
    # Stage 2: convert type-A multitouch events to per-finger gestures with
    # relative timing, adjusted to the target device.
    pl.addStep(dtm.MultiTouchTypeAParser())
    pl.addStep(dtm.RelativeTimingConverter())
    dev = EMonkeyDevice()
    pl.addStep(dtm.DeviceAdjuster(dev))
    pl.addStep(dtm.FingerDecomposer())
    pl.addStep(GestureReplayEventWrapper())
    # this step might be necessary for a tablet
    # pl.addStep(dtm.TrailScaler(0.8,0.8))
    # pl.addStep(dtm.TimeScaler(0.25))
    # trouble maker
    # pl.addStep(TroubleInjector())
    #replayers = [MonkeyHelperReplayer(dev), TroubleReplayer(dev)]
    #pl.addStep(CompositeReplayer(replayers))
    # Stage 3: drive the device and print each event as it is replayed.
    pl.addStep(MonkeyHelperReplayer(dev))
    pl.addStep(dtm.GenericPrinter())
    pl.execute()
    print "Replay finished"
if __name__ == "__main__":
main()
|
sgerhart/ansible
|
lib/ansible/modules/monitoring/grafana_dashboard.py
|
Python
|
mit
| 14,915
| 0.002749
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Thierry Sallé (@seuf)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
ANSIBLE_METADATA = {
'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'
}
DOCUMENTATION = '''
---
module: grafana_dashboard
author:
- Thierry Sallé (@seuf)
version_added: "2.5"
short_description: Manage Grafana dashboards
description:
- Create, update, delete, export Grafana dashboards via API.
options:
url:
description:
- The Grafana URL.
required: true
aliases: [ grafana_url ]
version_added: 2.7
url_username:
description:
- The Grafana API user.
default: admin
aliases: [ grafana_user ]
version_added: 2.7
url_password:
description:
- The Grafana API password.
default: admin
aliases: [ grafana_password ]
version_added: 2.7
grafana_api_key:
description:
- The Grafana API key.
- If set, I(grafana_user) and I(grafana_password) will be ignored.
org_id:
description:
- The Grafana Organisation ID where the dashboard will be imported / exported.
- Not used when I(grafana_api_key) is set, because the grafana_api_key only belongs to one organisation..
default: 1
state:
description:
- State of the dashboard.
required: true
choices: [ absent, export, present ]
default: present
slug:
description:
- Deprecated since Grafana 5. Use grafana dashboard uid instead.
- slug of the dashboard. It's the friendly url name of the dashboard.
- When C(state) is C(present), this parameter can override the slug in the meta section of the json file.
- If you want to import a json dashboard exported directly from the interface (not from the api),
you have to specify the slug parameter because there is no meta section in the exported json.
uid:
version_added: 2.7
description:
- uid of the dasboard to export when C(state) is C(export) or C(absent).
path:
description:
- The path to the json file containing the Grafana dashboard to import or export.
overwrite:
description:
- Override existing dashboard when state is present.
type: bool
default: 'no'
message:
description:
- Set a commit message for the version history.
- Only used when C(state) is C(present).
validate_certs:
description:
- If C(no), SSL certificates will not be validated.
- This should only be used on personally controlled sites using self-signed certificates.
type: bool
default:
|
'yes'
client_cert:
description:
- PEM formatted certificate chain file to be used for SSL client authentication.
- This file can also include the key as well, and if the key is included, client_key is not required
version_added: 2.7
client_key:
description:
- PEM formatted file that contains your private key to be used for SSL client
- authentication. If client_cert contains both the certificate and key, this option is not required
|
version_added: 2.7
use_proxy:
description:
- Boolean of whether or not to use proxy.
default: 'yes'
type: bool
version_added: 2.7
'''
EXAMPLES = '''
- hosts: localhost
connection: local
tasks:
- name: Import Grafana dashboard foo
grafana_dashboard:
grafana_url: http://grafana.company.com
grafana_api_key: "{{ grafana_api_key }}"
state: present
message: Updated by ansible
overwrite: yes
path: /path/to/dashboards/foo.json
- name: Export dashboard
grafana_dashboard:
grafana_url: http://grafana.company.com
grafana_user: "admin"
grafana_password: "{{ grafana_password }}"
org_id: 1
state: export
uid: "000000653"
path: "/path/to/dashboards/000000653.json"
'''
RETURN = '''
---
uid:
description: uid or slug of the created / deleted / exported dashboard.
returned: success
type: string
sample: 000000063
'''
import json
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.urls import fetch_url, url_argument_spec
from ansible.module_utils._text import to_native
__metaclass__ = type
class GrafanaAPIException(Exception):
    """Raised when a Grafana API call fails or returns an error status."""
    pass


class GrafanaMalformedJson(Exception):
    """Raised when a dashboard JSON payload is missing required fields."""
    pass


class GrafanaExportException(Exception):
    """Raised when exporting a dashboard fails."""
    pass


class GrafanaDeleteException(Exception):
    """Raised when deleting a dashboard fails."""
    pass


def grafana_switch_organisation(module, grafana_url, org_id, headers):
    # API keys are bound to a single organisation, but user/password
    # sessions are not: switch the session to the target org before any
    # dashboard call.
    r, info = fetch_url(module, '%s/api/user/using/%s' % (grafana_url, org_id), headers=headers, method='POST')
    if info['status'] != 200:
        raise GrafanaAPIException('Unable to switch to organization %s : %s' % (org_id, info))
def grafana_headers(module, data):
    """Build the HTTP headers for Grafana API calls.

    With an API key, authentication is a Bearer token.  Otherwise basic
    auth is forced on the module and the session is first switched to the
    target organisation (API keys are already bound to one org).
    """
    headers = {'content-type': 'application/json; charset=utf8'}
    api_key = data.get('grafana_api_key')
    if api_key:
        headers['Authorization'] = "Bearer %s" % api_key
    else:
        module.params['force_basic_auth'] = True
        grafana_switch_organisation(module, data['grafana_url'], data['org_id'], headers)
    return headers
def get_grafana_version(module, grafana_url, headers):
    # The dashboard API paths differ between Grafana 4 and 5+, so fetch the
    # frontend settings and extract the major version number.
    grafana_version = None
    r, info = fetch_url(module, '%s/api/frontend/settings' % grafana_url, headers=headers, method='GET')
    if info['status'] == 200:
        try:
            settings = json.loads(r.read())
            # e.g. "5.1.0" -> "5"
            grafana_version = str.split(settings['buildInfo']['version'], '.')[0]
        except Exception as e:
            raise GrafanaAPIException(e)
    else:
        raise GrafanaAPIException('Unable to get grafana version : %s' % info)

    return int(grafana_version)


def grafana_dashboard_exists(module, grafana_url, uid, headers):
    # Return (exists, dashboard_dict).  Grafana >= 5 addresses dashboards
    # by uid; older versions use the slug-based endpoint.
    dashboard_exists = False
    dashboard = {}

    grafana_version = get_grafana_version(module, grafana_url, headers)
    if grafana_version >= 5:
        r, info = fetch_url(module, '%s/api/dashboards/uid/%s' % (grafana_url, uid), headers=headers, method='GET')
    else:
        r, info = fetch_url(module, '%s/api/dashboards/db/%s' % (grafana_url, uid), headers=headers, method='GET')

    if info['status'] == 200:
        dashboard_exists = True
        try:
            dashboard = json.loads(r.read())
        except Exception as e:
            raise GrafanaAPIException(e)
    elif info['status'] == 404:
        # Not found is a normal answer here, not an error.
        dashboard_exists = False
    else:
        raise GrafanaAPIException('Unable to get dashboard %s : %s' % (uid, info))

    return dashboard_exists, dashboard
def grafana_create_dashboard(module, data):
# define data payload for grafana API
try:
with open(data['path'], 'r') as json_file:
payload = json.load(json_file)
except Exception as e:
raise GrafanaAPIException("Can't load json file %s" % to_native(e))
# define http header
headers = grafana_headers(module, data)
grafana_version = get_grafana_version(module, data['grafana_url'], headers)
if grafana_version < 5:
if data.get('slug'):
uid = data['slug']
elif 'meta' in payload and 'slug' in payload['meta']:
uid = payload['meta']['slug']
else:
raise GrafanaMalformedJson('No slug found in json. Needed with grafana < 5')
else:
if data.get('uid'):
uid = data['uid']
elif 'uid' in payload['dashboard']:
uid = payload['dashboard']['uid']
else:
uid = None
# test if dashboard already exists
dashboard_exists, dashboard = grafana_dashboard_exists(module, data['grafana_url'], uid, headers=headers)
result = {}
if dashboard_exists is True:
if dashboard == payload:
# unchanged
result['uid'] = uid
result['msg'] = "Dashboard %s unchanged." % uid
result['changed'] = False
else:
# update
if 'overwrite' in data and data['overwrite']:
payload['overwrite'] = True
if 'message' in data and
|
openstack/horizon
|
openstack_dashboard/dashboards/project/instances/tables.py
|
Python
|
apache-2.0
| 48,198
| 0
|
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.conf import settings
from django.http import HttpResponse
from django import shortcuts
from django import template
from django.template.defaultfilters import title
from django import urls
from django.utils.http import urlencode
from django.utils.safestring import mark_safe
from django.utils.text import format_lazy
from django.utils.translation import gettext_lazy as _
from django.utils.translation import ngettext_lazy
from django.utils.translation import npgettext_lazy
from django.utils.translation import pgettext_lazy
import netaddr
from horizon import exceptions
from horizon import messages
from horizon import tables
from horizon.templatetags import sizeformat
from horizon.utils import filters
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.floating_ips import workflows
from openstack_dashboard.dashboards.project.instances import tabs
from openstack_dashboard.dashboards.project.instances \
import utils as instance_utils
from openstack_dashboard.dashboards.project.instances.workflows \
import resize_instance
from openstack_dashboard.dashboards.project.instances.workflows \
import update_instance
from openstack_dashboard import policy
from openstack_dashboard.views import get_url_with_pagination
LOG = logging.getLogger(__name__)
ACTIVE_STATES = ("ACTIVE",)
VOLUME_ATTACH_READY_STATES = ("ACTIVE", "SHUTOFF")
SNAPSHOT_READY_STATES = ("ACTIVE", "SHUTOFF", "PAUSED", "SUSPENDED")
SHELVE_READY_STATES = ("ACTIVE", "SHUTOFF", "PAUSED", "SUSPENDED")
POWER_STATES = {
0: "NO STATE",
1: "RUNNING",
2: "BLOCKED",
3: "PAUSED",
4: "SHUTDOWN",
5: "SHUTOFF",
6: "CRASHED",
7: "SUSPENDED",
8: "FAILED",
9: "BUILDING",
}
PAUSE = 0
UNPAUSE = 1
SUSPEND = 0
RESUME = 1
SHELVE = 0
UNSHELVE = 1
def is_deleting(instance):
    """Return True when the instance's task state reports an in-flight delete."""
    state = getattr(instance, "OS-EXT-STS:task_state", None)
    # No task state at all means nothing is in progress.
    return bool(state) and state.lower() == "deleting"
class DeleteInstance(policy.PolicyTargetMixin, tables.DeleteAction):
    """Table action that schedules deletion of the selected instances."""
    policy_rules = (("compute", "os_compute_api:servers:delete"),)
    help_text = _("Deleted instances are not recoverable.")
    default_message_level = "info"

    @staticmethod
    def action_present(count):
        return ngettext_lazy(
            "Delete Instance",
            "Delete Instances",
            count
        )

    @staticmethod
    def action_past(count):
        return ngettext_lazy(
            "Scheduled deletion of Instance",
            "Scheduled deletion of Instances",
            count
        )

    def allowed(self, request, instance=None):
        # Deletion is offered for instances in ERROR and for any instance
        # that is not already in the middle of being deleted.
        error_state = False
        if instance:
            error_state = (instance.status == 'ERROR')
        return error_state or not is_deleting(instance)

    def action(self, request, obj_id):
        api.nova.server_delete(request, obj_id)


class RebootInstance(policy.PolicyTargetMixin, tables.BatchAction):
    """Hard-reboot the selected instances."""
    name = "reboot"
    classes = ('btn-reboot',)
    policy_rules = (("compute", "os_compute_api:servers:reboot"),)
    help_text = _("Restarted instances will lose any data"
                  " not saved in persistent storage.")
    action_type = "danger"

    @staticmethod
    def action_present(count):
        return ngettext_lazy(
            "Hard Reboot Instance",
            "Hard Reboot Instances",
            count
        )

    @staticmethod
    def action_past(count):
        return ngettext_lazy(
            "Hard Rebooted Instance",
            "Hard Rebooted Instances",
            count
        )

    def allowed(self, request, instance=None):
        if instance is None:
            return True
        # Hard reboot applies to running or stopped instances.
        return ((instance.status in ACTIVE_STATES or
                 instance.status == 'SHUTOFF') and
                not is_deleting(instance))

    def action(self, request, obj_id):
        api.nova.server_reboot(request, obj_id, soft_reboot=False)


class SoftRebootInstance(RebootInstance):
    """Soft-reboot variant of RebootInstance."""
    name = "soft_reboot"

    @staticmethod
    def action_present(count):
        return ngettext_lazy(
            "Soft Reboot Instance",
            "Soft Reboot Instances",
            count
        )

    @staticmethod
    def action_past(count):
        return ngettext_lazy(
            "Soft Rebooted Instance",
            "Soft Rebooted Instances",
            count
        )

    def action(self, request, obj_id):
        api.nova.server_reboot(request, obj_id, soft_reboot=True)

    def allowed(self, request, instance=None):
        # NOTE(review): unlike the parent, SHUTOFF instances are excluded
        # here and the is_deleting() check is dropped -- presumably because
        # soft reboot needs a running guest; confirm this is intentional.
        if instance is not None:
            return instance.status in ACTIVE_STATES
        return True
class RescueInstance(policy.PolicyTargetMixin, tables.LinkAction):
    """Row action that opens the rescue-instance modal for an ACTIVE server."""
    name = "rescue"
    verbose_name = _("Rescue Instance")
    policy_rules = (("compute", "os_compute_api:os-rescue"),)
    classes = ("btn-rescue", "ajax-modal")
    url = "horizon:project:instances:rescue"

    def get_link_url(self, datum):
        # Link to the rescue form for this specific instance row.
        instance_id = self.table.get_object_id(datum)
        return urls.reverse(self.url, args=[instance_id])

    def allowed(self, request, instance):
        return instance.status in ACTIVE_STATES
class UnRescueInstance(tables.BatchAction):
    """Take the selected instances out of rescue mode."""
    name = 'unrescue'
    classes = ("btn-unrescue",)

    @staticmethod
    def action_present(count):
        return ngettext_lazy(
            "Unrescue Instance",
            "Unrescue Instances",
            count
        )

    @staticmethod
    def action_past(count):
        return ngettext_lazy(
            "Unrescued Instance",
            "Unrescued Instances",
            count
        )

    def action(self, request, obj_id):
        api.nova.server_unrescue(request, obj_id)

    def allowed(self, request, instance=None):
        # Only instances currently in rescue mode can be unrescued.
        if instance:
            return instance.status == "RESCUE"
        return False
class TogglePause(tables.BatchAction):
name = "pause"
icon = "pause"
@staticmethod
def action_present(count):
return (
ngettext_lazy(
"Pause Instance",
"Pause Instances",
count
),
ngettext_lazy(
"Resume Instance",
"Resume Instances",
count
),
)
@staticmethod
def action_past(count):
return (
ngettext_lazy(
"Paused Instance",
"Paused Instances",
count
),
ngettext_lazy(
"Resumed Instance",
"Resumed Instances",
count
),
)
def allowed(self, request, instance=None):
if not instance:
return False
self.paused = instance.status == "PAUSED"
if self.paused:
self.current_present_action = UNPAUSE
policy_rules = (
("compute", "os_compute_api:os-pause-server:unpause"),)
else:
self.current_present_action = PAUSE
policy_rules = (
("compute", "os_compute_api:os-pause-server:pause"),)
has_permission = policy.check(
policy_rules, request,
target={'project_id': getattr(instance, 'tenant_id', None)})
return (has_permission and
(instance.status in ACTIVE_STATES or self.paused) and
not is_deleting(instance))
def action(self, request, obj_id):
if self.paused:
api.nova.server_unpause(request, obj_id)
self.current_past_action = UNPAUSE
else:
api.nova
|
alphagov/digitalmarketplace-supplier-frontend
|
tests/app/test_application.py
|
Python
|
mit
| 4,526
| 0.001994
|
# coding=utf-8
import mock
from lxml import html
from wtforms import ValidationError
from dmapiclient.errors import HTTPError
from app.main.helpers.frameworks import question_references
from .helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
    def setup_method(self, method):
        super(TestApplication, self).setup_method(method)

    def test_response_headers(self):
        # Supplier pages must not be cached by intermediaries.
        response = self.client.get('/suppliers/create/start')

        assert response.status_code == 200
        assert (
            response.headers['cache-control'] ==
            "no-cache"
        )

    def test_url_with_non_canonical_trailing_slash(self):
        # A trailing slash redirects permanently to the canonical URL.
        response = self.client.get('/suppliers/')
        assert response.status_code == 301
        assert "http://localhost/suppliers" == response.location

    def test_404(self):
        res = self.client.get('/service/1234')
        assert res.status_code == 404
        assert "Check you’ve entered the correct web " \
            "address or start again on the Digital Marketplace homepage." in res.get_data(as_text=True)
        assert "If you can’t find what you’re looking for, contact us at " in res.get_data(as_text=True)

    @mock.patch('app.main.views.suppliers.data_api_client')
    def test_503(self, data_api_client):
        # An API outage must surface as a 503 with a friendly message.
        with self.app.test_client():
            self.login()

            data_api_client.get_supplier.side_effect = HTTPError('API is down')
            self.app.config['DEBUG'] = False

            res = self.client.get('/suppliers')
            assert res.status_code == 503
            assert u"Sorry, we’re experiencing technical difficulties" in res.get_data(as_text=True)
            assert "Try again later." in res.get_data(as_text=True)
def test_header_xframeoptions_set_to_deny(self):
res = self.client.get('/suppliers/create/start')
assert res.status_code == 200
assert 'DENY', res.headers['X-Frame-Options']
def test_should_use_local_cookie_page_on_cookie_message(self):
res = s
|
elf.client.get('/suppliers/create/start')
assert res.status_code == 200
document = html.fromstring(res.get_data(as_text=True))
cookie_banner = document.xpath('//div[@id="dm-cookie-banner"]')
assert cookie_banner[0].xpath('//h2//text()')[0].strip() == "Can we store analytics cookies on your device?"
@mock.patch('flask_wtf.csrf.validate_csrf', autospec=True)
@mock.patch('app.main.views.suppliers.data_api_client')
def test_csrf_handler_redirects_to_login
|
(self, data_api_client, validate_csrf):
self.login()
with self.app.test_client():
self.app.config['WTF_CSRF_ENABLED'] = True
self.client.set_cookie(
"localhost",
self.app.config['DM_COOKIE_PROBE_COOKIE_NAME'],
self.app.config['DM_COOKIE_PROBE_COOKIE_VALUE'],
)
data_api_client.get_supplier.return_value = {'suppliers': {'contactInformation': ['something']}}
# This will raise a CSRFError for us when the form is validated
validate_csrf.side_effect = ValidationError('The CSRF session token is missing.')
res = self.client.post('/suppliers/registered-address/edit', data={'anything': 'really'})
self.assert_flashes("Your session has expired. Please log in again.", expected_category="error")
assert res.status_code == 302
# POST requests will not preserve the request path on redirect
assert res.location == 'http://localhost/user/login'
assert validate_csrf.call_args_list == [mock.call(None)]
class TestQuestionReferences(object):
    """Tests for expanding [[slug]] question references inside copy text."""

    def get_question_mock(self, id):
        # Stand-in for the real question lookup: every id maps to number 19.
        return {'number': 19}

    def test_string_with_with_question_references(self):
        # [[slug]] tokens are replaced by the referenced question's number.
        assert question_references(
            'Please see question [[otherQuestion]] for more info',
            self.get_question_mock
        ) == 'Please see question 19 for more info'

    def test_string_with_no_question_references(self):
        assert question_references(
            'What was the name of your first pet?',
            self.get_question_mock
        ) == 'What was the name of your first pet?'

    def test_string_with_broken_question_references(self):
        # Malformed bracket syntax must pass through untouched.
        assert question_references(
            'Here’s ]][[ a [[string full of ] misused square brackets]',
            self.get_question_mock
        ) == 'Here’s ]][[ a [[string full of ] misused square brackets]'
|
toumorokoshi/sprinter
|
sprinter/core/featureconfig.py
|
Python
|
mit
| 4,648
| 0.000645
|
from __future__ import unicode_literals
from six.moves import configparser
import logging
import copy
import sys
import sprinter.lib as lib
# Sentinel distinguishing "no default supplied" from an explicit None default.
EMPTY = object()

logger = logging.getLogger(__name__)


class ParamNotFoundException(Exception):
    """ Exception for a parameter not being found """


class FeatureConfig(object):
    """Read/write view of a single feature's section in a manifest.

    Values returned by ``get`` are specialized (printf-style interpolated)
    against the manifest's global context dictionary.
    """

    manifest = None  # the manifest the featureconfig is derived from

    def __init__(self, manifest, feature_name):
        self.feature_name = feature_name
        self.manifest = manifest
        # Snapshot of the feature's section as a plain dict.
        self.raw_dict = dict(manifest.items(feature_name))

    def get(self, param, default=EMPTY):
        """
        Returns the param value, and returns the default if it doesn't exist.
        If no default is provided, a ParamNotFoundException is raised instead.

        the returned parameter will have been specialized against the global context
        """
        if not self.has(param):
            if default is not EMPTY:
                return default
            raise ParamNotFoundException("value for %s not found" % param)
        context_dict = copy.deepcopy(self.manifest.get_context_dict())
        # Expose this feature's own values as "<feature>:<key>" context entries.
        for k, v in self.raw_dict.items():
            context_dict["%s:%s" % (self.feature_name, k)] = v
        cur_value = self.raw_dict[param]
        prev_value = None
        max_depth = 5
        # apply the context until doing so does not change the value
        while cur_value != prev_value and max_depth > 0:
            prev_value = cur_value
            try:
                cur_value = str(prev_value) % context_dict
            except KeyError:
                e = sys.exc_info()[1]
                key = e.args[0]
                if key.startswith('config:'):
                    # Missing "config:" keys may be satisfiable from user inputs.
                    missing_key = key.split(':')[1]
                    if self.manifest.inputs.is_input(missing_key):
                        val = self.manifest.inputs.get_input(missing_key)
                        context_dict[key] = val
                else:
                    # logger.warn is deprecated; use warning()
                    logger.warning("Could not specialize %s! Error: %s" % (self.raw_dict[param], e))
                    return self.raw_dict[param]
            except ValueError:
                # A literal "%" in a value (e.g. a password) breaks printf-style
                # interpolation; return what we have specialized so far.
                return cur_value
            max_depth -= 1
        return cur_value

    def has(self, param):
        """ return true if the param exists """
        return param in self.raw_dict

    def set(self, param, value):
        """ sets the param to the value provided """
        self.raw_dict[param] = value
        self.manifest.set(self.feature_name, param, value)

    def remove(self, param):
        """ Remove a parameter from the manifest """
        if self.has(param):
            del self.raw_dict[param]
            self.manifest.remove_option(self.feature_name, param)

    def keys(self):
        """ return all of the keys in the config """
        return self.raw_dict.keys()

    def is_affirmative(self, param, default=None):
        """ True if the param's value parses as an affirmative string. """
        return lib.is_affirmative(self.get(param, default=default))

    def set_if_empty(self, param, default):
        """ Set the parameter to the default if it doesn't exist """
        if not self.has(param):
            self.set(param, default)

    def to_dict(self):
        """ Returns the context, fully specialized, as a dictionary """
        return dict((k, str(self.get(k))) for k in self.raw_dict)

    def write_to_manifest(self):
        """ Overwrites the section of the manifest with the featureconfig's value """
        self.manifest.remove_section(self.feature_name)
        self.manifest.add_section(self.feature_name)
        for k, v in self.raw_dict.items():
            self.manifest.set(self.feature_name, k, v)

    # implementing a dictionary-like behaviour for backwards compatibility
    # it's most likely better to use set and get instead
    def __getitem__(self, key):
        try:
            return self.get(key)
        except ParamNotFoundException:
            e = sys.exc_info()[1]
            raise KeyError(str(e))

    def __setitem__(self, key, value):
        self.set(key, value)

    def __delitem__(self, key):
        # BUG FIX: the mapping protocol calls __delitem__(key) with a single
        # argument; the old (self, key, value) signature made `del cfg[key]`
        # raise TypeError, and it forwarded a bogus second arg to remove().
        self.remove(key)

    def __contains__(self, item):
        return self.has(item)

    def __iter__(self):
        return self.raw_dict.__iter__()

    def __str__(self):
        return "<featureconfig object for '{0}'>".format(self.feature_name)
|
UMTti/mauno
|
setup.py
|
Python
|
mit
| 160
| 0.00625
|
from setuptools import setup

# Minimal packaging configuration for the flaskr application.
# (Reassembled: extraction artifacts had split 'flaskr' across lines.)
setup(
    name='flaskr',
    packages=['flaskr'],
    include_package_data=True,
    install_requires=[
        'flask',
    ],
)
|
fboender/jsonxs
|
jsonxs/jsonxs.py
|
Python
|
mit
| 5,644
| 0.000886
|
#!/usr/bin/env python
"""
jsonxs uses a path expression string to get and set values in JSON and Python
datastructures.
For example:
>>> d = {
... 'feed': {
... 'id': 'my_feed',
... 'url': 'http://example.com/feed.rss',
... 'tags': ['devel', 'example', 'python'],
... 'short.desc': 'A feed',
... 'list': [
... {
... 'uuid': 'e9b48a2'
... }
... ]
... }
... }
# Get the value for a path expression
>>> jsonxs(d, 'feed.tags[-1]')
'python'
# Access paths with special chars in them
>>> jsonxs(d, 'feed.short\.desc')
'A feed'
# Return default value if path not found
>>> jsonxs(d, 'feed.long\.desc', default='N/A')
'N/A'
# Set the value for a path expression
>>> jsonxs(d, 'feed.id', ACTION_SET, 'your_feed')
>>> d['feed']['id']
'your_feed'
# Replace a value in a list
>>> jsonxs(d, 'feed.tags[-1]', ACTION_SET, 'javascript')
>>> d['feed']['tags']
['devel', 'example', 'javascript']
# Create a new key in a dict
>>> jsonxs(d, 'feed.author', ACTION_SET, 'Ferry Boender')
>>> d['feed']['author']
'Ferry Boender'
# Delete a value from a list
>>> jsonxs(d, 'feed.tags[0]', ACTION_DEL)
>>> d['feed']['tags']
['example', 'javascript']
# Delete a key/value pair from a dictionary
>>> jsonxs(d, 'feed.url', ACTION_DEL)
>>> 'url' in d['feed']
False
# Append a value to a list
>>> jsonxs(d, 'feed.tags', ACTION_APPEND, 'programming')
>>> d['feed']['tags']
['example', 'javascript', 'programming']
# Insert a value to a list
>>> jsonxs(d, 'feed.tags[1]', ACTION_INSERT, 'tech')
>>> d['feed']['tags']
['example', 'tech', 'javascript', 'programming']
# Create a dict value
>>> jsonxs(d, 'feed.details', ACTION_MKDICT)
>>> d['feed']['details'] == {}
True
# Add a key / value to newly created dict
>>> jsonxs(d, 'feed.list[0].uuid', ACTION_SET, 'aeaeae')
# Create a list value
>>> jsonxs(d, 'feed.details.users', ACTION_MKLIST)
>>> d['feed']['details']['users'] == []
True
# Fill the newly created list
>>> jsonxs(d, 'feed.details.users', ACTION_APPEND, 'fboender')
>>> jsonxs(d, 'feed.details.users', ACTION_APPEND, 'ppeterson')
>>> d['feed']['details']['users']
['fboender', 'ppeterson']
"""
# Supported actions for jsonxs(); pass one as the `action` argument.
ACTION_GET = 'get'
ACTION_SET = 'set'
ACTION_DEL = 'del'
ACTION_APPEND = 'append'
ACTION_INSERT = 'insert'
ACTION_MKDICT = 'mkdict'
ACTION_MKLIST = 'mklist'


def tokenize(expr):
    """
    Parse a string expression into a set of tokens that can be used as a path
    into a Python datastructure.

    Keys are returned as strings, list indices as ints. A backslash escapes
    the following character (so keys may contain '.', '[' and ']').
    """
    tokens = []
    escape = False
    cur_token = ''
    for c in expr:
        if escape:
            # Previous char was a backslash: take this char literally.
            cur_token += c
            escape = False
        else:
            if c == '\\':
                # Next char will be escaped
                escape = True
                continue
            elif c == '[':
                # Next token is of type index (list)
                if len(cur_token) > 0:
                    tokens.append(cur_token)
                cur_token = ''
            elif c == ']':
                # End of index token. Next token defaults to a key (dict)
                if len(cur_token) > 0:
                    tokens.append(int(cur_token))
                cur_token = ''
            elif c == '.':
                # End of key token. Next token defaults to a key (dict)
                if len(cur_token) > 0:
                    tokens.append(cur_token)
                cur_token = ''
            else:
                # Append char to token name
                cur_token += c
    if len(cur_token) > 0:
        tokens.append(cur_token)
    return tokens


def jsonxs(data, expr, action=ACTION_GET, value=None, default=None):
    """
    Get, set, delete values in a JSON structure. `expr` is a JSONpath-like
    expression pointing to the desired value. `action` determines the action to
    perform. See the module-level `ACTION_*` constants. `value` should be given
    if action is `ACTION_SET`. If `default` is set and `expr` isn't found,
    return `default` instead. This will override all exceptions.
    """
    tokens = tokenize(expr)

    # Walk through the list of tokens to reach the correct path in the data
    # structure.
    try:
        prev_path = None
        cur_path = data
        for token in tokens:
            prev_path = cur_path
            if type(cur_path) is not list:
                if token not in cur_path:
                    if action in (ACTION_SET, ACTION_MKDICT, ACTION_MKLIST):
                        # When setting values or creating dicts/lists, the key
                        # can be missing from the data structure.
                        continue
            cur_path = cur_path[token]
    except Exception:
        if default is not None:
            return default
        else:
            raise

    # Perform action the user requested.
    if action == ACTION_GET:
        return cur_path
    elif action == ACTION_DEL:
        del prev_path[token]
    elif action == ACTION_SET:
        prev_path[token] = value
    elif action == ACTION_APPEND:
        prev_path[token].append(value)
    elif action == ACTION_INSERT:
        prev_path.insert(token, value)
    elif action == ACTION_MKDICT:
        prev_path[token] = {}
    elif action == ACTION_MKLIST:
        prev_path[token] = []
    else:
        raise ValueError("Invalid action: {}".format(action))
if __name__ == "__main__":
import doctest
doctest.testmod()
|
google/cog
|
cognitive/train_utils.py
|
Python
|
apache-2.0
| 9,504
| 0.00947
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Training utility functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from six import string_types
import random
import re
import json
import numpy as np
import traceback
from cognitive import stim_generator as sg
import cognitive.constants as const
_R_MEAN = 123.68
_G_MEAN = 116.78
_B_MEAN = 103.94
def convert_to_grid(xy_coord, prefs):
  """Given a x-y coordinate, return the target activity for a grid of neurons.

  Args:
    xy_coord : numpy 2-D array (batch_size, 2)
    prefs: numpy 2-D array (n_out_pnt, 2). x and y preferences.

  Returns:
    activity: numpy array (batch_size, GRID_SIZE**2)
  """
  two_sigma_sq = 0.02  # 2 * sigma-squared of the Gaussian bumps
  dx = xy_coord[:, 0:1] - prefs[:, 0]
  dy = xy_coord[:, 1:2] - prefs[:, 1]
  activity = np.exp(-(dx**2 + dy**2) / two_sigma_sq)
  # Normalize each row so activity over the grid sums to one.
  return (activity.T / np.sum(activity, axis=1)).T
def map_sentence2ints(sentence):
  """Tokenize a sentence and map each token to its input-vocabulary index."""
  # Words (with apostrophes) plus standalone punctuation, in order.
  tokens = re.findall(r"[\w']+|[.,!?;]", sentence)
  indices = [const.INPUTVOCABULARY.index(tok) for tok in tokens]
  return np.array(indices).astype(np.int32)
def preprocess(in_imgs_, vis_type):
  """Pre-process images in place and return them.

  VGG-style networks get per-channel mean subtraction; everything else is
  scaled to [0, 1] and mean-centered.
  """
  if vis_type in ('vgg', 'vgg_pretrain'):
    channel_means = np.array([_R_MEAN, _G_MEAN, _B_MEAN], dtype=np.float32)
    in_imgs_ -= channel_means
  else:
    in_imgs_ /= 255.
    in_imgs_ -= np.mean(in_imgs_)
  return in_imgs_
def tasks_to_rules(tasks):
  """Encode task instructions as padded integer arrays.

  Args:
    tasks: a list of tg.Task instances or string rules, length is batch_size.

  Returns:
    in_rule: (MAXSEQLENGTH, batch_size) int64 array of vocabulary indices,
      zero-padded past each instruction's length.
    seq_length: (batch_size,) int64 array of word counts per task.
  """
  batch_size = len(tasks)
  in_rule = np.zeros((const.MAXSEQLENGTH, batch_size), dtype=np.int64)
  seq_length = np.zeros((batch_size,), dtype=np.int64)
  for i_task, task in enumerate(tasks):
    words = re.findall(r"[\w']+|[.,!?;]", str(task))
    seq_length[i_task] = len(words)
    for i_word, word in enumerate(words):
      in_rule[i_word, i_task] = const.INPUTVOCABULARY.index(word)
  return in_rule, seq_length
def set_outputs_from_tasks(n_epoch, tasks, objsets,
                           out_pnt_xy, out_word,
                           mask_pnt, mask_word):
  """Fill target and mask arrays in place by evaluating each task per epoch.

  Iterates epochs (outer) then (task, objset) pairs (inner), so row
  j = epoch_now * len(tasks) + task index. Rows with an invalid target are
  left at zero with both masks zero, i.e. excluded from the loss.

  Args:
    n_epoch: int, number of epochs per trial.
    tasks: list of task callables; task(objset, epoch_now) yields the target.
    objsets: list of object sets, parallel to `tasks`.
    out_pnt_xy: float array (n_epoch*batch_size, 2); written for Loc targets.
    out_word: int array (n_epoch*batch_size,); written for word targets.
    mask_pnt: float array (n_epoch*batch_size,); 1. where point loss applies.
    mask_word: float array (n_epoch*batch_size,); 1. where word loss applies.

  Raises:
    TypeError: if a target is not INVALID, an sg.Loc, a bool, or an
      sg.Attribute.
  """
  j = 0
  for epoch_now in range(n_epoch):
    for task, objset in zip(tasks, objsets):
      target = task(objset, epoch_now)
      if target is const.INVALID:
        # For invalid target, no loss is used. Everything remains zero.
        pass
      elif isinstance(target, sg.Loc):
        # minimize point loss
        out_pnt_xy[j, :] = target.value
        mask_pnt[j] = 1.
      elif isinstance(target, bool) or isinstance(target, sg.Attribute):
        if isinstance(target, bool):
          # Booleans are emitted as the words 'true'/'false'.
          target = 'true' if target else 'false'
        else:
          target = target.value
        # For boolean target, only minimize word loss
        out_word[j] = const.OUTPUTVOCABULARY.index(target)
        mask_word[j] = 1.
      else:
        raise TypeError('Unknown target type.')
      j += 1
def set_outputs_from_targets(n_epoch, objsets,
                             out_pnt_xy, out_word,
                             mask_pnt, mask_word):
  """Fill target and mask arrays in place from precomputed objset targets.

  Same row layout as set_outputs_from_tasks: j = epoch_now * batch + index.
  'invalid' rows stay zero with both masks zero (no loss contribution).

  Args:
    n_epoch: int, number of epochs; objset.targets must have one per epoch.
    objsets: list of object sets carrying a per-epoch `targets` sequence.
    out_pnt_xy: float array (n_epoch*batch_size, 2); written for (x, y) targets.
    out_word: int array (n_epoch*batch_size,); written for string targets.
    mask_pnt: float array (n_epoch*batch_size,); 1. where point loss applies.
    mask_word: float array (n_epoch*batch_size,); 1. where word loss applies.

  Raises:
    TypeError: if a target is not 'invalid', a 2-element list/tuple, or a
      string.
  """
  j = 0
  for epoch_now in range(n_epoch):
    for objset in objsets:
      target = objset.targets[epoch_now]
      if target == 'invalid':
        # For invalid target, no loss is used. Everything remains zero.
        pass
      elif isinstance(target, (list, tuple)):
        assert len(target) == 2, "Expected 2-D target. Got " + str(target)
        # minimize point loss
        out_pnt_xy[j, :] = target
        mask_pnt[j] = 1.
      elif isinstance(target, string_types):
        # String targets are words in the output vocabulary.
        out_word[j] = const.OUTPUTVOCABULARY.index(target)
        mask_word[j] = 1.
      else:
        raise TypeError('Unknown target type: %s %s' % (type(target), target))
      j += 1
def generate_batch(tasks,
                   n_epoch=30,
                   img_size=224,
                   objsets=None,
                   n_distractor=1,
                   average_memory_span=2):
  """Generate a batch of trials.

  Return numpy arrays to feed the tensorflow placeholders.

  Args:
    tasks: a list of tg.Task instances, length is batch_size.
    n_epoch: int, number of epochs
    img_size: int, image size
    objsets: None or list of ObjectSet/StaticObjectSet instances
    n_distractor: int, number of distractors to add
    average_memory_span: int, the average number of epochs by which an object
      need to be held in working memory, if needed at all

  Returns:
    All variables are numpy array of float32
    in_imgs: (n_epoch*batch_size, img_size, img_size, 3)
    in_rule: (max_seq_length, batch_size) the rule language input, type int32
    seq_length: (batch_size,) the length of each task instruction
    out_pnt: (n_epoch*batch_size, n_out_pnt)
    out_pnt_xy: (n_epoch*batch_size, 2)
    out_word: (n_epoch*batch_size, n_out_word)
    mask_pnt: (n_epoch*batch_size)
    mask_word: (n_epoch*batch_size)

  Raises:
    TypeError: when target type is incorrect.
  """
  batch_size = len(tasks)

  # Generate object sets on the fly when the caller didn't supply them.
  if objsets is None:
    objsets = list()
    for task in tasks:
      objsets.append(
          task.generate_objset(n_epoch,
                               n_distractor=n_distractor,
                               average_memory_span=average_memory_span))

  # Every objset must span exactly n_epoch epochs.
  max_objset_epoch = max([objset.n_epoch for objset in objsets])
  assert max_objset_epoch == n_epoch, '%d != %d' % (max_objset_epoch, n_epoch)

  in_imgs = sg.render(objsets, img_size)
  # The rendered images are batch major
  in_imgs = np.reshape(in_imgs, [batch_size, n_epoch, img_size, img_size, 3])
  # Swap to time major
  in_imgs = np.swapaxes(in_imgs, 0, 1)

  # Outputs and masks
  out_pnt_xy = np.zeros((n_epoch * batch_size, 2), dtype=np.float32)
  out_word = np.zeros((n_epoch * batch_size), dtype=np.int64)
  mask_pnt = np.zeros((n_epoch * batch_size), dtype=np.float32)
  mask_word = np.zeros((n_epoch * batch_size), dtype=np.float32)

  # Static objsets carry precomputed targets; dynamic ones are evaluated.
  if isinstance(objsets[0], sg.StaticObjectSet):
    set_outputs_from_targets(n_epoch, objsets,
                             out_pnt_xy, out_word,
                             mask_pnt, mask_word)
  else:
    set_outputs_from_tasks(n_epoch, tasks, objsets,
                           out_pnt_xy, out_word,
                           mask_pnt, mask_word)

  # Process outputs
  out_pnt = convert_to_grid(out_pnt_xy, const.PREFS)

  # Generate rule inputs, padded to maximum number of words in a sentence
  in_rule, seq_length = tasks_to_rules(tasks)

  return (in_imgs, in_rule, seq_length, out_pnt, out_pnt_xy, out_word, mask_pnt,
          mask_word)
def static_objsets_from_examples(examples):
  """Returns a list of StaticObjectSet objects.

  Args:
    examples: an iterable of dictionaries decoded from json examples.
  """
  objsets = []
  for example in examples:
    # Flatten each example's per-epoch object dicts into static objects.
    objs = []
    for multi_epoch_obj in example['objects']:
      objs.extend(sg.static_objects_from_dict(multi_epoch_obj))
    objsets.append(sg.StaticObjectSet(n_epoch=example['epochs'],
                                      static_objects=objs,
                                      targets=example['answers']))
  return objsets
def json_to_feeds(json_examples):
if isinstance(json_examples, string_types):
json_examples = [json_examples]
examples = []
families = []
rules = []
for je in json_examples:
try:
e = json.loads(je)
except (ValueError, TypeError):
traceback.print_exc()
raise
|
jjerphan/semiotweet
|
semiotweet/urls.py
|
Python
|
gpl-3.0
| 857
| 0
|
"""semiotweet URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
    https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^api/v1/', include('api.urls')),
url(r'^', include('viewer.urls'))
]
|
pschulam/lmbases
|
tests/test_bsplines.py
|
Python
|
mit
| 2,966
| 0.001349
|
import numpy as np
import lmbases
def test_against_r_splines_uniform():
    '''Compare BSplines class against R's bsplines with uniform knots.

    Generate the ground truth with the following R commands:

    > library(splines)
    > x <- c(1.5, 3.3, 5.1, 7.2, 9.9)
    > k <- c(2.5, 5.0, 7.5)
    > b <- bs(x, knots=k, degree=2, intercept=TRUE, Boundary.knots=c(0, 10))
    > print(b)
            1      2      3      4      5      6
    [1,] 0.16 0.6600 0.1800 0.0000 0.0000 0.0000
    [2,] 0.00 0.2312 0.7176 0.0512 0.0000 0.0000
    [3,] 0.00 0.0000 0.4608 0.5384 0.0008 0.0000
    [4,] 0.00 0.0000 0.0072 0.6056 0.3872 0.0000
    [5,] 0.00 0.0000 0.0000 0.0008 0.0776 0.9216
    attr(,"degree")
    [1] 2
    attr(,"knots")
    [1] 2.5 5.0 7.5
    attr(,"Boundary.knots")
    [1]  0 10
    attr(,"intercept")
    [1] TRUE
    attr(,"class")
    [1] "bs"     "basis"  "matrix"
    '''
    b_ground_truth = np.array(
        [[0.16, 0.6600, 0.1800, 0.0000, 0.0000, 0.0000],
         [0.00, 0.2312, 0.7176, 0.0512, 0.0000, 0.0000],
         [0.00, 0.0000, 0.4608, 0.5384, 0.0008, 0.0000],
         [0.00, 0.0000, 0.0072, 0.6056, 0.3872, 0.0000],
         [0.00, 0.0000, 0.0000, 0.0008, 0.0776, 0.9216]])

    x = np.array([1.5, 3.3, 5.1, 7.2, 9.9])
    bs = lmbases.BSplines(low=0.0, high=10.0, num_bases=6, degree=2)
    # Extraction artifact repaired: the assert line was split mid-identifier.
    assert np.allclose(bs.design(x), b_ground_truth)
def test_against_r_splines_quantiles():
    '''Compare BSplines class against R's bsplines with quantile knots.

    Generate the ground truth with the following R commands:

    > library(splines)
    > x <- c(1.5, 3.3, 5.1, 7.2, 9.9)
    > b <- bs(x, degree=2, df=6, intercept=TRUE, Boundary.knots=c(0, 10))
    > print(b)
                 1         2         3           4         5         6
    [1,] 0.2975207 0.5687895 0.1336898 0.000000000 0.0000000 0.0000000
    [2,] 0.0000000 0.3529412 0.6470588 0.000000000 0.0000000 0.0000000
    [3,] 0.0000000 0.0000000 0.5384615 0.461538462 0.0000000 0.0000000
    [4,] 0.0000000 0.0000000 0.0000000 0.571428571 0.4285714 0.0000000
    [5,] 0.0000000 0.0000000 0.0000000 0.000728863 0.0694242 0.9298469
    attr(,"degree")
    [1] 2
    attr(,"knots")
    25% 50% 75%
    3.3 5.1 7.2
    attr(,"Boundary.knots")
    [1]  0 10
    attr(,"intercept")
    [1] TRUE
    attr(,"class")
    [1] "bs"     "basis"  "matrix"
    '''
    b_ground_truth = np.array(
        [[0.2975207, 0.5687895, 0.1336898, 0.000000000, 0.0000000, 0.0000000],
         [0.0000000, 0.3529412, 0.6470588, 0.000000000, 0.0000000, 0.0000000],
         [0.0000000, 0.0000000, 0.5384615, 0.461538462, 0.0000000, 0.0000000],
         [0.0000000, 0.0000000, 0.0000000, 0.571428571, 0.4285714, 0.0000000],
         [0.0000000, 0.0000000, 0.0000000, 0.000728863, 0.0694242, 0.9298469]])

    x = np.array([1.5, 3.3, 5.1, 7.2, 9.9])
    # Passing x makes BSplines place knots at the data quantiles.
    bs = lmbases.BSplines(low=0.0, high=10.0, num_bases=6, degree=2, x=x)
    assert np.allclose(bs.design(x), b_ground_truth)
|
monikagrabowska/osf.io
|
api_tests/registrations/views/test_withdrawn_registrations.py
|
Python
|
apache-2.0
| 7,865
| 0.003687
|
from urlparse import urlparse
from api_tests.nodes.views.test_node_contributors_list import NodeCRUDTestCase
from nose.tools import * # flake8: noqa
from api.base.settings.defaults import API_BASE
from framework.auth.core import Auth
from tests.base import fake
from osf_tests.factories import (
ProjectFactory,
CommentFactory,
RegistrationFactory,
WithdrawnRegistrationFactory,
)
class TestWithdrawnRegistrations(NodeCRUDTestCase):
def setUp(self):
super(TestWithdrawnRegistrations, self).setUp()
self.registration = RegistrationFactory(creator=self.user, project=self.public_project)
self.withdrawn_registration = WithdrawnRegistrationFactory(registration=self.registration, user=self.registration.creator)
self.public_pointer_project = ProjectFactory(is_public=True)
self.public_pointer = self.public_project.add_pointer(self.public_pointer_project,
auth=Auth(self.user),
save=True)
self.withdrawn_url = '/{}registrations/{}/?version=2.2'.format(API_BASE, self.registration._id)
self.withdrawn_registration.justification = 'We made a major error.'
self.withdrawn_registration.save()
def test_can_access_withdrawn_contributors(self):
url = '/{}registrations/{}/contributors/'.format(API_BASE, self.registration._id)
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 200)
def test_cannot_access_withdrawn_children(self):
url = '/{}registrations/{}/children/'.format(API_BASE, self.registration._id)
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 403)
def test_cannot_access_withdrawn_comments(self):
self.public_project = ProjectFactory(is_public=True, creator=self.user)
self.public_comment = CommentFactory(node=self.public_project, user=self.user)
url = '/{}registrations/{}/comments/'.format(API_BASE, self.registration._i
|
d)
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 403)
def test_can_access_withdrawn_contributor_detail(self):
url = '/{}registrations/{}/contributors/{}/'.format(API_BASE, self.registration._id, self.user._id)
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 200)
def test_cannot_
|
return_a_withdrawn_registration_at_node_detail_endpoint(self):
url = '/{}nodes/{}/'.format(API_BASE, self.registration._id)
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 404)
def test_cannot_delete_a_withdrawn_registration(self):
url = '/{}registrations/{}/'.format(API_BASE, self.registration._id)
res = self.app.delete_json_api(url, auth=self.user.auth, expect_errors=True)
self.registration.reload()
assert_equal(res.status_code, 405)
def test_cannot_access_withdrawn_files_list(self):
url = '/{}registrations/{}/files/'.format(API_BASE, self.registration._id)
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 403)
def test_cannot_access_withdrawn_node_links_detail(self):
url = '/{}registrations/{}/node_links/{}/'.format(API_BASE, self.registration._id, self.public_pointer._id)
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 403)
def test_cannot_access_withdrawn_node_links_list(self):
url = '/{}registrations/{}/node_links/'.format(API_BASE, self.registration._id)
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 403)
def test_cannot_access_withdrawn_node_logs(self):
self.public_project = ProjectFactory(is_public=True, creator=self.user)
url = '/{}registrations/{}/logs/'.format(API_BASE, self.registration._id)
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 403)
def test_cannot_access_withdrawn_registrations_list(self):
self.registration.save()
url = '/{}registrations/{}/registrations/'.format(API_BASE, self.registration._id)
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 403)
def test_withdrawn_registrations_display_limited_fields(self):
registration = self.registration
res = self.app.get(self.withdrawn_url, auth=self.user.auth)
assert_equal(res.status_code, 200)
attributes = res.json['data']['attributes']
registration.reload()
expected_attributes = {
'title': registration.title,
'description': registration.description,
'date_created': registration.date_created.isoformat().replace('+00:00', 'Z'),
'date_registered': registration.registered_date.isoformat().replace('+00:00', 'Z'),
'date_modified': registration.date_modified.isoformat().replace('+00:00', 'Z'),
'withdrawal_justification': registration.retraction.justification,
'public': None,
'category': None,
'registration': True,
'fork': None,
'collection': None,
'tags': None,
'withdrawn': True,
'pending_withdrawal': None,
'pending_registration_approval': None,
'pending_embargo_approval': None,
'embargo_end_date': None,
'registered_meta': None,
'current_user_permissions': None,
'registration_supplement': registration.registered_schema.first().name
}
for attribute in expected_attributes:
assert_equal(expected_attributes[attribute], attributes[attribute])
contributors = urlparse(res.json['data']['relationships']['contributors']['links']['related']['href']).path
assert_equal(contributors, '/{}registrations/{}/contributors/'.format(API_BASE, registration._id))
assert_not_in('children', res.json['data']['relationships'])
assert_not_in('comments', res.json['data']['relationships'])
assert_not_in('node_links', res.json['data']['relationships'])
assert_not_in('registrations', res.json['data']['relationships'])
assert_not_in('parent', res.json['data']['relationships'])
assert_not_in('forked_from', res.json['data']['relationships'])
assert_not_in('files', res.json['data']['relationships'])
assert_not_in('logs', res.json['data']['relationships'])
assert_not_in('registered_by', res.json['data']['relationships'])
assert_not_in('registered_from', res.json['data']['relationships'])
assert_not_in('root', res.json['data']['relationships'])
def test_field_specific_related_counts_ignored_if_hidden_field_on_withdrawn_registration(self):
url = '/{}registrations/{}/?related_counts=children'.format(API_BASE, self.registration._id)
res = self.app.get(url, auth=self.user.auth)
assert_equal(res.status_code, 200)
assert_not_in('children', res.json['data']['relationships'])
assert_in('contributors', res.json['data']['relationships'])
def test_field_specific_related_counts_retrieved_if_visible_field_on_withdrawn_registration(self):
url = '/{}registrations/{}/?related_counts=contributors'.format(API_BASE, self.registration._id)
res = self.app.get(url, auth=self.user.auth)
assert_equal(res.status_code, 200)
assert_equal(res.json['data']['relationships']['contributors']['links']['related']['meta']['count'], 1)
|
CartoDB/crankshaft
|
src/py/crankshaft/crankshaft/regression/gwr/base/gwr.py
|
Python
|
bsd-3-clause
| 39,275
| 0.004328
|
#Main GWR classes
#Offset does not yet do anyhting and needs to be implemented
__author__ = "Taylor Oshan Tayoshan@gmail.com"
import numpy as np
import numpy.linalg as la
from scipy.stats import t
from .kernels import *
from .diagnostics import get_AIC, get_AICc, get_BIC
import pysal.spreg.user_output as USER
from crankshaft.regression.glm.family import Gaussian, Binomial, Poisson
from crankshaft.regression.glm.glm import GLM, GLMResults
from crankshaft.regression.glm.iwls import iwls
from crankshaft.regression.glm.utils import cache_readonly

fk = {'gaussian': fix_gauss, 'bisquare': fix_bisquare, 'exponential': fix_exp}
ak = {'gaussian': adapt_gauss, 'bisquare': adapt_bisquare, 'exponential': adapt_exp}
class GWR(GLM):
"""
Geographically weighted regression. Can currently estimate Gaussian,
Poisson, and logistic models(built on a GLM framework). GWR object prepares
model input. Fit method performs estimation and returns a GWRResults object.
Parameters
----------
coords : array-like
n*2, collection of n sets of (x,y) coordinates of
observatons; also used as calibration locations is
'points' is set to None
y : array
n*1, dependent variable
X : array
n*k, independent variable, exlcuding the constant
points : array-like
n*2, collection of n sets of (x,y) coordinates used for
calibration locations; default is set to None, which
uses every observation as a calibration point
bw : scalar
bandwidth value consisting of either a distance or N
nearest neighbors; user specified or obtained using
Sel_BW
family : family object
underlying probability model; provides
distribution-specific calculations
offset : array
n*1, the offset variable at the ith location. For Poisson model
this term is often the size of the population at risk or
the expected size of the outcome in spatial epidemiology
Default is None where Ni becomes 1.0 for all locations;
only for Poisson models
sigma2_v1 : boolean
specify sigma squared, True to use n as denominator;
default is False which uses n-k
kernel : string
type of kernel function used to weight observations;
available options:
'gaussian'
'bisquare'
'exponential'
fixed : boolean
True for distance based kernel function and False for
adaptive (nearest neighbor) kernel function (default)
constant : boolean
True to include intercept (default) in model and False to exclude
intercept.
Attributes
----------
coords : array-like
n*2, collection of n sets of (x,y) coordinates used for
calibration locations
y : array
n*1, dependent variable
X : array
n*k, independent variable, exlcuding the constant
bw : scalar
bandwidth value consisting of either a distance or N
nearest neighbors; user specified or obtained using
Sel_BW
family : family object
underlying probability model; provides
distribution-specific calculations
offset : array
n*1, the offset variable at the ith location. For Poisson model
this term is often the size of the population at risk or
the expected size of the outcome in spatial epidemiology
Default is None where Ni becomes 1.0 for all locations
sigma2_v1 : boolean
specify sigma squared, True to use n as denominator;
default is False which uses n-k
kernel : string
type of kernel function used to weight observations;
available options:
'gaussian'
'bisquare'
'exponential'
fixed : boolean
True for distance based kernel function and False for
adaptive (nearest neighbor) kernel function (default)
constant : boolean
True to include intercept (default) in model and False to exclude
intercept
n : integer
number of observations
k : integer
number of independent variables
mean_y : float
mean of y
std_y : float
standard deviation of y
fit_params : dict
parameters passed into fit method to define estimation
routine
W : array
n*n, spatial weights matrix for weighting all
observations from each calibration point
"""
def __init__(self, coords, y, X, bw, family=Gaussian(), offset=None,
sigma2_v1=False, kernel='bisquare', fixed=False, constant=True):
"""
Initialize class
"""
GLM.__init__(self, y, X, family, constant=constant)
self.constant = constant
self.sigma2_v1 = sigma2_v1
self.coords = coords
self.bw = bw
self.kernel = kernel
self.fixed = fixed
if offset is None:
self.offset = np.ones((self.n, 1))
else:
self.offset = offset * 1.0
self.fit_params = {}
self.W = self._build_W(fixed, kernel, coords, bw)
self.points = None
self.exog_scale = None
self.exog_resid = None
self.P = None
def _build_W(self, fixed, kernel, coords, bw, points=None):
if fixed:
try:
W = fk[kernel](coords, bw, points)
except:
raise TypeError('Unsupported kernel function ', kernel)
else:
try:
W = ak[kernel](coords, bw, points)
except:
raise TypeError('Unsupported kernel function ', kernel)
return W
def fit(self, ini_params=None, tol=1.0e-5, max_iter=20, solve='iwls'):
"""
Method that fits a model with a particular estimation routine.
Parameters
----------
ini_betas : array
k*1, initial coefficient values, including constant.
Default is None, which calculates initial values during
estimation
tol: float
Tolerence for estimation convergence
max_iter : integer
Maximum number of iterations if convergence not
achieved
solve : string
Technique to solve MLE equations.
'iwls' = iteratively (re)weighted least squares (default)
"""
self.fit_params['ini_params'] = ini_params
self.fit_params['tol'] = tol
self.fit_params['max_iter'] = max_iter
self.fit_params['solve']= solve
if solve.lower() == 'iwls':
m = self.W.shape[0]
params = np.zeros((m, self.k))
predy = np.zeros((m, 1))
v = np.zeros((m, 1))
w = np.zeros((m, 1))
|
heyLu/pixie
|
pixie/vm/code.py
|
Python
|
gpl-3.0
| 25,247
| 0.002812
|
py_object = object
import pixie.vm.object as object
from pixie.vm.object import affirm
from pixie.vm.primitives import nil, true, false
from rpython.rlib.rarithmetic import r_uint
from rpython.rlib.jit import elidable, elidable_promote, promote
import rpython.rlib.jit as jit
import pixie.vm.rt as rt
BYTECODES = ["LOAD_CONST",
"ADD",
"EQ",
"INVOKE",
"TAIL_CALL",
"DUP_NTH",
"RETURN",
"COND_BR",
"JMP",
"CLOSED_OVER",
"MAKE_CLOSURE",
"SET_VAR",
"POP",
"DEREF_VAR",
"INSTALL",
"RECUR",
"LOOP_RECUR",
"ARG",
"PUSH_SELF",
"POP_UP_N",
"MAKE_MULTI_ARITY",
"MAKE_VARIADIC",
"YIELD"]
for x in range(len(BYTECODES)):
globals()[BYTECODES[x]] = r_uint(x)
@jit.unroll_safe
def resize_list(lst, new_size):
    """'Resizes' a list, via reallocation and copy"""
    # RPython lists aren't grown in place here: allocate a larger list,
    # copy the old elements over, and leave the tail filled with None.
    affirm(len(lst) < new_size, u"New list must be larger than old list")
    new_list = [None] * new_size
    i = r_uint(0)
    while i < len(lst):
        new_list[i] = lst[i]
        i += 1
    return new_list
@jit.unroll_safe
def list_copy(from_lst, from_loc, to_list, to_loc, count):
    """Copy *count* elements of *from_lst* (starting at *from_loc*) into
    *to_list* starting at *to_loc*.

    Mutates *to_list* in place and also returns it. Indices are coerced
    to r_uint for the RPython translator.
    """
    from_loc = r_uint(from_loc)
    to_loc = r_uint(to_loc)
    count = r_uint(count)
    i = r_uint(0)
    while i < count:
        to_list[to_loc + i] = from_lst[from_loc+i]
        i += 1
    return to_list
@jit.unroll_safe
def slice_to_end(from_list, start_pos):
    """Return a fresh list holding a copy of from_list[start_pos:]."""
    start_pos = r_uint(start_pos)
    items_to_copy = len(from_list) - start_pos
    new_lst = [None] * items_to_copy
    list_copy(from_list, start_pos, new_lst, 0, items_to_copy)
    return new_lst
@jit.unroll_safe
def slice_from_start(from_list, count, extra=r_uint(0)):
    """Return a fresh list of length count + extra whose first *count*
    elements are copied from *from_list*; the *extra* trailing slots are
    left as None (callers fill them in, e.g. with a rest-args array).
    """
    new_lst = [None] * (count + extra)
    list_copy(from_list, 0, new_lst, 0, count)
    return new_lst
# class TailCall(object.Object):
# _type = object.Type("TailCall")
# __immutable_fields_ = ["_f", "_args"]
# def __init__(self, f, args):
# self._f = f
# self._args = args
#
# def run(self):
# return self._f._invoke(self._args)
class BaseCode(object.Object):
    """Common base for every invokable object in the VM (interpreted code,
    closures, native functions). Tracks macro status and attached metadata;
    subclasses provide the actual invocation and bytecode accessors.
    """

    def __init__(self):
        assert isinstance(self, BaseCode)
        self._is_macro = False
        self._meta = nil

    def meta(self):
        # Metadata attached to this code object; nil when none was set.
        return self._meta

    def with_meta(self, meta):
        # Subclasses return a copy of themselves carrying *meta*.
        assert false, "not implemented"

    def set_macro(self):
        # Mark this code object as a macro for the compiler.
        self._is_macro = True

    def is_macro(self):
        return self._is_macro

    def get_consts(self):
        raise NotImplementedError()

    def get_bytecode(self):
        raise NotImplementedError()

    @elidable_promote()
    def stack_size(self):
        # Base/native code needs no interpreter stack; Code overrides this.
        return 0

    def invoke_with(self, args, this_fn):
        # Default: ignore this_fn. Subclasses such as Code and MultiArityFn
        # thread this_fn through so recursive self-calls hit the right
        # outer callable.
        return self.invoke(args)
class MultiArityFn(BaseCode):
    """A function with several fixed-arity implementations and an optional
    variadic ('rest') implementation, dispatched on the argument count.
    """
    _type = object.Type(u"pixie.stdlib.MultiArityFn")

    _immutable_fields_ = ["_arities[*]", "_required_arity", "_rest_fn"]

    def type(self):
        return MultiArityFn._type

    def __init__(self, arities, required_arity=0, rest_fn=None, meta=nil):
        # arities: dict mapping arity (int) -> code object.
        # rest_fn handles any call with at least required_arity arguments
        # when no exact arity matches.
        BaseCode.__init__(self)
        self._arities = arities
        self._required_arity = required_arity
        self._rest_fn = rest_fn
        self._meta = meta

    def with_meta(self, meta):
        return MultiArityFn(self._arities, self._required_arity, self._rest_fn, meta)

    @elidable_promote()
    def get_fn(self, arity):
        """Pick the implementation for *arity*; aborts via affirm (raising a
        runtime error) when neither an exact arity nor the rest fn applies.
        """
        f = self._arities.get(arity, None)
        if f is not None:
            return f
        if self._rest_fn is not None and arity >= self._required_arity:
            return self._rest_fn
        # No match: build a readable list of acceptable arities for the error.
        acc = []
        for x in self._arities:
            acc.append(unicode(str(x)))
        if self._rest_fn:
            acc.append(u" or more")
        affirm(False, u"Wrong number of args to fn: got " + unicode(str(arity)) + u" expected " + u",".join(acc))

    def invoke(self, args):
        return self.invoke_with(args, self)

    def invoke_with(self, args, self_fn):
        return self.get_fn(len(args)).invoke_with(args, self_fn)
class NativeFn(BaseCode):
    """Wrapper for a native function"""
    _type = object.Type(u"pixie.stdlib.NativeFn")

    def __init__(self):
        BaseCode.__init__(self)

    def type(self):
        return NativeFn._type

    def invoke(self, args):
        return self.inner_invoke(args)

    def inner_invoke(self, args):
        # Subclasses implement the actual native behavior here.
        raise NotImplementedError()

    def invoke_with(self, args, this_fn):
        # Native functions never self-recurse through this_fn; ignore it.
        return self.invoke(args)
class Code(BaseCode):
    """Interpreted code block. Contains consts and bytecode, plus the stack
    size and debug points the interpreter needs to execute it."""
    _type = object.Type(u"pixie.stdlib.Code")
    __immutable_fields__ = ["_consts[*]", "_bytecode", "_stack_size", "_meta"]

    def type(self):
        return Code._type

    def __init__(self, name, bytecode, consts, stack_size, debug_points, meta=nil):
        BaseCode.__init__(self)
        self._bytecode = bytecode
        self._consts = consts
        self._name = name
        self._stack_size = stack_size
        self._debug_points = debug_points
        self._meta = meta

    def with_meta(self, meta):
        # Immutable-style update: build a new Code sharing everything but meta.
        return Code(self._name, self._bytecode, self._consts, self._stack_size, self._debug_points, meta)

    def get_debug_points(self):
        return self._debug_points

    def invoke(self, args):
        return self.invoke_with(args, self)

    def invoke_with(self, args, this_fn):
        try:
            return interpret(self, args, self_obj=this_fn)
        except object.WrappedException as ex:
            # Append this frame to the in-flight Pixie stack trace, then let
            # the exception keep unwinding.
            ex._ex._trace.append(object.PixieCodeInfo(self._name))
            raise

    @elidable_promote()
    def get_consts(self):
        return self._consts

    @elidable_promote()
    def get_bytecode(self):
        return self._bytecode

    @elidable_promote()
    def stack_size(self):
        return self._stack_size

    @elidable_promote()
    def get_base_code(self):
        return self
class VariadicCode(BaseCode):
__immutable_fields__ = ["_required_arity", "_code", "_meta"]
_type = object.Type(u"pixie.stdlib.VariadicCode")
def type(self):
return VariadicCode._type
def __init__(self, code, required_arity, meta=nil):
BaseCode.__init__(self)
self._required_arity = r_uint(required_arity)
self._code = code
self._meta = meta
def with_meta(self, meta):
return VariadicCode(self._code, self._required_arity, meta)
def invoke(self, args):
return self.invoke_with(args, self)
def invoke_with(self, args, self_fn):
from pixie.vm.array import array
argc = len(args)
if self._required_arity == 0:
return self._code.invoke_with([array(args)], self_fn)
if argc == self._required_arity:
new_args = resize_list(args, len(args) + 1)
new_args[len(args)] = array([])
return self._code.invoke_with(new_args, self_fn)
elif argc > self._required_arity:
start = slice_from_start(args, self._required_arity, 1)
rest = slice_to_end(args, self._required_arity)
st
|
art[self._required_arity] = array(rest)
return self._code.invoke_with(start, self_fn)
affirm(False, u"Got " + unicode(str(argc)) + u" arg(s) need at least " + unicode(str(self._required_arity)))
class Closure(BaseCode):
_type = object.Type(u"pixie.stdlib.Closure")
__immutable_fields__ = ["_closed_overs[*]", "_code", "_meta"]
def type(self):
return Closure._type
|
def __init__(self, code, closed_overs, meta=nil):
BaseCode.__init__(self)
affirm(isinstance(code, Code), u"Code argument to Closure must be an instance of Code")
self._code = code
self._closed_overs = closed_overs
self._meta = meta
def with_meta(self, meta):
return Closure(self._code, self._closed_overs, meta)
def invoke(self, args):
return self.invoke_with(args, self)
def invoke_with(self, args, self_fn):
try:
return interpret(self, args, self_obj=self_fn)
exc
|
missionpinball/mpf-mc
|
mpfmc/uix/transitions.py
|
Python
|
mit
| 5,152
| 0
|
import importlib
from kivy.animation import AnimationTransition
from kivy.properties import StringProperty
from kivy.uix.screenmanager import TransitionBase
from kivy.uix.screenmanager import (WipeTransition, SwapTransition,
FadeTransition, FallOutTransition,
RiseInTransition, CardTransition,
NoTransition)
class TransitionManager:
    def __init__(self, mc):
        """Build the transition registry.

        Args:
            mc: The MPF-MC machine controller; its machine_config names
                the MPF transition modules to load.
        """
        self.mc = mc
        self._transitions = dict()  # transition name -> transition class
        self._register_mpf_transitions()
        self._register_kivy_transitions()
    @property
    def transitions(self):
        """Mapping of registered transition names to their classes."""
        return self._transitions
    def register_transition(self, name, transition_cls):
        """Register *transition_cls* under *name* (a later registration
        with the same name replaces the earlier one)."""
        self._transitions[name] = transition_cls
def get_transition(self, transition_config=None):
if transition_config:
# The kivy shader transitions can't accept unexpected kwargs
kwargs = transition_config.copy()
kwargs.pop('type')
return self._transitions[transition_config['type']](**kwargs)
else:
return NoTransition()
    def _register_mpf_transitions(self):
        """Import and register every MPF transition module listed in the
        machine config; each module exposes NAME and TransitionCls."""
        for t in self.mc.machine_config['mpf-mc']['mpf_transition_modules']:
            i = importlib.import_module('mpfmc.transitions.{}'.format(t))
            self.register_transition(getattr(i, 'NAME'),
                                     getattr(i, 'TransitionCls'))
def _register_kivy_transitions(self):
self.register_transition('wipe', WipeTransition)
self.register_transition('swap', SwapTransition)
self.register_transition('fade', FadeTransition)
self.register_transition('fade_back', FallOutTransition)
self.register_transition('rise_in', RiseInTransition)
self.register_transition('card', CardTransition)
self.register_transition('none', NoTransition)
def validate_transitions(self, config):
if 'transition' in config:
if not isinstance(config['transition'], dict):
config['transition'] = dict(type=config['transition'])
try:
config['transition'] = (
self.mc.config_validator.validate_config(
'transitions:{}'.format(config['transition']['type']),
config['transition']))
except KeyError:
raise ValueError('transition: section of config requires a'
' "type:" setting')
else:
config['transition'] = None
if 'transition_out' in config:
if not isinstance(config['transition_out'], dict):
|
config['transition_out'] = dict(type=config['transition_out'])
try:
config['transition_out'] = (
self.mc.config_validator.validate_config(
'transitions:{}'.format(
config['t
|
ransition_out']['type']),
config['transition_out']))
except KeyError:
raise ValueError('transition_out: section of config '
'requires a "type:" setting')
else:
config['transition_out'] = None
return config
class MpfTransition(TransitionBase):
    """Base class for slide transitions in MPF. Use this when writing your
    own custom transitions.
    """
    easing = StringProperty('linear')
    """String name of the animation 'easing' function that is used to
    control the shape of the curve of the animation.

    Default is 'linear'.
    """

    def __init__(self, **config):
        # Use ** here instead of dict so this constructor is compatible with
        # the Kivy shader transitions too.
        # Only config keys matching existing attributes/properties are
        # applied; unknown keys are silently ignored.
        for k, v in config.items():
            if hasattr(self, k):
                setattr(self, k, v)
        super().__init__()

    def get_vars(self, progression):
        """Convenience method you can call in your own transition's
        on_progress() method to easily get the local vars you need to write
        your own transition.

        Args:
            progression: Float from 0.0 to 1.0 that indicates how far along
                the transition is.

        Returns:
            * Incoming slide object
            * Outgoing slide object
            * Width of the screen
            * Height of the screen
            * Modified progression value (0.0-1.0) which factors in the easing
              setting that has been applied to this transition.
        """
        # The easing name is looked up on kivy's AnimationTransition and
        # applied to the raw progression before returning it.
        return (self.screen_in, self.screen_out,
                self.manager.width, self.manager.height,
                getattr(AnimationTransition, self.easing)(progression))

    def on_complete(self):
        # reset the screen back to its original position
        self.screen_in.pos = self.manager.pos
        self.screen_out.pos = self.manager.pos
        super().on_complete()

        # todo test super().on_complete(). It removes the screen, but is
        # that what we want?

    def on_progress(self, progression):
        # Subclasses must implement the actual per-frame animation.
        raise NotImplementedError
|
jbedorf/tensorflow
|
tensorflow/python/ops/init_ops_v2.py
|
Python
|
apache-2.0
| 26,725
| 0.004041
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Operations often used for initializing tensors.
All variable initializers returned by functions in this file should have the
following signature:
def _initializer(shape, dtype=dtypes.float32):
Args:
shape: List of `int` representing the shape of the output `Tensor`. Some
initializers may also be able to accept a `Tensor`.
dtype: (Optional) Type of the output `Tensor`.
Returns:
A `Tensor` of type `dtype` and `shape`.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_linalg_ops
from tensor
|
flow.python.ops import linalg_ops_impl
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import stateless_random_ops
from tensorflow.python.util.tf_export import tf_export
class Initializer(object):
  """Initializer base class: all initializers inherit from this class.
  """

  def __call__(self, shape, dtype=None):
    """Returns a tensor object initialized as specified by the initializer.

    Args:
      shape: Shape of the tensor.
      dtype: Optional dtype of the tensor. If not provided will return tensor
        of `tf.float32`.
    """
    raise NotImplementedError

  def get_config(self):
    """Returns the configuration of the initializer as a JSON-serializable dict.

    Returns:
      A JSON-serializable Python dict.
    """
    return {}

  @classmethod
  def from_config(cls, config):
    """Instantiates an initializer from a configuration dictionary.

    Example:

    ```python
    initializer = RandomUniform(-1, 1)
    config = initializer.get_config()
    initializer = RandomUniform.from_config(config)
    ```

    Args:
      config: A Python dictionary.
        It will typically be the output of `get_config`.

    Returns:
      An Initializer instance.
    """
    # Configs may include a "dtype" entry that the constructors do not
    # accept; drop it before forwarding the remaining kwargs.
    config.pop("dtype", None)
    return cls(**config)
@tf_export("zeros_initializer", v1=[])
class Zeros(Initializer):
  """Initializer that generates tensors initialized to 0."""

  def __call__(self, shape, dtype=dtypes.float32):
    """Return a zero-filled tensor of the given shape and dtype."""
    resolved_dtype = dtypes.as_dtype(dtype)
    return array_ops.zeros(shape, resolved_dtype)
@tf_export("ones_initializer", v1=[])
class Ones(Initializer):
  """Initializer that generates tensors initialized to 1."""

  def __call__(self, shape, dtype=dtypes.float32):
    """Returns a tensor object initialized as specified by the initializer.

    Args:
      shape: Shape of the tensor.
      dtype: Optional dtype of the tensor. Only numeric or boolean dtypes are
        supported.

    Raises:
      ValueError: If the dtype is not numeric or boolean.
    """
    dtype = dtypes.as_dtype(dtype)
    # The string dtype passes the numpy-compat check, so exclude it
    # explicitly: a tensor of strings cannot be filled with ones.
    if not dtype.is_numpy_compatible or dtype == dtypes.string:
      raise ValueError("Expected numeric or boolean dtype, got %s." % dtype)
    return array_ops.ones(shape, dtype)
@tf_export("constant_initializer", v1=[])
class Constant(Initializer):
  """Initializer that generates tensors with constant values.

  The resulting tensor is filled with `value`, which may be a Python
  scalar, a list/tuple of values, or an N-dimensional numpy array. When
  `value` is a sequence, it must supply at most as many elements as the
  requested shape; if it supplies fewer, the last element is repeated to
  fill the remaining entries, and if it supplies more, constructing the
  tensor raises a `ValueError`.

  Args:
    value: A Python scalar, list or tuple of values, or a N-dimensional
      numpy array. All elements of the initialized variable will be set to
      the corresponding value in the `value` argument.

  Raises:
    TypeError: If the input `value` is not one of the expected types.
  """

  def __init__(self, value=0):
    is_sequence = isinstance(value, (list, tuple, np.ndarray))
    if not (np.isscalar(value) or is_sequence):
      raise TypeError(
          "Invalid type for initial value: %s (expected Python scalar, list or "
          "tuple of values, or numpy.ndarray)." % type(value))
    self.value = value

  def __call__(self, shape, dtype=None):
    """Return a constant tensor of `shape`, cast to `dtype` when given.

    Args:
      shape: Shape of the tensor.
      dtype: Optional dtype of the tensor. If not provided, the dtype of
        the created tensor is inferred from the initial value.

    Raises:
      TypeError: If the initializer cannot create a tensor of the requested
        dtype.
    """
    target_dtype = dtypes.as_dtype(dtype) if dtype is not None else None
    return constant_op.constant(self.value, dtype=target_dtype, shape=shape)

  def get_config(self):
    return {"value": self.value}
@tf_export("random_uniform_initializer", v1=[])
class RandomUniform(Initializer):
"""Initializer that generates tensors with a uniform distribution.
Args:
minval: A python scalar or a scalar tensor. Lower bound of the range
of random values to generate.
maxval: A python scalar or a scalar tensor. Upper bound of the range
of random values to generate. Defaults to 1 for float types.
seed: A Python integer. Used to create random seeds. See
`tf.set_random_seed`
for behavior.
"""
def __init__(self, minval=-0.05, maxval=0.05, seed=None):
self.minval = minval
self.maxval = maxval
self.seed = seed
self._random_generator = _RandomGenerator(seed)
def __call__(self, shape, dtype=dtypes.float32):
"""Returns a tensor object initialized as specified by the initializer.
Args:
shape: Shape of the tensor.
dtype: Optional dtype of the tensor. Only floating point and integer
types are supported.
Raises:
ValueError: If the dtype is not numeric.
"""
dtype = dtypes.as_dtype(dtype)
if not dtype.is_floating and not dtype.is_inte
|
codles/UpDownMethods
|
UpDownMethods/process.py
|
Python
|
mit
| 5,746
| 0
|
import numpy as np
import pandas as pd
import datetime as dt
def initiate_procedure():
    """Create an empty results table for a new up/down staircase run.

    Returns a DataFrame with one row per trial and the columns
    Responses, Value, Reversal, Run, Trial, Direction and DateTime.
    """
    columns = ['Responses', 'Value', 'Reversal', 'Run',
               'Trial', 'Direction', 'DateTime']
    return pd.DataFrame(columns=columns)
def append_result(res, resp, down, up, stepSize, value):
    """Append one trial (stimulus *value*, response *resp*) to *res* and
    re-run the staircase bookkeeping.

    Returns the (next stimulus value, updated results) pair produced by
    process_results().
    """
    # New row: response, presented value, no reversal yet, run/direction
    # are filled in by process_results, trial numbers are 1-based.
    trial_number = len(res) + 1
    res.loc[len(res)] = [resp, value, False, 0, trial_number, 0, createDT()]
    return process_results(res, down, up, stepSize)
def process_results(res, down, up, stepSize):
    """Replay the whole response table and (re)compute staircase state.

    Walks every trial to derive run numbers, reversal flags and the current
    step direction for a transformed up/down staircase, then returns the
    stimulus value to present on the next trial together with the updated
    results frame.

    Args:
        res: DataFrame with 'Responses', 'Value', 'Reversal', 'Run',
            'Trial' and 'Direction' columns (see initiate_procedure).
        down: Consecutive correct responses required to step down.
        up: Consecutive incorrect responses required to step up.
        stepSize: Scalar step size, or a list of per-run step sizes (the
            last entry is reused once runs outnumber the list).

    Returns:
        Tuple of (next stimulus value, updated results DataFrame).
    """
    run = 1          # What run are we in
    cntU = 0         # How many incorrect received in a row
    cntD = 0         # How many correct received in a row
    direction = 0    # Used to track the direction of staircase
    if type(stepSize) is list:
        sSize = stepSize[0]
    else:
        sSize = stepSize
    for t in res.index:
        if t == len(res)-1:  # If most recent response
            res.loc[t, 'Run'] = run  # then fill values
            res.loc[t, 'Direction'] = direction
        # Default action is no change in value
        nextValue = res.loc[t, 'Value']
        if res["Responses"][t]:  # Correct response given
            cntD += 1  # Increment the counter for down
            cntU = 0   # Reset the up counter
            if cntD == down:  # The correct number of down responses
                cntD = 0  # Reset the counter
                if direction != -1:  # We found a reversal
                    if direction != 0:  # The first movement is not a reversal
                        # Only edit values from most recent response
                        # This is necessary when having different step sizes
                        # or reversal conditions
                        if t == len(res)-1:
                            res.loc[t, 'Reversal'] = True
                        # Runs are calculated from first to last in a level
                        # and trials can be counted in multiple runs
                        n = 0
                        while res.loc[t-n, 'Value'] == res.loc[t, 'Value']:
                            res.loc[t-n, 'Run'] += 0.5
                            n += 1
                    run += 1
                    if type(stepSize) is list:
                        if run < len(stepSize):
                            sSize = stepSize[run-1]
                        else:
                            sSize = stepSize[-1]
                    direction = -1
                nextValue = res.loc[t, 'Value'] + direction * sSize
        else:  # Incorrect response given
            cntU += 1
            cntD = 0
            if cntU == up:
                cntU = 0
                if direction != 1:
                    if direction != 0:
                        if t == len(res)-1:
                            res.loc[t, 'Reversal'] = True
                        n = 0
                        while res.loc[t-n, 'Value'] == res.loc[t, 'Value']:
                            res.loc[t-n, 'Run'] += 0.5
                            n += 1
                    run += 1
                    if type(stepSize) is list:
                        if run < len(stepSize):
                            sSize = stepSize[run-1]
                        else:
                            sSize = stepSize[-1]
                    direction = 1
                nextValue = res.loc[t, 'Value'] + direction * sSize
    return nextValue, res
def midpoints(res):
    """Summarise each run of the staircase.

    For every run in *res*, reports the run number, the mean of the unique
    stimulus values visited during the run (Midpoint), and the trial number
    at the centre of the run (CentreTrial).
    """
    table = pd.DataFrame(columns=('Run', 'Midpoint', 'CentreTrial'))
    if res.empty:
        return table
    # Runs are stored in half-steps (trials shared between two runs carry
    # x.5), so round to whole runs and pick members within +/- 0.5.
    for row, run_id in enumerate(np.unique(np.round(res['Run']))):
        members = res[abs(res['Run'] - run_id) <= 0.5]
        levels = np.unique(members["Value"])
        first_trial = min(members["Trial"])
        last_trial = max(members["Trial"])
        centre = first_trial + (last_trial - first_trial) / 2
        table.loc[row] = [int(run_id), np.mean(levels), centre]
    return table
def runs(res):
    """Summarise each run as its (Run, Start, Finish) trial numbers."""
    # Local renamed from `runs` so it no longer shadows this function.
    table = pd.DataFrame(columns=('Run', 'Start', 'Finish'))
    if res.empty:
        return table
    for row, run_id in enumerate(np.unique(np.round(res['Run']))):
        members = res[abs(res['Run'] - run_id) <= 0.5]
        table.loc[row] = [int(run_id), min(members["Trial"]), max(members["Trial"])]
    return table
def reversals(res):
    """Return the reversal trials recorded in *res*.

    Produces a DataFrame with columns 'Reversal' (the run in which the
    reversal happened, i.e. Run - 0.5), 'Value' (stimulus value at the
    reversal) and 'Trial' (trial number).

    Fix: the empty-input branch previously returned a column named
    'Reversals' (plural) while the populated branch returned 'Reversal',
    so downstream column lookups behaved differently on empty input; both
    branches now use 'Reversal'.
    """
    columns = ('Reversal', 'Value', 'Trial')
    if res.empty:
        return pd.DataFrame(columns=columns)
    flagged = res[res.Reversal]
    return pd.DataFrame({'Reversal': flagged['Run'] - 0.5,
                         'Value': flagged['Value'],
                         'Trial': flagged['Trial']})
def estimate_reversals(res, num=2):
    """Estimate the threshold as the mean stimulus value over the last
    *num* reversals.

    Returns None when *res* is empty or contains fewer than *num*
    reversals.
    """
    if res.empty:
        return None
    rev = reversals(res)
    if len(rev) < num:
        return None
    last_values = rev['Value'].values[-num:]
    return np.mean(last_values)
def createDT(sep1="-", sep2=":"):
    """Return the current local time as a timestamp string.

    Layout: yy{sep1}mm{sep1}ddT{sep2}HH{sep2}MM{sep2}SS — note the
    separator immediately after the 'T' (kept for backward compatibility
    with stored results).
    """
    fmt = "%y{0}%m{0}%dT{1}%H{1}%M{1}%S".format(sep1, sep2)
    return dt.datetime.now().strftime(fmt)
|
paulfanelli/planet_alignment
|
planet_alignment/data/system_data.py
|
Python
|
mit
| 1,159
| 0.000863
|
"""
.. module:: system_data
:platform: linux
:synopsis: The module containing the system data.
.. moduleauthor:: Paul Fanelli <paul.fanelli@gmail.com>
.. modulecreated:: 6/26/15
"""
import bunch
import sys
from yaml.parser import ParserError
from zope.interface import implements
from planet_alignment.data.interface import ISystemData
class SystemData(bunch.Bunch):
"""This class houses the system data as a bunch object.
The system data consists of a name, theta, radius and period for each planet.
- **parameters** and **types**::
:param data: The system data as a bunch object.
:type data: Bunch object.
"""
implements(ISystemData)
def __init__(self, data):
try:
|
super(SystemData, self).__init__(data)
except ParserError as pe:
print("ERROR: Error parsing data!")
sys.exit("ERROR: {}".format(pe))
except Exception as e:
print("ERROR: Unknown exceptio
|
n '{}'".format(e))
sys.exit("ERROR: {}".format(e))
def __iter__(self):
return iter(self.system)
def __len__(self):
return len(self.system)
|
gw-sd-2016/Codir
|
codirSublime/SocketIO/websocket/_app.py
|
Python
|
gpl-2.0
| 10,235
| 0.002833
|
"""
websocket - WebSocket client library for Python
Copyright (C) 2010 Hiroki Ohtani(liris)
This lib
|
rary is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WIT
|
HOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1335 USA
"""
"""
WebSocketApp provides higher level APIs.
"""
import threading
import time
import traceback
import sys
import select
import six
from ._core import WebSocket, getdefaulttimeout
from ._exceptions import *
from ._logging import *
from ._abnf import ABNF
__all__ = ["WebSocketApp"]
class WebSocketApp(object):
"""
Higher level of APIs are provided.
The interface is like JavaScript WebSocket object.
"""
def __init__(self, url, header=[],
on_open=None, on_message=None, on_error=None,
on_close=None, on_ping=None, on_pong=None,
on_cont_message=None,
keep_running=True, get_mask_key=None, cookie=None,
subprotocols=None,
on_data=None):
"""
url: websocket url.
header: custom header for websocket handshake.
on_open: callable object which is called at opening websocket.
this function has one argument. The arugment is this class object.
on_message: callbale object which is called when recieved data.
on_message has 2 arguments.
The 1st arugment is this class object.
The passing 2nd arugment is utf-8 string which we get from the server.
on_error: callable object which is called when we get error.
on_error has 2 arguments.
The 1st arugment is this class object.
The passing 2nd arugment is exception object.
on_close: callable object which is called when closed the connection.
this function has one argument. The arugment is this class object.
on_cont_message: callback object which is called when recieve continued
frame data.
on_message has 3 arguments.
The 1st arugment is this class object.
The passing 2nd arugment is utf-8 string which we get from the server.
The 3rd arugment is continue flag. if 0, the data continue
to next frame data
on_data: callback object which is called when a message recieved.
This is called before on_message or on_cont_message,
and then on_message or on_cont_message is called.
on_data has 4 argument.
The 1st arugment is this class object.
The passing 2nd arugment is utf-8 string which we get from the server.
The 3rd argument is data type. ABNF.OPCODE_TEXT or ABNF.OPCODE_BINARY will be came.
The 4rd arugment is continue flag. if 0, the data continue
keep_running: a boolean flag indicating whether the app's main loop
should keep running, defaults to True
get_mask_key: a callable to produce new mask keys,
see the WebSocket.set_mask_key's docstring for more information
subprotocols: array of available sub protocols. default is None.
"""
self.url = url
self.header = header
self.cookie = cookie
self.on_open = on_open
self.on_message = on_message
self.on_data = on_data
self.on_error = on_error
self.on_close = on_close
self.on_ping = on_ping
self.on_pong = on_pong
self.on_cont_message = on_cont_message
self.keep_running = keep_running
self.get_mask_key = get_mask_key
self.sock = None
self.last_ping_tm = 0
self.subprotocols = subprotocols
def send(self, data, opcode=ABNF.OPCODE_TEXT):
"""
send message.
data: message to send. If you set opcode to OPCODE_TEXT,
data must be utf-8 string or unicode.
opcode: operation code of data. default is OPCODE_TEXT.
"""
if not self.sock or self.sock.send(data, opcode) == 0:
raise WebSocketConnectionClosedException("Connection is already closed.")
def close(self):
"""
close websocket connection.
"""
self.keep_running = False
if self.sock:
self.sock.close()
    def _send_ping(self, interval, event):
        """Send a ping frame every *interval* seconds until *event* is set.

        Runs on the daemon thread started by run_forever(); records the
        send time in self.last_ping_tm so the read loop can detect pong
        timeouts.
        """
        while not event.wait(interval):
            self.last_ping_tm = time.time()
            if self.sock:
                self.sock.ping()
def run_forever(self, sockopt=None, sslopt=None,
ping_interval=0, ping_timeout=None,
http_proxy_host=None, http_proxy_port=None,
http_no_proxy=None, http_proxy_auth=None,
skip_utf8_validation=False,
host=None, origin=None):
"""
run event loop for WebSocket framework.
This loop is infinite loop and is alive during websocket is available.
sockopt: values for socket.setsockopt.
sockopt must be tuple
and each element is argument of sock.setscokopt.
sslopt: ssl socket optional dict.
ping_interval: automatically send "ping" command
every specified period(second)
if set to 0, not send automatically.
ping_timeout: timeout(second) if the pong message is not recieved.
http_proxy_host: http proxy host name.
http_proxy_port: http proxy port. If not set, set to 80.
http_no_proxy: host names, which doesn't use proxy.
skip_utf8_validation: skip utf8 validation.
host: update host header.
origin: update origin header.
"""
if not ping_timeout or ping_timeout <= 0:
ping_timeout = None
if sockopt is None:
sockopt = []
if sslopt is None:
sslopt = {}
if self.sock:
raise WebSocketException("socket is already opened")
thread = None
close_frame = None
try:
self.sock = WebSocket(self.get_mask_key,
sockopt=sockopt, sslopt=sslopt,
fire_cont_frame=self.on_cont_message and True or False,
skip_utf8_validation=skip_utf8_validation)
self.sock.settimeout(getdefaulttimeout())
self.sock.connect(self.url, header=self.header, cookie=self.cookie,
http_proxy_host=http_proxy_host,
http_proxy_port=http_proxy_port,
http_no_proxy=http_no_proxy, http_proxy_auth=http_proxy_auth,
subprotocols=self.subprotocols,
host=host, origin=origin)
self._callback(self.on_open)
if ping_interval:
event = threading.Event()
thread = threading.Thread(target=self._send_ping, args=(ping_interval, event))
thread.setDaemon(True)
thread.start()
while self.sock.connected:
r, w, e = select.select((self.sock.sock, ), (), (), ping_timeout)
if not self.keep_running:
break
if ping_timeout and self.last_ping_tm and time.time() - self.last_ping_tm > ping_timeout:
self.last_ping_tm = 0
raise WebSocketTimeoutException("ping timed out")
if r:
op_code, frame = self.sock.recv_data_frame(True)
if op_code == ABNF.OPCODE_CLOSE:
close_frame = frame
break
elif op_code == ABNF.OPCODE_PING:
self._callback(self.on_ping, frame.data)
|
RasaHQ/rasa_nlu
|
rasa/nlu/run.py
|
Python
|
apache-2.0
| 803
| 0.001245
|
import asyncio
import logging
from typing import Text
from rasa.core.agent import Agent
from rasa.shared.utils.cli import pr
|
int_info, print_success
from
|
rasa.shared.utils.io import json_to_string
logger = logging.getLogger(__name__)
def run_cmdline(model_path: Text) -> None:
    """Loops over CLI input, passing each message to a loaded NLU model.

    Reads one line per iteration from stdin; EOF or Ctrl-C ends the loop.

    Args:
        model_path: Path of the trained model for the agent to load.
    """
    agent = Agent.load(model_path)
    print_success("NLU model loaded. Type a message and press enter to parse it.")
    while True:
        print_success("Next message:")
        try:
            message = input().strip()
        except (EOFError, KeyboardInterrupt):
            print_info("Wrapping up command line chat...")
            break

        # parse_message is a coroutine; run it to completion per message.
        result = asyncio.run(agent.parse_message(message))

        print(json_to_string(result))
|
NREL/bifacial_radiance
|
tests/test_gencumsky.py
|
Python
|
bsd-3-clause
| 4,169
| 0.010794
|
# -*- coding: utf-8 -*-
"""
Created on Fri Jul 27 10:08:25 2018
@author: cdeline
Using pytest to create unit tests for gencumulativesky.
Note that this can't be included in the repo until TravisCI has a Linux version of gencumsky
set up in .travis.yml
to run unit tests, run pytest from the command line in the bifacial_radiance directory
to run coverage tests, run py.test --cov-repo
|
rt term-missing --cov=bifacial_radiance
"""
#from bifacial_radiance import Rad
|
ianceObj, SceneObj, AnalysisObj
import bifacial_radiance
import numpy as np
import pytest
import os
# try navigating to tests directory so tests run from here.
try:
os.chdir('tests')
except:
pass
TESTDIR = os.path.dirname(__file__) # this folder
# test the readepw on a dummy Boulder EPW file in the /tests/ directory
MET_FILENAME = 'USA_CO_Boulder.724699_TMY2.epw'
# also test a dummy TMY3 Denver file in /tests/
MET_FILENAME2 = "724666TYA.CSV"
DEBUG = True
"""
def test_SingleModule_gencumsky():
import datetime
# 1 module for STC conditions. DNI:900, DHI:100, sun angle: 33 elevation 0 azimuth
name = "_test_fixedtilt_end_to_end"
demo = bifacial_radiance.RadianceObj(name) # Create a RadianceObj 'object'
demo.setGround(0.62)
metdata = demo.readWeatherFile(MET_FILENAME, starttime='06_17_13', endtime='06_17_13')
demo.genCumSky() # 1p, June 17th
# create a scene using panels in landscape at 10 deg tilt, 1.5m pitch. 0.2 m ground clearance
sceneDict = {'tilt':10,'pitch':1.5,'clearance_height':0.2, 'nMods':10, 'nRows':3}
demo.makeModule(name='test',y=0.95,x=1.59, xgap=0)
scene = demo.makeScene('test',sceneDict)
octfile = demo.makeOct(demo.getfilelist()) # makeOct combines all of the ground, sky and object files into a .oct file.
analysis = bifacial_radiance.AnalysisObj(octfile, demo.name) # return an analysis object including the scan dimensions for back irradiance
(frontscan,backscan) = analysis.moduleAnalysis(scene)
analysis.analysis(octfile, demo.name, frontscan, backscan) # compare the back vs front irradiance
assert analysis.mattype[0][:12] == 'a4.1.a0.test'
assert analysis.rearMat[0][:12] == 'a4.1.a0.test'
assert np.mean(analysis.x) == pytest.approx(0)
assert np.mean(analysis.rearY) == pytest.approx(0.00017364868888889194, abs = 0.0001)
if DEBUG:
print(np.mean(analysis.Wm2Front))
print(np.mean(analysis.Wm2Back))
print(np.mean(analysis.backRatio))
# Note: gencumsky has 30-50 Wm-2 variability from run to run... unsure why.
assert np.mean(analysis.Wm2Front) == pytest.approx(1030, abs = 60) #1023,1037,1050, 1035, 1027, 1044, 1015, 1003, 1056
assert np.mean(analysis.Wm2Back) == pytest.approx(133, abs = 15) # 127, 131, 131, 135, 130, 139, 120, 145
# run 1-axis gencumsky option
trackerdict = demo.set1axis(metdata, limit_angle = 45, backtrack = True, gcr = 0.33)
demo.genCumSky1axis(trackerdict)
"""
def test_SingleModule_gencumsky_modelchain():
    """Re-run the single-module gencumsky sample through the modelchain API
    and check the scan results against the same tolerances as the direct run."""
    ini_path = "ini_gencumsky.ini"
    params = bifacial_radiance.load.readconfigurationinputfile(inifile=ini_path)
    params[0]['testfolder'] = TESTDIR
    # runModelChain consumes the unpacked configuration tuple.
    _, analysis = bifacial_radiance.modelchain.runModelChain(*params)
    # V 0.2.5 fixed the gcr passed to set1axis (previously the 0.33 default was used).
    assert analysis.mattype[0][:12] == 'a4.1.a0.test'
    assert analysis.rearMat[0][:12] == 'a4.1.a0.test'
    assert np.mean(analysis.x) == pytest.approx(0)
    assert np.mean(analysis.rearY) == pytest.approx(0.00017, abs=0.00001)
    if DEBUG:
        for values in (analysis.Wm2Front, analysis.Wm2Back, analysis.backRatio):
            print(np.mean(values))
    # Note: gencumsky has 30-50 Wm-2 variability from run to run... unsure why.
    assert np.mean(analysis.Wm2Front) == pytest.approx(1030, abs=60)  # 1023,1037,1050, 1035, 1027, 1044, 1015, 1003, 1056
    assert np.mean(analysis.Wm2Back) == pytest.approx(133, abs=15)  # 127, 131, 131, 135, 130, 139, 120, 145
|
getsentry/obs
|
setup.py
|
Python
|
apache-2.0
| 1,534
| 0
|
#!/usr/bin/env python
"""
obs
===
:copyright: (c) 2015 Functional Software, Inc
:license: Apache 2.0, see LICENSE for more details.
"""
from __future__ import absolute_import, unicode_literals
import os.path
from setuptools import setup, find_packages
# Hack to prevent stupid "TypeError: 'NoneType' object is not callable" error
# in multiprocessing/util.py _exit_function when running `python
# setup.py test` (see
# http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
for m in ('multiprocessing', 'billiard'):
try:
__import__(m)
except ImportError:
pass
ROOT = os.path.realpath(os.path.join(os.path.dirname(__file__)))
tests_require = [
'flake8>=2.1.0,<2.2.0',
'mock>=1.0.
|
1,<1.1.0',
'pytest>=2.5.0,<2.6.0',
'pytest-cov>=1.6,<1.7',
'pytest-timeout>=0.3,<0.4',
'pytest-xdist>=1.9,<1.10',
]
install_requires = [
]
setup(
name='obs',
version='0.0.0',
author='David Cramer',
author_email='dcramer@gmail
|
.com',
url='https://github.com/getsentry/obs',
description='',
long_description=open('README.md').read(),
packages=find_packages(exclude=['tests']),
zip_safe=False,
install_requires=install_requires,
extras_require={
'test': tests_require,
},
license='Apache 2.0',
include_package_data=True,
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
|
kobejean/tensorflow
|
tensorflow/python/profiler/model_analyzer_test.py
|
Python
|
apache-2.0
| 33,231
| 0.009088
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gzip
import io
import os
import random
import re
import numpy as np
from tensorflow.core.profiler import profile_pb2
from tensorflow.core.profiler import tfprof_log_pb2
from tensorflow.core.protobuf import config_pb2
from tensorflow.core.protobuf import rewriter_config_pb2
from tensorflow.python.client import session
from tensorflow.python.eager import context
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gradients
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import gfile
from tensorflow.python.platform import test
from tensorflow.python.profiler import model_analyzer
from tensorflow.python.profiler import option_builder
from tensorflow.python.profiler import profile_context
from tensorflow.python.profiler.internal import model_analyzer_testlib as lib
from tensorflow.python.util import compat
builder = option_builder.ProfileOptionBuilder
class PrintModelAnalysisTest(test.TestCase):
def _no_rewrite_session_config(self):
rewriter_config = rewriter_config_pb2.RewriterConfig(
pin_to_host_optimization=rewriter_config_pb2.RewriterConfig.OFF)
graph_options = config_pb2.GraphOptions(rewrite_options=rewriter_config)
return config_pb2.ConfigProto(graph_options=graph_options)
def testDumpToFile(self):
ops.reset_default_graph()
outfile = os.path.join(test.get_temp_dir(), 'dump')
opts = builder(builder.trainable_variables_parameter()
).with_file_output(outfile).build()
with session.Session(config=self._no_rewrite_session_config()) as sess:
_ = lib.BuildSmallModel()
model_analyzer.profile(sess.graph, options=opts)
with gfile.Open(outfile, 'r') as f:
self.assertEqual(u'node name | # parameters\n'
'_TFProfRoot (--/451 params)\n'
' DW (3x3x3x6, 162/162 params)\n'
' DW2 (2x2x6x12, 288/288 params)\n'
' ScalarW (1, 1/1 params)\n',
lib.CheckAndRemoveDoc(f.read()))
def testSelectEverythingDetail(self):
ops.reset_default_graph()
dev = '/device:GPU:0' if test.is_gpu_available() else '/device:CPU:0'
outfile = os.path.join(test.get_temp_dir(), 'dump')
opts = (builder(builder.trainable_variables_parameter())
.with_file_output(outfile)
.with_accounted_types(['.*'])
.select(['micros', 'bytes', 'params', 'float_ops', 'occurrence',
'device', 'op_types', 'input_shapes']).build())
with profile_context.ProfileContext(test.get_temp_dir(),
trace_steps=[],
dump_steps=[]) as pctx:
with session.Session(
config=self._no_rewrite_session_config()) as sess, ops.device(dev):
x = lib.BuildSmallModel()
sess.run(variables.global_variables_initializer())
pctx.trace_next_step()
pctx.dump_next_step()
_ = sess.run(x)
pctx.profiler.profile_name_scope(options=opts)
with gfile.Open(outfile, 'r') as f:
# pylint: disable=line-too-long
dump_str = lib.CheckAndRemoveDoc(f.read())
outputs = dump_str.split('\n')
self.assertEqual(outputs[0],
'node name | # parameters | # float_ops | requested bytes | total execution time | accelerator execution time | cpu execution time | assigned devices | op types | op count (run|defined) | input shapes')
for o in outputs[1:]:
if o.find('Conv2D ') > 0:
metrics = o[o.find('(') +1: o
|
.find(')')].split(',')
# Make sure time is profiled.
gap = 1 if test.is_gpu_available() else 2
for i in range(3, 6, gap):
mat = re.search('(.*)(?:us|ms|sec)/(.*)(?:us|ms|sec)', metrics[i])
|
self.assertGreater(float(mat.group(1)), 0.0)
self.assertGreater(float(mat.group(2)), 0.0)
# Make sure device is profiled.
if test.is_gpu_available():
self.assertTrue(metrics[6].find('gpu') > 0)
self.assertFalse(metrics[6].find('cpu') > 0)
else:
self.assertFalse(metrics[6].find('gpu') > 0)
self.assertTrue(metrics[6].find('cpu') > 0)
# Make sure float_ops is profiled.
mat = re.search('(.*)k/(.*)k flops', metrics[1].strip())
self.assertGreater(float(mat.group(1)), 0.0)
self.assertGreater(float(mat.group(2)), 0.0)
# Make sure op_count is profiled.
self.assertEqual(metrics[8].strip(), '1/1|1/1')
# Make sure input_shapes is profiled.
self.assertEqual(metrics[9].strip(), '0:2x6x6x3|1:3x3x3x6')
if o.find('DW (3x3x3x6') > 0:
metrics = o[o.find('(') +1: o.find(')')].split(',')
mat = re.search('(.*)/(.*) params', metrics[1].strip())
self.assertGreater(float(mat.group(1)), 0.0)
self.assertGreater(float(mat.group(2)), 0.0)
# pylint: enable=line-too-long
# Test that profiler restored from profile file gives the same result.
gfile.Remove(outfile)
profile_file = os.path.join(test.get_temp_dir(), 'profile_1')
with lib.ProfilerFromFile(profile_file) as profiler:
profiler.profile_name_scope(options=opts)
with gfile.Open(outfile, 'r') as f:
self.assertEqual(dump_str, lib.CheckAndRemoveDoc(f.read()))
def testSelectEverything(self):
ops.reset_default_graph()
outfile = os.path.join(test.get_temp_dir(), 'dump')
opts = (builder(builder.trainable_variables_parameter())
.with_file_output(outfile)
.with_accounted_types(['.*'])
.select(['params', 'float_ops', 'occurrence', 'device', 'op_types',
'input_shapes']).build())
with session.Session(config=self._no_rewrite_session_config()
) as sess, ops.device('/device:CPU:0'):
x = lib.BuildSmallModel()
sess.run(variables.global_variables_initializer())
run_meta = config_pb2.RunMetadata()
_ = sess.run(x,
options=config_pb2.RunOptions(
trace_level=config_pb2.RunOptions.FULL_TRACE),
run_metadata=run_meta)
model_analyzer.profile(
sess.graph, run_meta, options=opts)
def testSimpleCodeView(self):
ops.reset_default_graph()
outfile = os.path.join(test.get_temp_dir(), 'dump')
# TODO(xpan): Test 'micros'. Since the execution time changes each run,
# it's a bit difficult to test it now.
opts = (builder(builder.trainable_variables_parameter())
.with_file_output(outfile)
.with_accounted_types(['.*'])
.with_node_names(show_name_regexes=['.*model_analyzer_testlib.*'])
.account_displayed_op_only(False)
.select(['bytes', 'params', 'float_ops', 'num_hidden_ops', 'device',
'input_shapes']).build())
with session.Session(config=self._no
|
sunlightlabs/hanuman
|
data_collection/urls.py
|
Python
|
bsd-3-clause
| 730
| 0.00137
|
from django.conf.urls import url
from rest_framework.urlpatterns import format_suffix_patterns
import views
urlpatterns = [
url(r'^firms/$', views.FirmList.as_view()),
url(r'^firms/(?P<pk>[0-9]+)/$', views.FirmDetail.as_view()),
url(r'^firms/next/$', views.NextFirmDetail.as_view()),
url(r'^bio-pages/$', views.BioPageCreate.as_view()),
url(r'^view-logs/$', views.ViewLogCreate.as_vie
|
w()),
url(r'^flags/$', views.FlagCreate.as_view()),
url(r'^token-auth/', 'rest_framework_jwt.views.obtain_jwt_token'),
url(r'^token-auth-ns/', views.ObtainJSONWebTokenNS.as_view()),
url(r'^token-refresh/', 'rest_framewo
|
rk_jwt.views.refresh_jwt_token'),
]
urlpatterns = format_suffix_patterns(urlpatterns)
|
orione7/Italorione
|
servers/megadrive.py
|
Python
|
gpl-3.0
| 1,956
| 0.002559
|
# -*- coding: utf-8 -*-
# ------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Conector para megadrive
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
# by DrZ3r0
# ------------------------------------------------------------
import re
from core import logger
from core import scrapertools
# Returns an array of possible video url's from the page_url
def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("[megadrive.py] get_video_url(page_url='
|
%s')" % page_url)
video_urls = []
data = scrapertools.cache_page(page_url)
data_pack = scrapertools.find_single_match(data, "(eval.functi
|
on.p,a,c,k,e,.*?)\s*</script>")
if data_pack != "":
from core import unpackerjs3
data_unpack = unpackerjs3.unpackjs(data_pack)
if data_unpack == "":
from core import jsunpack
data_unpack = jsunpack.unpack(data_pack)
data = data_unpack
video_url = scrapertools.find_single_match(data, 'file"?\s*:\s*"([^"]+)",')
video_urls.append(["[megadrive]", video_url])
for video_url in video_urls:
logger.info("[megadrive.py] %s - %s" % (video_url[0], video_url[1]))
return video_urls
# Find videos for this server in the given text (scrapes megadrive embed ids).
def find_videos(data):
    """Return a list of ``[title, url, server]`` entries for every unique
    megadrive video found in *data*.

    Matches both plain and ``embed-`` style megadrive.tv links, rebuilds the
    canonical embed url for each id, and logs duplicates instead of
    re-adding them.
    """
    encontrados = set()   # urls already emitted (dedup guard)
    devuelve = []         # accumulated [title, url, server] triples
    # NOTE(review): the dots in the domain are unescaped, so '.' matches any
    # character here — harmless in practice, but worth confirming.
    patronvideos = r"""http://megadrive.tv/(?:embed-)?([a-z0-9A-Z]+)"""
    logger.info("[megadrive.py] find_videos #" + patronvideos + "#")
    matches = re.compile(patronvideos, re.DOTALL).findall(data)
    for match in matches:
        titulo = "[megadrive]"
        # Rebuild the canonical embed url from the captured video id.
        url = 'http://megadrive.tv/embed-%s-640x360.html' % match
        if url not in encontrados:
            logger.info("    url=" + url)
            devuelve.append([titulo, url, 'megadrive'])
            encontrados.add(url)
        else:
            logger.info("    url duplicada=" + url)
    return devuelve
|
Swiftea/Crawler
|
crawler/tests/test_data.py
|
Python
|
gpl-3.0
| 2,567
| 0.007803
|
#!/usr/bin/env python3
from shutil import rmtree
from os import remove, path
from crawler.swiftea_bot.data import BASE_LINKS
URL = "http://aetfiws.ovh"
SUGGESTIONS = ['http://suggestions.ovh/page1.html', 'http://suggestions.ovh/page2.html']
CODE1 = """<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="Description" content="Moteur de recherche">
<title>Swiftea</title>
<link rel="stylesheet" href="public/css/reset.css">
<link rel="icon" href="public/favicon.ico" type="image/x-
|
icon">
</head>
<body>
<p>une <a href="demo">CSS Demo</a> ici!</p>
<h1>Gros t
|
itre🤣 </h1>
<h2>Moyen titre</h2>
<h3>petit titre</h3>
<p><strong>strong </strong><em>em</em></p>
<a href="index">
<img src="public/themes/default/img/logo.png" alt="Swiftea">
</a>
du texte au milieu
<a href="about/ninf.php" rel="noindex, nofollow">Why use Swiftea ?1</a>
<a href="about/ni.php" rel="noindex">Why use Swiftea ?2</a>
<a href="about/nf.php" rel="nofollow">Why use Swiftea ?3</a>
<img src="public/themes/default/img/github.png" alt="Github Swiftea">
<img src="public/themes/default/img/twitter.png" alt="Twitter Swiftea">
<p>©</p>
<p>></p>
</body>
</html>
"""
CODE2 = """<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-language" content="en">
<meta http-equiv="Content-Type" content="text/html; charset=UTF-16 LE" />
<link rel="shortcut icon" href="public/favicon2.ico" type="image/x-icon">
</head>
<body>
</body>
</html>
"""
CODE3 = """<!DOCTYPE html>
<html>
<head>
<meta name="language" content="fr">
</head>
<body>
</body>
</html>
"""
INVERTED_INDEX = {'EN': {
'A': {'ab': {'above': {1: .3, 2: .1}, 'abort': {1: .3, 2: .1}}},
'W': {'wo': {'word': {1: .3, 30: .4}}}}, 'FR': {
'B': {'ba': {'bateau': {1: .5}}, 'bo': {'boule': {1: .25, 2: .8}}}}}
CLEANED_KEYWORDS = [
('le', 1),
('2015', 1),
('bureau', 1),
('word', 1),
('example', 1),
('oiseau', 1),
('quoi', 1),
('epee', 1),
('clock', 1),
('çochon', 1),
('12h', 1)
]
def reset(DIR_DATA):
    """Remove every on-disk artefact left behind by a crawler test run.

    If *DIR_DATA* exists it is deleted wholesale; otherwise the individual
    data directories are removed one by one from the current directory.
    """
    if path.exists(DIR_DATA):
        rmtree(DIR_DATA)
    else:
        # NOTE(review): these rmtree calls raise if a directory is missing —
        # presumably the caller guarantees they all exist; confirm.
        rmtree('badwords')
        rmtree('stopwords')
        rmtree('inverted_index')
        rmtree('links')
        rmtree('config')
        rmtree('stats')
    # for global tests: drop the redirected-output artefact if present
    if path.exists('test_redirect_output.ext'):
        remove('test_redirect_output.ext')
|
sloria/device-inventory
|
inventory/settings/__init__.py
|
Python
|
bsd-3-clause
| 214
| 0
|
""" Settings for inventory """
from .base import *
t
|
ry:
from .local i
|
mport *
except ImportError, exc:
exc.args = tuple(
['%s (did you rename settings/local-dist.py?)' % exc.args[0]])
raise exc
|
cs-au-dk/Artemis
|
WebKit/Tools/Scripts/webkitpy/layout_tests/models/test_input.py
|
Python
|
gpl-3.0
| 2,580
| 0.001163
|
#!/usr/bin/env python
# Copyright (C) 2010 Google Inc. All rights reserved.
# Copyright (C) 2010 Gabor Rapcsanyi (rgabor@inf.u-szeged.hu), University of Szeged
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
class TestInput(object):
"""Groups information about a test for easy passing of data."""
def __init__(self, test_name, timeout):
"""Holds the input parameters for a test.
Args:
test: name of test (not an absolute path!)
timeout: Timeout in msecs the driver should use while running the test
"""
self.test_name = test_name
self.timeout = timeout
# TestInput objects are normally constructed by the manager and passed
# to the workers, but these two fields are set lazily in the workers
# beca
|
use they require us to figure out if the test is a reftest or not
# and we want to be able to do that in parallel.
self.should_run_pixel_tests = None
self.reference_files = None
def __repr__(self):
return "TestInput('%s', %d, %s, %s)" % (self.test_name, self.timeout, self.
|
should_run_pixel_tests, self.reference_files)
|
tectronics/crossepg
|
src/enigma2/python/plugin.py
|
Python
|
lgpl-2.1
| 2,321
| 0.046963
|
from crossepglib import CrossEPG_Config
from crossepg_main import crossepg_main
from crossepg_locale import _
from Plugins.Plugin import PluginDescriptor
def setup(menuid, **kwargs):
if menuid == "setup":
return [("CrossEPG", crossepg_main.setup, "crossepg", None)]
else:
return []
# Thin adapters: PluginDescriptor callbacks may receive extra keyword
# arguments, so each wrapper swallows them and forwards only what
# crossepg_main expects.
def call_downloader(session, **kwargs):
    # Launch the EPG downloader for this session.
    crossepg_main.downloader(session)
def call_loaderAsPlugin(session, **kwargs):
    # Run the loader ("force reload") entry point as a plugin.
    crossepg_main.loaderAsPlugin(session)
def call_setup(session, **kwargs):
    # Open the CrossEPG setup panel.
    crossepg_main.setup(session)
def call_autostart(reason, session):
    # Session-start hook; *reason* is forwarded unchanged.
    crossepg_main.autostart(reason, session)
def Plugins(**kwargs):
config = CrossEPG_Config()
config.load()
plugins = list()
if config.show_plugin == 1 and config.show_extension == 1:
plugins.append(PluginDescriptor(name="CrossEPG Downloader",
description=_("An EPG downloader"),
where = [ PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_PLUGINMENU ],
fnc = call_downloader))
elif config.show_extension == 1:
plugins.append(PluginDescriptor(name="CrossEPG Downloader",
description=_("An EPG downloader"),
where = PluginDescriptor.WHERE_EXTENSIONSMENU,
fnc = call_downloader))
elif config.show_plugin == 1:
plugins.append(PluginDescriptor(name="CrossEPG Downloader",
description=_("An EPG downloader"),
where = PluginDescriptor.WHERE_PLUGINMENU,
fnc = call_downloader))
if config.isQBOXHD():
plugins.append(PluginDescriptor(name="CrossEPG",
description=_("CrossEPG setup panel"),
where = PluginDescriptor.WHERE_PLUGINMENU,
fnc = call_setup))
el
|
se:
plugins.append(PluginDescriptor(name="CrossEPG",
description=_("CrossEPG setup panel"),
|
where = PluginDescriptor.WHERE_MENU,
fnc = setup))
plugins.append(PluginDescriptor(name="CrossEPG Auto",
description = _("CrossEPG automatic actions"),
where = PluginDescriptor.WHERE_SESSIONSTART,
fnc = call_autostart))
if config.show_force_reload_as_plugin == 1:
plugins.append(PluginDescriptor(name="CrossEPG Force Reload",
description=_("CrossEPG Force Reload"),
where = PluginDescriptor.WHERE_PLUGINMENU,
fnc = call_loaderAsPlugin))
return plugins;
|
jianwei1216/my-scripts
|
mytest/python/MyInternet/myserver.py
|
Python
|
gpl-2.0
| 400
| 0.0025
|
#!/usr/bin/python
|
import socket
def server_test():
s = socket.socket()
host = socket.gethostname()
port = 12345
s.bind((host, port))
s.listen(5)
while True:
c, addr = s.accept()
print c
print 'connect addr: ', addr
c.send('Welcome to CaiNiao!')
if cmp(c.recv(1024), "GoodBye") == 0:
|
break
c.close()
s.close()
|
rafaelbezerra-dev/PlantMonitoringSystem
|
monitoring_node/node.py
|
Python
|
gpl-3.0
| 1,138
| 0.011424
|
import sys, os, json
physical_addess = ''
node_info = None
def getMacAdd
|
ress():
if sys.platform == 'win32':
for line in os.popen("ipconfig /all"):
if line.lstrip().startswith('Physical Address'):
mac = line.split(':')[1].strip().replace('-',':')
break
else:
|
for line in os.popen("/sbin/ifconfig"):
if line.find('Ether') > -1:
mac = line.split()[4]
break
return mac
def get(new_node = False, userId = None):
    """Return this node's description dict, lazily loaded from node.json.

    On first call the physical (MAC) address is resolved and node.json is
    read; later calls reuse the cached module-level state.  When *new_node*
    is true the stored "id" key is dropped so the backend can assign a
    fresh one.
    """
    global physical_addess
    global node_info
    if not physical_addess:
        # First call: cache the MAC address and load the stored node record.
        physical_addess = getMacAddress()
        f = open('node.json', 'r')
        node_info = json.loads(f.read())
        f.close()
    node_info['physicalAddress'] = physical_addess
    if userId is not None:
        node_info['userId'] = userId
    if new_node:
        # Registering a new node: let the backend allocate the id.
        node_info.pop("id", None)
    return node_info
def update(node):
    """Persist *node* to node.json and cache it as the current node record.

    Fix: the original opened the file with a bare ``open``/``close`` pair,
    leaking the handle if ``dumps``/``write`` raised.  ``with`` guarantees
    the file is closed on every path.  Returns 1 (kept for backward
    compatibility with existing callers).
    """
    global node_info
    node_info = node
    # Serialize before opening the file so a serialization error never
    # truncates an existing node.json.
    serialized = json.dumps(node, sort_keys=True, indent=4, separators=(',', ': '))
    with open('node.json', 'w') as f:
        f.write(serialized)
    return 1
|
wnavarre/email-dictator
|
script/template_test.py
|
Python
|
mit
| 1,955
| 0.008184
|
import template as t
def test_template_once(inp, vals, funcs, output):
    """Parse *inp* with the given values/functions and assert the result
    equals *output* (the case is printed for easier debugging on failure)."""
    actual = t.Template(inp).parse(vals, funcs)
    print (inp, vals, funcs, output, actual)
    assert(actual == output)
    # Python 2 style success marker.
    print True
def test_basic_vals_0():
    # Templates with no placeholders pass through unchanged.
    test_template_once("", {}, {}, "")
    test_template_once("HI", {}, {}, "HI")
def test_basic_vals_1():
    # A @@@name@@@ placeholder is replaced by its value; lookup is
    # case-sensitive ("HI" wins over "hi" here).
    inp = "@@@HI@@@"
    vals = {"HI": "hEllo", "hi": "hoho"}
    funcs = {}
    output = "hEllo"
    test_template_once(inp, vals, funcs, output)
def test_basic_vals_2():
inp = "My name is @@@name@@@ and I am @@@age@@
|
@."
vals = {"name": "William", "age": 20}
funcs = {}
output = "My name is William and I am 20."
test_template_once(inp, vals, funcs, output)
def test_basic_func():
inp = "I am @@@age@@@ so I can @@@\\tooyoung@@@buy alcohol."
vals1 = {"age": 20}
vals
|
2 = {"age": 21}
def f(vals, funcs):
if vals["age"] < 21:
return "not "
else:
return ""
funcs = {"tooyoung": f}
output1 = "I am 20 so I can not buy alcohol."
output2 = "I am 21 so I can buy alcohol."
test_template_once(inp, vals1, funcs, output1)
test_template_once(inp, vals2, funcs, output2)
def test_compound_func():
    """One template function consulting another through the *funcs* table.

    Bug fix: the original wrote ``if funcs["too_young"]:`` — testing the
    truthiness of the function object itself, which is always true — so both
    ages produced "not ".  The compound function must *call* too_young with
    the standard ``(vals, funcs)`` signature (as in test_basic_func), and a
    21-year-old can then buy alcohol, so output2's expectation is corrected.
    """
    inp = "I am @@@age@@@ again so I can @@@\\notiftooyoung@@@buy alcohol."
    vals1 = {"age": 20}
    vals2 = {"age": 21}
    def too_young(vals, funcs):
        # Predicate consulted by not_if_too_young below.
        return vals["age"] < 21
    def not_if_too_young(vals, funcs):
        # Call the sibling function instead of testing its truthiness.
        if funcs["too_young"](vals, funcs):
            return "not "
        else:
            return ""
    funcs = {"notiftooyoung": not_if_too_young,
             "too_young": too_young}
    output1 = "I am 20 again so I can not buy alcohol."
    output2 = "I am 21 again so I can buy alcohol."
    test_template_once(inp, vals1, funcs, output1)
    test_template_once(inp, vals2, funcs, output2)
test_basic_vals_0()
test_basic_vals_1()
test_basic_vals_2()
test_basic_func()
test_compound_func()
|
wikkii/raspluonto
|
old/python_flask/old/main.py
|
Python
|
mit
| 388
| 0.064433
|
from dbconnect import connection
from flask import Flask, render_template
@app.route('/index/')
def display_data():
try:
c, conn = connection()
query = "SELECT * from sensors"
c
|
.execute(query)
data = c.fetchall()
conn.connection()
#return data
return r
|
ender_template("index.php", data=data)
except Exception as e:
return (str(e))
|
ray306/expy
|
test/show_picture.py
|
Python
|
gpl-3.0
| 882
| 0.004535
|
# coding:utf-8
##### package test #####
import sys
sys.path = ['../']+sys.path
################
from expy import * # Import the needed functions
start() # Initiate the experiment environment
'''General usage'''
# Draw a picture on the canvas center
drawPic('data/demo.jpg')
show(3) # Display current canvas
''''''
|
# Draw a zoomed picture on the canvas center
drawPic('data/demo.jpg', w=400, h=300)
|
show(3) # Display current canvas
# Draw a zoomed picture on the canvas center
drawPic('data/demo.jpg', w=300, h=400, rotate=90)
show(3) # Display current canvas
# Draw a zoomed picture on the canvas, and move it
drawPic('data/demo.jpg', w=400, h=300, x=0.5, y=0.5)
show(3) # Display current canvas
# Draw a zoomed picture on the canvas, and move it
drawPic('data/demo.jpg', w=400, h=300, x=0.5, y=0.5, anchor_x='right',anchor_y='center')
show(3) # Display current canvas
|
wevoice/wesub
|
apps/videos/rpc.py
|
Python
|
agpl-3.0
| 11,527
| 0.002429
|
# Amara, universalsubtitles.org
#
# Copyright (C) 2013 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
import datetime
from django.conf import settings
from django.core.paginator import Paginator, InvalidPage, EmptyPage
from django.template import RequestContext
from django.template.defaultfilters import slugify
from django.template.loader import render_to_string
from django.utils.translation import ugettext as _
from django.utils.translation import ugettext_lazy
from subtitles.models import SubtitleLanguage
from videos.models import Video
from videos.tasks import send_change_title_email
from utils.multi_query_set import MultiQuerySet
from utils.rpc import Error, Msg, RpcExceptionEvent, add_request_to_kwargs
from utils.translation import get_user_languages_from_request
class VideosApiClass(object):
authentication_error_msg = ugettext_lazy(u'You should be authenticated.')
popular_videos_sorts = {
'week': 'week_views',
'month': 'month_views',
'year': 'year_views',
'total': 'total_views'
}
def unfeature_video(self, video_id, user):
if not user.has_perm('videos.edit_video'):
raise RpcExceptionEvent(_(u'You have not permission'))
try:
c = Video.objects.filter(pk=video_id).update(featured=None)
except (ValueError, TypeError):
raise RpcExceptionEvent(_(u'Incorrect video ID'))
if not c:
raise RpcExceptionEvent(_(u'Video does not exist'))
return {}
    def feature_video(self, video_id, user):
        """Mark one video as featured, stamped with today's date.

        Raises RpcExceptionEvent on missing permission, a malformed id, or
        an unknown video; returns an empty dict on success.
        """
        if not user.has_perm('videos.edit_video'):
            raise RpcExceptionEvent(_(u'You have not permission'))
        try:
            # NOTE(review): Video.DoesNotExist in the except tuple below looks
            # unreachable for .filter().update() and is inconsistent with the
            # sibling unfeature_video — confirm before removing.
            c = Video.objects.filter(pk=video_id).update(featured=datetime.datetime.today())
        except (ValueError, TypeError, Video.DoesNotExist):
            raise RpcExceptionEvent(_(u'Incorrect video ID'))
        if not c:
            raise RpcExceptionEvent(_(u'Video does not exist'))
        return {}
@add_request_to_kwargs
def load_video_languages(self, video_id, user, request):
"""
Load langs for search pages. Will take into consideration
the languages the user speaks.
Ordering is user language, then completness , then percentage
then name of the language.
We're sorting all in memory since those sets should be pretty small
"""
LANGS_COUNT = 7
try:
video = Video.objects.get(pk=video_id)
except Video.DoesNotExist:
video = None
user_langs = get_user_languages_from_request(request)
langs = list(video.newsubtitlelanguage_set.having_nonempty_tip())
first_languages = [] #user languages and original
other_languages = [] #other languages already ordered by subtitle_count
for language in langs:
if language.language_code in user_langs or language.is_primary_audio_language():
first_languages.append(language)
else:
other_languages.append(language)
def _cmp_first_langs(lang1, lang2):
"""
languages should original in user_langs
"""
in_user_language_cmp = cmp(lang1.language_code in user_langs, lang2.language_code in user_langs)
#one is not in user language
if in_user_language_cmp != 0:
return in_user_language_cmp
if lang1.language_code in user_langs:
#both in user's language, sort alphabetically
|
return
|
cmp(lang2.get_language_code_display(), lang1.get_language_code_display())
#one should be original
return cmp(lang1.is_original, lang2.is_original)
first_languages.sort(cmp=_cmp_first_langs, reverse=True)
#fill first languages to LANGS_COUNT
if len(first_languages) < LANGS_COUNT:
other_languages = other_languages[:(LANGS_COUNT-len(first_languages))]
other_languages.sort(lambda l1, l2: cmp(l1.get_language_code_display(), l2.get_language_code_display()))
langs = first_languages + other_languages
else:
langs = first_languages[:LANGS_COUNT]
context = {
'video': video,
'languages': langs
}
return {
'content': render_to_string('videos/_video_languages.html', context)
}
    @add_request_to_kwargs
    def load_featured_page(self, page, request, user):
        # Render one pager page of the featured-videos queryset.
        qs = Video.objects.featured()
        return render_page(page, qs, request=request)
    @add_request_to_kwargs
    def load_latest_page(self, page, request, user):
        # Render one pager page of the most recently added videos.
        qs = Video.objects.latest()
        return render_page(page, qs, request=request)
    @add_request_to_kwargs
    def load_featured_page_volunteer(self, page, request, user):
        """Featured page over the two volunteer querysets, merged into one
        lazily-evaluated MultiQuerySet with an explicit precomputed count."""
        # NOTE(review): rel/rest presumably split by relevance to the user's
        # languages — confirm against _get_volunteer_sqs.
        rel, rest = self._get_volunteer_sqs(request, user)
        # featured > MINYEAR acts as a "has actually been featured" filter.
        rel = rel.filter(featured__gt=datetime.datetime(datetime.MINYEAR, 1, 1)) \
                 .order_by('-featured')
        rest = rest.filter(featured__gt=datetime.datetime(datetime.MINYEAR, 1, 1)) \
                   .order_by('-featured')
        count = rel.count() + rest.count()
        mqs = MultiQuerySet(rel, rest)
        mqs.set_count(count)
        return render_page(page, mqs, request=request)
    @add_request_to_kwargs
    def load_requested_page_volunteer(self, page, request, user):
        # Volunteer page limited to videos with subtitle requests matching
        # the languages this user speaks (taken from the request).
        user_langs = get_user_languages_from_request(request)
        rel, rest = self._get_volunteer_sqs(request, user)
        rel = rel.filter(requests_exact__in=user_langs)
        rest = rest.filter(requests_exact__in=user_langs)
        # Merge both querysets lazily, with the total count precomputed.
        count = rel.count() + rest.count()
        mqs = MultiQuerySet(rel, rest)
        mqs.set_count(count)
        return render_page(page, mqs, request=request)
@add_request_to_kwargs
def load_latest_page_volunteer(self, page, request, user):
rel, rest = self._get_volunteer_sqs(request, user)
rel = rel.order_by('-created')
rest = rest.order_by('-created')
count = rel.count() + rest.count()
mqs = MultiQuerySet(rel, rest)
mqs.set_count(count)
return render_page(page, mqs, request=request)
    @add_request_to_kwargs
    def load_popular_page_volunteer(self, page, sort, request, user):
        """Volunteer videos ordered by view count for the requested window."""
        # NOTE(review): duplicates the class-level popular_videos_sorts map
        # but additionally supports 'today' — consider unifying.
        sort_types = {
            'today': 'today_views',
            'week' : 'week_views',
            'month': 'month_views',
            'year' : 'year_views',
            'total': 'total_views'
        }
        # Unknown sort keys silently fall back to weekly views.
        sort_field = sort_types.get(sort, 'week_views')
        rel, rest = self._get_volunteer_sqs(request, user)
        rel = rel.order_by('-%s' % sort_field)
        rest = rest.order_by('-%s' % sort_field)
        count = rel.count() + rest.count()
        mqs = MultiQuerySet(rel, rest)
        mqs.set_count(count)
        return render_page(page, mqs, request=request)
def change_title_video(self, video_pk, title, user):
title = title.strip()
if not user.is_authenticated():
return Error(self.authentication_error_msg)
if not title:
return Error(_(u'Title can\'t be empty'))
try:
video = Video.objects.get(pk=video_pk)
if title and not video.title or video.is_html5() or user.is_superuser:
if title != video.title:
old_title = video.title_display()
video.title = title
video.slug = slugify(video.title)
|
plone/plone.app.mosaic
|
src/plone/app/mosaic/browser/upload.py
|
Python
|
gpl-2.0
| 3,388
| 0
|
# -*- coding: utf-8 -*-
from plone import api
from plone.app.mosaic import _
from zope.publisher.browser import BrowserView
import json
class MosaicUploadView(BrowserView):
"""Handle file uploads"""
def __call__(self):
context = self.context
request = self.request
# Set header to json
request.response.setHeader('Content-Type', 'application/json')
ctr_tool = api.portal.get_tool('content_type_registry')
id = request['uploadfile'].filename
content_type = request['uploadfile'].headers["Content-Type"]
typename = ctr_tool.findTypeName(id, content_type, "")
# 1) check if we are allowed to create an Image in folder
if typename not in [t.id for t in context.getAllowedTypes()]:
error = {}
error['status'] = 1
error['message'] =\
_(u"Not allowed to upload a file of this type to this folder")
return json.dumps(error)
# 2) check if the current user has permissions to add stuff
if not context.portal_membership.checkPermission('Add portal content',
context):
error = {}
error['status'] = 1
error['message'] =\
_(u"You do not have permission to upload files in this folder")
return json.dumps(error)
# Get an unused filename without path
id = self.cleanupFilename(id)
title = request['uploadfile'].filename
newid = context.invokeFactory(type_name=typename, id=id)
if newid is None or newid == '':
newid = id
obj = getattr(context, newid, None)
# Set title
# Attempt to use Archetypes mutator if there is one,
# in case it uses a custom storage
if title:
try:
obj.setTitle(title)
except AttributeError:
obj.title = title
# set primary field
pf = obj.getPrimaryField()
pf.set(obj, request['uploadfile'])
if not obj:
error = {}
error['status'] = 1
error['message'] = _(u"Could not upload the file")
return json.dumps(error)
obj.reindexObject()
message = {}
message
|
['status'] = 0
message['url'] = obj.absolute_url()
message['title'] = title
return json.dumps(
|
message)
def cleanupFilename(self, name):
"""Generate a unique id which doesn't match the system generated ids"""
context = self.context
id = ''
name = name.replace('\\', '/') # Fixup Windows filenames
name = name.split('/')[-1] # Throw away any path part.
for c in name:
if c.isalnum() or c in '._':
id += c
# Raise condition here, but not a lot we can do about that
if context.check_id(id) is None and getattr(context, id, None) is None:
return id
# Now make the id unique
count = 1
while 1:
if count == 1:
sc = ''
else:
sc = str(count)
newid = "copy{0:s}_of_{1:s}".format(sc, id)
if context.check_id(newid) is None \
and getattr(context, newid, None) is None:
return newid
count += 1
|
TheTimmy/spack
|
var/spack/repos/builtin/packages/namd/package.py
|
Python
|
lgpl-2.1
| 5,455
| 0.000183
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License alon
|
g with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import platform
import shutil
import sys
import os
from spack import *
class Namd(MakefilePackage):
"""NAMDis a parallel molecular dynamics code designed for
high-performance simulation of lar
|
ge biomolecular systems."""
homepage = "http://www.ks.uiuc.edu/Research/namd/"
url = "file://{0}/NAMD_2.12_Source.tar.gz".format(os.getcwd())
version('2.12', '2a1191909b1ab03bf0205971ad4d8ee9')
variant('fftw', default='3', values=('none', '2', '3', 'mkl'),
description='Enable the use of FFTW/FFTW3/MKL FFT')
variant('interface', default='none', values=('none', 'tcl', 'python'),
description='Enables TCL and/or python interface')
depends_on('charm')
depends_on('fftw@:2.99', when="fftw=2")
depends_on('fftw@3:', when="fftw=3")
depends_on('intel-mkl', when="fftw=mkl")
depends_on('tcl', when='interface=tcl')
depends_on('tcl', when='interface=python')
depends_on('python', when='interface=python')
def _copy_arch_file(self, lib):
config_filename = 'arch/{0}.{1}'.format(self.arch, lib)
shutil.copy('arch/Linux-x86_64.{0}'.format(lib),
config_filename)
if lib == 'tcl':
filter_file(r'-ltcl8\.5',
'-ltcl{0}'.format(self.spec['tcl'].version.up_to(2)),
config_filename)
def _append_option(self, opts, lib):
if lib != 'python':
self._copy_arch_file(lib)
spec = self.spec
opts.extend([
'--with-{0}'.format(lib),
'--{0}-prefix'.format(lib), spec[lib].prefix
])
@property
def arch(self):
plat = sys.platform
if plat.startswith("linux"):
plat = "linux"
march = platform.machine()
return '{0}-{1}'.format(plat, march)
@property
def build_directory(self):
return '{0}-spack'.format(self.arch)
def edit(self, spec, prefix):
with working_dir('arch'):
with open('{0}.arch'.format(self.build_directory), 'w') as fh:
# this options are take from the default provided
# configuration files
optims_opts = {
'gcc': '-m64 -O3 -fexpensive-optimizations -ffast-math',
'intel': '-O2 -ip'
}
optim_opts = optims_opts[self.compiler.name] \
if self.compiler.name in optims_opts else ''
fh.write('\n'.join([
'NAMD_ARCH = {0}'.format(self.arch),
'CHARMARCH = ',
'CXX = {0.cxx} {0.cxx11_flag}'.format(
self.compiler),
'CXXOPTS = {0}'.format(optim_opts),
'CC = {0}'.format(self.compiler.cc),
'COPTS = {0}'.format(optim_opts),
''
]))
self._copy_arch_file('base')
opts = ['--charm-base', spec['charm'].prefix]
fftw_version = spec.variants['fftw'].value
if fftw_version == 'none':
opts.append('--without-fftw')
elif fftw_version == 'mkl':
self._append_option(opts, 'mkl')
else:
_fftw = 'fftw{0}'.format('' if fftw_version == '2' else '3')
self._copy_arch_file(_fftw)
opts.extend(['--with-{0}'.format(_fftw),
'--fftw-prefix', spec['fftw'].prefix])
interface_type = spec.variants['interface'].value
if interface_type != 'none':
self._append_option(opts, 'tcl')
if interface_type == 'python':
self._append_option(opts, 'python')
else:
opts.extend([
'--without-tcl',
'--without-python'
])
config = Executable('./config')
config(self.build_directory, *opts)
def install(self, spec, prefix):
with working_dir(self.build_directory):
mkdirp(prefix.bin)
install('namd2', prefix.bin)
# I'm not sure this is a good idea or if an autoload of the charm
# module would not be better.
install('charmrun', prefix.bin)
|
JoshuaSkelly/TroubleInCloudLand
|
main.py
|
Python
|
mit
| 12,372
| 0.003476
|
#!/usr/bin/python
import pygame
import enemies
from core import balloon, bullet, game, gem, particle, player, world
from scenes import credits, scene, splashscreen
from ui import menu, text
from utils import prettyprint, utility, vector
from utils.settings import *
pygame.init()
utility.read_settings()
if settings_list[SETTING_FULLSCREEN]:
screen = utility.set_fullscreen()
else:
screen = utility.set_fullscreen(False)
pygame.display.set_icon(utility.load_image('icon'))
pygame.display.set_caption('Trouble In CloudLand v1.1')
screen.fill((0, 0, 0))
tempText = text.Text(FONT_PATH, 36, (255, 255, 255))
tempText.set_text('Loading...')
tempText.position = vector.Vector2d((SCREEN_WIDTH / 2) - (tempText.image.get_width() / 2), (SCREEN_HEIGHT / 2) - (tempText.image.get_height() / 2))
tempText.update()
tempText.draw(screen)
pygame.display.flip()
try:
pygame.mixer.set_reserved(MUSIC_CHANNEL)
pygame.mixer.Channel(MUSIC_CHANNEL).set_volume(1)
pygame.mixer.set_reserved(PLAYER_CHANNEL)
pygame.mixer.Channel(PLAYER_CHANNEL).set_volume(1)
pygame.mixer.set_reserved(OW_CHANNEL)
pygame.mixer.Channel(OW_CHANNEL).
|
set_volume(1)
pygame.mixer.set_reserved(BAAKE_CHANNEL)
pygame.mixer.Chan
|
nel(BAAKE_CHANNEL).set_volume(1)
pygame.mixer.set_reserved(BOSS_CHANNEL)
pygame.mixer.Channel(BOSS_CHANNEL).set_volume(1)
pygame.mixer.set_reserved(PICKUP_CHANNEL)
pygame.mixer.Channel(PICKUP_CHANNEL).set_volume(1)
except:
utility.sound_active = False
print('WARNING! - Sound not initialized.')
pygame.mouse.set_visible(False)
music_list = [
utility.load_sound('menuMusic'),
utility.load_sound('music0'),
utility.load_sound('music1'),
utility.load_sound('music2'),
utility.load_sound('bossMusic')
]
world.load_data()
player.load_data()
bullet.load_data()
pygame.event.pump()
enemies.baake.load_data()
balloon.load_data()
gem.load_data()
pygame.event.pump()
enemies.moono.load_data()
enemies.batto.load_data()
enemies.rokubi.load_data()
pygame.event.pump()
enemies.haoya.load_data()
enemies.yurei.load_data()
enemies.bokko.load_data()
pygame.event.pump()
enemies.hakta.load_data()
enemies.raayu.load_data()
enemies.paajo.load_data()
pygame.event.pump()
enemies.boss.load_data()
particle.load_data()
menu.load_data()
for event in pygame.event.get():
pass
splashscreen.SplashScreen(screen, 'pygamesplash')
utility.play_music(music_list[MENU_MUSIC])
splashscreen.SplashScreen(screen, 'gameSplash')
if settings_list[WORLD_UNLOCKED] == 0:
new_scene = scene.TutorialScene()
elif settings_list[WORLD_UNLOCKED] == 1:
new_scene = scene.ForestScene()
elif settings_list[WORLD_UNLOCKED] == 2:
new_scene = scene.RockyScene()
elif settings_list[WORLD_UNLOCKED] == 3:
new_scene = scene.PinkScene()
game_is_running = True
main_menu_dictionary = {
START_GAME: ('Play', 'Start a New Game'),
OPTION_MENU: ('Options', 'Change Sound and Video Options'),
CREDIT_MENU: ('Credits', 'Who We Are, What We Did'),
EXIT_GAME: ('Exit', 'Exit the Game')
}
world_menu_dictionary = {
TUTORIAL: ('Tutorial', 'Start the Tutorial [Learn]'),
WORLD1: ('Cloudopolis', 'Start Playing Cloudopolis [Apprentice]'),
WORLD2: ('Nightmaria', 'Start Playing Nightmaria [Journeyman]'),
WORLD3: ('Opulent Dream', 'Start Playing Opulent Dream [Master]'),
EXIT_OPTIONS: ('Back', 'Go Back to the Main Menu')
}
option_menu_dictionary = {
SOUND_MENU: ('Sound Options', 'Change Sound Options'),
DISPLAY_MENU: ('Video Options', 'Change Video Options'),
CHANGE_SENSITIVITY: ('Mouse Sensitivity: ' + prettyprint.mouse_sensitivity(settings_list[SENSITIVITY]), 'Change Mouse Sensitivity'),
EXIT_OPTIONS: ('Back', 'Go Back to the Main Menu')
}
sound_menu_dictionary = {
TOGGLE_SFX: ('Sound Effects: ' + prettyprint.on(settings_list[SFX]), 'Turn ' + prettyprint.on(not settings_list[SFX]) + ' Sound Effects'),
TOGGLE_MUSIC: ('Music: ' + prettyprint.on(settings_list[MUSIC]), 'Turn ' + prettyprint.on(not settings_list[MUSIC]) + ' Music'),
EXIT_OPTIONS: ('Back', 'Go Back to the Option Menu')
}
display_menu_dictionary = {
TOGGLE_PARTICLES: ('Particles: ' + prettyprint.able(settings_list[PARTICLES]), 'Turn ' + prettyprint.on(not settings_list[PARTICLES]) + ' Particle Effects'),
TOGGLE_FULLSCREEN: ('Video Mode: ' + prettyprint.screen_mode(settings_list[SETTING_FULLSCREEN]), 'Switch To ' + prettyprint.screen_mode(not settings_list[SETTING_FULLSCREEN]) + ' Mode'),
EXIT_OPTIONS: ('Back', 'Go Back to the Main Menu')
}
sensitivity_menu_dictionary = {
0: ('Very Low', 'Change Sensitivity to Very Low'),
1: ('Low', 'Change Sensitivity to Low'),
2: ('Normal', 'Change Sensitivity to Normal'),
3: ('High', 'Change Sensitivity to High'),
4: ('Very High', 'Change Sensitivity to Very High')
}
menu_bounds = (0, SCREEN_HEIGHT / 3, SCREEN_WIDTH, SCREEN_HEIGHT)
while game_is_running:
menu_result = menu.Menu(screen,
music_list[MENU_MUSIC],
new_scene,
menu_bounds,
('Trouble in Cloudland', 80, SCREEN_WIDTH / 2, SCREEN_HEIGHT / 4),
main_menu_dictionary).show()
if menu_result == START_GAME:
last_highlighted = settings_list[WORLD_UNLOCKED]
world_result = menu.Menu(screen,
music_list[MENU_MUSIC],
new_scene,
menu_bounds,
('Choose a World', 96, SCREEN_WIDTH / 2, SCREEN_HEIGHT / 4),
world_menu_dictionary,
last_highlighted).show()
if world_result == TUTORIAL:
game.Game(screen, 0, music_list).run()
elif world_result == EXIT_OPTIONS:
world_result = False
elif world_result is not False:
utility.fade_music()
utility.play_music(music_list[world_result - 1], True)
game.Game(screen, world_result - 1, music_list).run()
elif menu_result == OPTION_MENU:
option_result = True
last_highlighted = 0
while option_result:
option_result = menu.Menu(screen,
music_list[MENU_MUSIC],
new_scene,
menu_bounds,
('Options', 96, SCREEN_WIDTH / 2, SCREEN_HEIGHT / 4),
option_menu_dictionary,
last_highlighted).show()
if option_result == SOUND_MENU:
sound_result = True
last_highlighted = 0
while sound_result:
sound_result = menu.Menu(screen,
music_list[MENU_MUSIC],
new_scene,
menu_bounds,
('Sound Options', 96, SCREEN_WIDTH / 2, SCREEN_HEIGHT / 4),
sound_menu_dictionary,
last_highlighted).show()
if sound_result == TOGGLE_SFX:
settings_list[SFX] = not settings_list[SFX]
last_highlighted = 0
elif sound_result == TOGGLE_MUSIC:
settings_list[MUSIC] = not settings_list[MUSIC]
if not settings_list[MUSIC]:
pygame.mixer.Channel(MUSIC_CHANNEL).stop()
last_highlighted = 1
elif sound_result == EXIT_OPTIONS:
sound_result = False
sound_menu_dictionary = {
TOGGLE_SFX: ('Sound Effects: ' + prettyprint.on(settings_list[SFX]), 'Turn ' + prettyprint.on(not settings_list[SFX]) + ' Sound Ef
|
mitmedialab/MediaCloud-Web-Tools
|
server/views/topics/topiclist.py
|
Python
|
apache-2.0
| 4,869
| 0.002875
|
import flask_login
import logging
from flask import jsonify, request
from server import app, user_db
from server.auth import user_mediacloud_client, user_name, user_admin_mediacloud_client,\
user_is_admin
from server.util.request import form_fields_required, arguments_required, api_error_handler
logger = logging.getLogger(__name__)
@app.route('/api/topics/search', methods=['GET'])
@flask_login.login_required
@arguments_required('searchStr')
@api_error_handler
def topic_search():
search_str = request.args['searchStr']
mode = request.args['mode'] if 'mode' in request.args else 'list'
user_mc = user_admin_mediacloud_client()
results = user_mc.topicList(name=search_str, limit=50)
if mode == 'full':
matching_topics = results['topics']
else:
# matching_topics = [{'name': x['name'], 'id': x['topics_id']} for x in results['topics']]
matching_topics = results['topics']
return jsonify({'topics': matching_topics})
@app.route('/api/topics/admin/list', methods=['GET'])
@flask_login.login_required
@api_error_handler
def topic_admin_list():
user_mc = user_admin_mediacloud_client()
# if a non-admin user calls this, using user_mc grantees this won't be a security hole
# but for admins this will return ALL topics
topics = user_mc.topicList(limit=500)['topics']
# we also want snapshot info
# topics = _add_snapshots_info_to_topics(topics)
topics = sorted(topics, key=lambda t: t['topics_id'], reverse=True)
return jsonify(topics)
@app.route('/api/topics/favorites', methods=['GET'])
@flask_login.login_required
@api_error_handler
def topic_favorites():
user_mc = user_mediacloud_client()
favorite_topic_ids = user_db.get_users_lists(user_name(), 'favoriteTopics')
favorited_topics = [user_mc.topic(tid) for tid in favorite_topic_ids]
for t in favorited_topics:
t['isFavorite'] = True
retu
|
rn jsonify({'topics': favorited_topics})
@app.route('/api/topics/queued-and-running', methods=['GET'])
@flask_login.login_required
@api_error_handler
def does_user_have_a_running_topic():
# save a costly set of paging queries when the user is admin
if user_is_admin():
return jsonify([])
# non-admin, so do the real check
user_mc = user_mediacloud_client()
|
queued_and_running_topics = []
more_topics = True
link_id = None
while more_topics:
results = user_mc.topicList(link_id=link_id, limit=100)
topics = results['topics']
queued_and_running_topics += [t for t in topics if t['state'] in ['running', 'queued']
and t['user_permission'] in ['admin']]
more_topics = 'next' in results['link_ids']
if more_topics:
link_id = results['link_ids']['next']
return jsonify(queued_and_running_topics)
def topics_user_can_access(topics, user_email, is_admin):
# we can't see all the permissions for a topic in topicList results, so we have to use some guesses here.
# pull out just the topics this user has permissions for (ie. remove public ones they don't own)
user_topics = []
for t in topics:
user_is_owner = user_email in [o['email'] for o in t['owners']]
# admins can see all topics, so to make this more manageable only show admins ones they own
ok_to_show = user_is_owner if is_admin else user_is_owner or (not t['is_public'])
if ok_to_show:
user_topics.append(t)
return user_topics
@app.route('/api/topics/personal', methods=['GET'])
@flask_login.login_required
@api_error_handler
def topic_personal():
user_mc = user_mediacloud_client()
link_id = request.args.get('linkId')
results = user_mc.topicList(link_id=link_id, limit=1000)
user_accessible_topics = topics_user_can_access(results['topics'], flask_login.current_user.profile['email'],
user_is_admin())
# update this in place so the results['link_ids'] don't change (for paging support)
results['topics'] = add_user_favorite_flag_to_topics(user_accessible_topics)
return jsonify(results)
@app.route('/api/topics/<topics_id>/favorite', methods=['PUT'])
@flask_login.login_required
@form_fields_required('favorite')
@api_error_handler
def topic_set_favorited(topics_id):
favorite = int(request.form["favorite"])
username = user_name()
if favorite == 1:
user_db.add_item_to_users_list(username, 'favoriteTopics', int(topics_id))
else:
user_db.remove_item_from_users_list(username, 'favoriteTopics', int(topics_id))
return jsonify({'isFavorite': favorite == 1})
def add_user_favorite_flag_to_topics(topics):
user_favorited = user_db.get_users_lists(user_name(), 'favoriteTopics')
for t in topics:
t['isFavorite'] = t['topics_id'] in user_favorited
return topics
|
jpzm/bw
|
__init__.py
|
Python
|
gpl-2.0
| 1,267
| 0
|
# vim: set fileencoding=utf-8 :
# Copyright (C) 2008 Joao Paulo de Souza Medeiros
#
# Author(s): Joao Paulo de Souza Medeiros <ignotus21@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from boxes import BWHBox, BWVBox, BWTable, BWStatusbar, BWScrolledW
|
indow
from buttons import BWStockButton, BWToggleStockButton
from comboboxes import BWChangeableComboBoxEntry
from expanders import BWExpander
from frames import BWFrame
from notebooks import BWNotebook
from labels import BWLabel, BWSectionLabel
from textview import BWTextView, B
|
WTextEditor
from windows import BWWindow, BWMainWindow, BWAlertDialog
|
wangjun/pyload
|
module/plugins/crypter/YoutubeBatch.py
|
Python
|
gpl-3.0
| 6,087
| 0.003286
|
# -*- coding: utf-8 -*-
"""
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License,
or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, see <http://www.gnu.org/licenses/>.
@author: Walter Purcaro
"""
from urlparse import urljoin
import re
from module.common.json_layer import json_loads
from module.plugins.Crypter import Crypter
from module.utils import save_join
API_KEY = "AIzaSyCKnWLNlkX-L4oD1aEzqqhRw1zczeD6_k0"
class YoutubeBatch(Crypter):
__name__ = "YoutubeBatch"
__type__ = "crypter"
__pattern__ = r"https?://(?:www\.)?(m\.)?youtube\.com/(?P<TYPE>user|playlist|view_play_list)(/|.*?[?&](?:list|p)=)(?P<ID>[\w-]+)"
__version__ = "1.00"
__description__ = """Youtube.com channel & playlist decrypter"""
__config__ = [("likes", "bool", "Grab user (channel) liked videos", "False"),
("favorites", "bool", "Grab user (channel) favorite videos", "False"),
("uploads", "bool", "Grab channel unplaylisted videos", "True")]
__author_name__ = ("Walter Purcaro")
__author_mail__ = ("vuolter@gmail.com")
def api_response(self, ref, req):
req.update({"key": API_KEY})
url = urljoin("https://www.googleapis.com/youtube/v3/", ref)
page = self.load(url, get=req)
return json_loads(page)
def getChannel(self, user):
channels = self.api_response("channels", {"part": "id,snippet,contentDetails", "forUsername": user, "maxResults": "50"})
if channels["items"]:
channel = channels["items"][0]
return {"id": channel["id"],
"title": channel["snippet"]["title"],
"relatedPlaylists": channel["contentDetails"]["relatedPlaylists"],
"user": user} # One lone channel for user?
def getPlaylist(self, p_id):
playlists = self.api_response("playlists", {"part": "snippet", "id": p_id})
if playlists["items"]:
playlist = playlists["items"][0]
return {"id": p_id,
"title": playlist["snippet"]["title"],
"channelId": playlist["snippet"]["channelId"],
"channelTitle": playlist["snippet"]["channelTitle"]}
def _getPlaylists(self, id, token=None):
req = {"part": "id", "maxResults": "50", "channelId": id}
if token:
req.update({"pageToken": token})
playlists = self.api_response("playlists", req)
for playlist in playlists["items"]:
yield playlist["id"]
if "nextPageToken" in playlists:
for item in self._getPlaylists(id, playlists["nextPageToken"]):
yield item
def getPlaylists(self, ch_id):
return map(self.getPlaylist, self._getPlaylists(ch_id))
def _getVideosId(self, id, token=None):
req = {"part": "contentDetails", "maxResults": "50", "playlistId": id}
if token:
req.update({"p
|
ageToken": token})
playlist = self.api_
|
response("playlistItems", req)
for item in playlist["items"]:
yield item["contentDetails"]["videoId"]
if "nextPageToken" in playlist:
for item in self._getVideosId(id, playlist["nextPageToken"]):
yield item
def getVideosId(self, p_id):
return list(self._getVideosId(p_id))
def decrypt(self, pyfile):
match = re.match(self.__pattern__, pyfile.url)
m_id = match.group("ID")
m_type = match.group("TYPE")
if m_type == "user":
self.logDebug("Url recognized as Channel")
user = m_id
channel = self.getChannel(user)
if channel:
playlists = self.getPlaylists(channel["id"])
self.logDebug("%s playlist\s found on channel \"%s\"" % (len(playlists), channel["title"]))
relatedplaylist = {p_name: self.getPlaylist(p_id) for p_name, p_id in channel["relatedPlaylists"].iteritems()}
self.logDebug("Channel's related playlists found = %s" % relatedplaylist.keys())
relatedplaylist["uploads"]["title"] = "Unplaylisted videos"
relatedplaylist["uploads"]["checkDups"] = True #: checkDups flag
for p_name, p_data in relatedplaylist.iteritems():
if self.getConfig(p_name):
p_data["title"] += " of " + user
playlists.append(p_data)
else:
playlists = []
else:
self.logDebug("Url recognized as Playlist")
playlists = [self.getPlaylist(m_id)]
if not playlists:
self.fail("No playlist available")
addedvideos = []
urlize = lambda x: "https://www.youtube.com/watch?v=" + x
for p in playlists:
p_name = p["title"]
p_videos = self.getVideosId(p["id"])
p_folder = save_join(self.config['general']['download_folder'], p["channelTitle"], p_name)
self.logDebug("%s video\s found on playlist \"%s\"" % (len(p_videos), p_name))
if not p_videos:
continue
elif "checkDups" in p:
p_urls = [urlize(v_id) for v_id in p_videos if v_id not in addedvideos]
self.logDebug("%s video\s available on playlist \"%s\" after duplicates cleanup" % (len(p_urls), p_name))
else:
p_urls = map(urlize, p_videos)
self.packages.append((p_name, p_urls, p_folder)) #: folder is NOT recognized by pyload 0.4.9!
addedvideos.extend(p_videos)
|
valentin-krasontovitsch/ansible
|
lib/ansible/modules/cloud/openstack/_os_server_actions.py
|
Python
|
gpl-3.0
| 533
| 0.003752
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
|
# Copyright: (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_M
|
ETADATA = {'metadata_version': '1.1',
'status': ['removed'],
'supported_by': 'community'}
from ansible.module_utils.common.removed import removed_module
if __name__ == '__main__':
removed_module(removed_in='2.8')
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.