repo_name
stringlengths 5
100
| path
stringlengths 4
231
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 6
947k
| score
float64 0
0.34
| prefix
stringlengths 0
8.16k
| middle
stringlengths 3
512
| suffix
stringlengths 0
8.17k
|
|---|---|---|---|---|---|---|---|---|
kate-harrison/west
|
west/data_management.py
|
Python
|
gpl-2.0
| 23,067
| 0.00065
|
from abc import ABCMeta, abstractmethod, abstractproperty
from ruleset import Ruleset
from device import Device
from propagation_model import PropagationModel
from region import Region
from boundary import Boundary
from data_map import DataMap2D, DataMap3D, DataMap2DWithFixedBoundingBox
from population import PopulationData
from custom_logging import getModuleLogger
import os
import textwrap
from configuration import base_data_directory
def _is_class(obj):
"""Returns True if ``obj`` is a class and False if it is an instance."""
return issubclass(obj.__class__, type)
def _is_object(obj):
"""Returns True if ``obj`` is an instance and False if it is a class."""
return not _is_class(obj)
def _make_string(obj):
def obj_belongs_to(class_object):
return (_is_class(obj) and issubclass(obj, class_object)) or (
_is_object(obj) and isinstance(obj, class_object))
def get_class_name():
if _is_class(obj):
return obj.__name__
else:
return obj.__class__.__name__
if obj_belongs_to(Ruleset) or obj_belongs_to(PropagationModel) or \
obj_belongs_to(Boundary) or obj_belongs_to(DataMap2D):
return get_class_name()
elif obj_belongs_to(Device):
if _is_class(obj):
raise TypeError("Expected an actual Device object.")
else:
if obj.is_portable():
return "Device(portable)"
else:
return "Device(fixed,HAAT=%d)" % obj.get_haat()
elif obj_belongs_to(Region):
return get_class_name()
class Specification(object):
"""
A Specification is the minimum amount of information needed to describe a
set of data. A Specification can be used to create data, fetch data,
and automatically map data.
Specifications are best-effort data caches which are meant to aid in data
generation and organization.
Guiding principles:
* The user is responsible for cache invalidation.
* A best-effort attempt at avoiding naming collisions has been made but
nothing should be considered certain.
* When possible, load data from disk. When not possible, generate the
data, save it, and then load it from disk.
* When possible, allow the user to specify either a class name or an
instance of the class. If an instance is specified, that instance
will be used if an instance is needed. Otherwise, an instance will
be created only when it becomes necessary for data generation.
Notes for extending this class:
* Become familiar with the use of the many helper functions. See e.g. the
init function for :class:`SpecificationWhitespaceMap` for example
usage.
* Be sure that :meth:`make_data` returns the data in addition to saving
it.
* Implement :meth:`get_map` if possible.
* Filenames should not exceed 255 characters in order to be compatible
with common file systems.
"""
__metaclass__ = ABCMeta
@abstractmethod
def to_string(self):
"""Returns the string representation of the Specification."""
pass
@abstractproperty
def subdirectory(self):
"""Returns a string with the name of the data subdirectory to be used
for storing the data created by this Specification."""
pass
@abstractmethod
def make_data(self):
"""
Creates the data based on the information in the Specification. Must
both save and return created data.
See also: :meth:`save_data`.
"""
pass
def get_map(self):
"""Optionally-implemented method which will create the default map for
the Specification."""
raise NotImplementedError("")
def _get_datamap_spec(self):
"""If possible, returns the internal :class:`SpecificationDataMap`. To
succeed, the Specification must satisfy at least one of the following:
* Be a SpecificationDataMap
* Have an attribute "datamap_spec" which is a SpecificationDataMap
object
* Have an attribute "region_map_spec" which is a
SpecificationRegionMap object
Raises an AttributeError if no SpecificationDataMap is found.
"""
if isinstance(self, SpecificationDataMap):
return self
if hasattr(self, "datamap_spec"):
return self.datamap_spec
if hasattr(self, "region_map_spec"):
return self.region_map_spec._get_datamap_spec()
raise AttributeError("No datamap specification found (expected to "
"find one of the following attributes: "
"datamap_spec, region_map_spec")
def _convert_to_class_and_object(self, var_name, obj,
may_create_new_objects=True, **kwargs):
"""
Sets the internal variables [var_name]_class, [var_name]_object based on
``obj``. ``obj`` may be either a class or an instance of a class.
If ``obj`` is a class, the object will be created only if
``may_create_new_objects`` is True. In that case, the keyword
arguments are passed to the constructor.
If ``obj`` is an instance, that instance will be used.
"""
if _is_class(obj):
setattr(self, var_name + "_class", obj)
if may_create_new_objects:
setattr(self, var_name + "_object", obj(**kwargs))
else:
setattr(self, var_name + "_object", obj)
setattr(self, var_name + "_class", obj.__class__)
def _boundary_to_class_and_object(self, boundary):
self._convert_to_class_and_object("boundary", boundary)
def _region_to_class_and_object(self, region):
self._convert_to_class_and_object("region", region)
def _ruleset_to_class_and_object(self, ruleset):
self._convert_to_class_and_object("ruleset", ruleset)
def _propagation_model_to_class_and_object(self, propagation_model):
self._convert_to_class_and_object("propagation_model",
propagation_model)
def _store_at_least_class(self, var_name, obj):
"""Stores at minimum the class of ``obj``. If ``obj`` is an instance
(rather than a class), ``obj`` will be stored as well."""
self._convert_to_class_and_object(var_name, obj,
may_create_new_objects=False)
def _create_obj_if_needed(self, var_name, **kwargs):
"""If [var_name]_object does not exist, create it. In that case, the
keyword arguments are passed to the constructor."""
if hasattr(self, var_name + "_object"):
return
obj_class = getattr(self, var_name + "_class")
setattr(self, var_name + "_object", obj_class(**kwargs))
def _expect_of_type(self, obj, expected_types):
"""Raise a TypeError if ``obj`` is neither a subclass nor an instance of
one of the expected types.
expected_types may be either a list or a singleton."""
if not isinstance(expected_types, list):
expected_types = [expected_types]
for e_type in expected_types:
i
|
f not _is_class(
|
e_type):
raise TypeError("Expected type must be a class (got '%s' "
"instead)." % str(expected_types))
if _is_class(obj):
cls = obj
else:
cls = obj.__class__
is_wrong_type = True
for e_type in expected_types:
if issubclass(cls, e_type):
is_wrong_type = False
if is_wrong_type:
raise TypeError("Expected something of a type in %s (either a "
"class or object) but received something of "
"type %s." % (str(expected_types), cls.__name__))
def _expect_is_object(self, obj):
"""Raise a TypeError if ``obj`` is not an instance."""
if not _is_object(obj):
raise TypeError("Expected to receive an instance and instead "
|
richardliaw/ray
|
rllib/tests/test_local.py
|
Python
|
apache-2.0
| 696
| 0
|
import unittest
import ray
from ray.rllib
|
.agents.pg import PGTrainer, DEFAULT_CONFIG
from ray.rllib.utils.test_utils import framework_iterator
class LocalModeTest(unittest.TestCase):
def setUp(self) -> None:
ray.init(local_mode=True)
def tearDown(s
|
elf) -> None:
ray.shutdown()
def test_local(self):
cf = DEFAULT_CONFIG.copy()
cf["model"]["fcnet_hiddens"] = [10]
cf["num_workers"] = 2
for _ in framework_iterator(cf):
agent = PGTrainer(cf, "CartPole-v0")
print(agent.train())
agent.stop()
if __name__ == "__main__":
import pytest
import sys
sys.exit(pytest.main(["-v", __file__]))
|
axxiao/toby
|
ax/wrapper/sqlalchemy.py
|
Python
|
mit
| 1,716
| 0.004662
|
"""
The wrapper for Postgres through SQLAchemy
__author__ = "Alex Xiao <http://www.alexxiao.me/>"
__date__ = "2018-11-03"
__version__ = "0.1"
Version:
0.1 (03/11/2018 AX) : init
"""
from urllib.parse import quote_plus
from sqlalchemy import create_engine, text
import pandas
from ax.log import get_logger
class Connection:
"""
Base Class for all SQL Alchemy Connection
"""
def __init__(self, user, password, logger_name='Toby.DB', db_type='postgresql+psycopg2', host='localhost',
port=5432, db='toby', encoding='utf8'):
self._connection = None
self._uri = None
self._encoding = encoding
self.logger = get_logger(logger_name)
self.connect(db_type, user, password, host, port, db, encoding)
def connect(self, db_type, user, password, host='localhost', port=5432,
|
db='toby', encoding='utf8'):
self._uri = '{}://{}:{}@{}:{}/{}'
if not self._connection or self._connection.closed:
self._connection = create_engine(self._uri.format(db_type, quote_plus(user), quote_plus(password), host,
port, db), client_encoding=encoding).connect()
def disconnect(self,):
self._connection.close()
def reconnect(self,):
if
|
self._connection.closed:
self._connection = create_engine(self._uri, client_encoding=self._encoding).connect()
def query(self, sql, **options):
return pandas.read_sql(text(sql), self._connection, **options)
def execute(self, sql):
self.logger.info('Executing:' + sql)
self._connection.execute(text(sql))
self.logger.info('Done')
|
1ookup/RATDecoders
|
BlueBanana.py
|
Python
|
gpl-2.0
| 3,304
| 0.030266
|
#!/usr/bin/env python
'''
BlueBanana Rat Config Decoder
'''
__description__ = 'BlueBanana Rat Config Extractor'
__author__ = 'Kevin Breen http://techanarchy.net http://malwareconfig.com'
__version__ = '0.1'
__date__ = '2014/04/10'
#Standard Imports Go Here
import os
import sys
import string
from zipfile import ZipFile
from cStringIO import StringIO
from optparse import OptionParser
#Non Standard Imports
try:
from Crypto.Cipher import AES
except ImportError:
print "[+] Couldn't Import Cipher, try 'sudo pip install pycrypto'"
# Main Decode Function Goes Here
'''
data is a read of the file
Must return a python dict of values
'''
def run(data):
newZip = StringIO(data)
with ZipFile(newZip) as zip:
for name in zip.namelist(): # get all the file names
if name == "config.txt": # this file contains the encrypted config
conFile = zip.read(name)
if conFile: #
confRaw = decryptConf(conFile)
conf = configParse(confRaw)
return conf
#Helper Functions Go Here
def DecryptAES(enckey, data):
cipher = AES.new(enckey) # set the cipher
return cipher.decrypt(data) # decrpyt the data
def decryptConf(conFile):
key1 = "15af8sd4s1c5s511"
key2 = "4e3f5a4c592b243f"
first = DecryptAES(key1, conFile.decode('hex'))
second = DecryptAES(key2, first[:-16].decode('hex'))
return second
def configParse(confRaw):
config = {}
clean = filter(lambda x: x in string.printable, confRaw)
list = clean.split("<separator>")
config["Domain"] = list[0]
config["Password"] = list[1]
config["Port1"] = list[2]
config["Port2"] = list[3]
if len(list) > 4:
config["Install Name"] = list[4]
config["Jar Name"] = list[5]
return config
#Recursive Function Goes Here
# Main
if __name__ == "__main__":
parser = OptionParser(usage='usage: %prog inFile outConfig\n' + __description__, version='%prog ' + __version__)
parser.add_option("-r", "--recursive", action='store_true', default=False, help="Recursive Mode")
(options, args) = parser.parse_args()
# If we dont have args quit with help page
if len(args) > 0:
pass
else:
parser.print_help()
sys.exit()
# if we want a recursive extract run this function
if options.recursive == True:
print "[+] Sorry Not Here Yet Come Back Soon"
# If not recurisve try to open file
try:
print "[+] Reading file"
fileData = open(args[0], 'rb').read()
except:
print "[+] Couldn't Open File {0}".format(args[0])
#Run the config extraction
print "[+] Searching for Config"
config = run(fileData)
#If we have a co
|
nfig figure out where to dump it out.
if config == None:
print "[+] Config not found"
sys.exit()
#if you gave me two args im going to assume the 2nd arg is where you want to save the file
if len(args) == 2:
print "[+] Writing Config to file {0}".format(args[1])
with open(args[1], 'a') as outFile:
for key, value in sorted(config.iteritems()):
clean_value = filter(lambda x: x in string.printable, value)
outFile.write("Key: {0}\t
|
Value: {1}\n".format(key,clean_value))
# if no seconds arg then assume you want it printing to screen
else:
print "[+] Printing Config to screen"
for key, value in sorted(config.iteritems()):
clean_value = filter(lambda x: x in string.printable, value)
print " [-] Key: {0}\t Value: {1}".format(key,clean_value)
print "[+] End of Config"
|
marook/minecraft-world-io
|
src/test/marook_test/minecraft_test/tag_test/test_entities.py
|
Python
|
gpl-3.0
| 1,426
| 0.002805
|
#!/usr/bin/env python
#
# Copyright 2011 Markus Pielmeier
#
# This file is part of minecraft-world-io.
#
# minecraft-world-io is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# minecraft-world-io is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with minecraft-world-io. If not, see <http://www.gnu.org/licenses/>.
#
import os
import unittest
from marook.minecraft.tag.en
|
tities import SkeletonParser
from marook.minecraft.tag.entities import SheepParser
class SkeletonParserTest(unittest.TestCase):
def testParseSkeleton(self):
with open(os.path.join('etc', 'dumps', 'skeleton.dump'), 'r') as f:
p = SkeletonParser()
s = p.readEntity(f)
# TODO check skeleton attributes
class SheepParserTest(unitte
|
st.TestCase):
def testParseSheep(self):
with open(os.path.join('etc', 'dumps', 'sheep.dump'), 'r') as f:
p = SheepParser()
s = p.readEntity(f)
# TODO check sheep attributes
|
rokuz/omim
|
tools/android/set_up_android.py
|
Python
|
apache-2.0
| 3,533
| 0.016417
|
#!/usr/bin/python
import os, sys, shutil, collections
from optparse import OptionParser
# Fix for python 2
try:
input = raw_input
except NameError:
pass
def find_recursive(root, subpath, maxdepth=4):
queue = collections.deque([(root, 0)])
if 'PATH' in os.environ:
envpath = os.environ['PATH'].split(':')
relpath = ['..'] * (len(subpath) - 1)
queue.extendleft([(os.path.join(x, *relpath), maxdepth) for x in envpath if 'android' in x.lower()])
while len(queue) > 0:
item = queue.popleft()
if os.path.isfile(os.path.join(item[0], *subpath)):
return os.path.abspath(item[0])
if item[1] < maxdepth:
for name in os.listdir(item[0]):
fullname = os.path.join(item[0], name)
if os.path.isdir(fullname) and '.' not in name:
queue.append((fullname, item[1] + 1))
return None
def read_local_properties():
androidRoot = os.path.join(os.path.dirname(sys.argv[0]), '..', '..', 'android')
propsFile = os.path.join(androidRoot, 'local.properties')
sdkDir = None
ndkDir = None
if os.path.exists(propsFile):
with open(propsFile, 'r') as f:
for line in f:
line = line.strip()
if line.startswith('sdk.dir') and '=' in line:
sdkDir = line.split('=')[1].strip()
elif line.startswith('ndk.dir') and '=' in line:
ndkDir = line.split('=')[1].strip()
return (sdkDir, ndkDir)
def query_path(title, option, default, subpath):
default = '' if not default else os.path.abspath(default)
searchHint = ', "s" to search'
while True:
path = input('Path to {0}{1} [{2}]:'.format(title, searchHint, default)) or default
if len(searchHint) > 0 and path.lower().strip() == 's':
found = find_recursive(os.path.expanduser('~'), subpath)
if found:
default = found
searchHint = ''
else:
break
test = os.path.join(path, *subpat
|
h)
if path and os.path.isfi
|
le(test):
return os.path.abspath(path)
else:
print('Could not find {0}, not an {1} path.'.format(test, title))
sys.exit(1)
def write_local_properties(sdkDir, ndkDir):
content = ''.join([x + '\n' for x in [
'# Autogenerated file',
'# Do not add it to version control',
'sdk.dir={0}'.format(sdkDir),
'ndk.dir={0}'.format(ndkDir)
]])
# Create omim/android/local.properties
androidRoot = os.path.join(os.path.dirname(sys.argv[0]), '..', '..', 'android')
propsFile = os.path.join(androidRoot, 'local.properties')
print('Writing {0}'.format(propsFile))
with open(propsFile, 'w') as f:
f.write(content)
# Copy files to folders
for folder in ['YoPme', 'YoPme2', 'UnitTests']:
destFolder = os.path.join(androidRoot, folder)
if not os.path.exists(destFolder):
os.makedirs(destFolder)
dst = os.path.join(destFolder, 'local.properties')
print('Copying to {0}'.format(dst))
shutil.copy(propsFile, dst)
if __name__ == '__main__':
parser = OptionParser()
parser.add_option('-s', '--sdk', help='Path to Android SDK')
parser.add_option('-n', '--ndk', help='Path to Android NDK')
options, _ = parser.parse_args()
sdkDir = options.sdk
ndkDir = options.ndk
if not options.sdk or not options.ndk:
sdkDirOld, ndkDirOld = read_local_properties()
if not sdkDir:
sdkDir = sdkDirOld
if not ndkDir:
ndkDir = ndkDirOld
sdkDir = query_path('Android SDK', options.sdk, sdkDir, ['platform-tools', 'adb'])
ndkDir = query_path('Android NDK', options.ndk, ndkDir, ['ndk-build'])
write_local_properties(sdkDir, ndkDir)
|
getnikola/nikola
|
nikola/plugins/task/copy_files.py
|
Python
|
mit
| 2,163
| 0.000463
|
# -*- coding: utf-8 -*-
# Copyright © 2012-2022 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the
# Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the
# Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice
# shall be included in all copies or substantial portions of
# the Software.
#
# THE SOFT
|
WARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
AUTHORS
# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Copy static files into the output folder."""
import os
from nikola.plugin_categories import Task
from nikola import utils
class CopyFiles(Task):
"""Copy static files into the output folder."""
name = "copy_files"
def gen_tasks(self):
"""Copy static files into the output folder."""
kw = {
'files_folders': self.site.config['FILES_FOLDERS'],
'output_folder': self.site.config['OUTPUT_FOLDER'],
'filters': self.site.config['FILTERS'],
}
yield self.group_task()
for src in kw['files_folders']:
dst = kw['output_folder']
filters = kw['filters']
real_dst = os.path.join(dst, kw['files_folders'][src])
for task in utils.copy_tree(src, real_dst, link_cutoff=dst):
task['basename'] = self.name
task['uptodate'] = [utils.config_changed(kw, 'nikola.plugins.task.copy_files')]
yield utils.apply_filters(task, filters, skip_ext=['.html'])
|
gemelkelabs/timing_system_software
|
server_py_files/utilities/timing_diagram.py
|
Python
|
mit
| 5,170
| 0.00793
|
import pdb
class TimingDiagram:
def print_diagram(self, xtsm_object):
pdb.set_trace()
seq = xtsm_object.XTSM.getActiveSequence()
cMap=seq.getOwnerXTSM().getDescendentsByType("ChannelMap")[0]
#channelHeir=cMap.createTimingGroupHeirarchy()
#channelRes=cMap.findTimingGroupResolutions()
#Parser out put node. Use TimingProffer
#Control arrays hold what is actually coming out.
seq.collectTimingProffers()
edge_timings = seq.TimingProffer.data['Edge']
class Edge:
def __init__(self, timing_group, channel_number, time, value, tag,
name, initial_value, holding_value):
self.timing_group = timing_group
self.channel_number = channel_number
self.time = time
self.value = value
self.tag = tag
self.max = 0
self.min = 0
self.name = name
self.holding_value = holding_value
self.initial_value = initial_value
def is_same(self,edge):
if ((self.timing_group == edge.timing_group) and
(self.channel_number == edge.channel_number) and
(self.time == edge.time) and
(self.value == edge.value) and
(self.tag == edge.tag)):
return True
else:
return False
edges = []
longest_name = 0
for edge in edge_timings:
for channel in cMap.Channel:
tgroup = int(channel.TimingGroup.PCDATA)
tgroupIndex = int(channel.TimingGroupIndex.PCDATA)
if tgroup == int(edge[0]) and tgroupIndex == int(edge[1]):
name = channel.ChannelName.PCDATA
init_val = ''
hold_val = ''
try:
init_val = channel.InitialValue.PCDATA
except AttributeError:
init_val = 'None '
try:
hold_val = channel.HoldingValue.PCDATA
except AttributeError:
hold_val = 'None '
if len(name) > longest_name:
longest_name = len(name)
edges.append(Edge(edge[0],edge[1],edge[2],edge[3],edge[4],
name, init_val,hold_val))
#pdb.set_trace()
unique_group_channels = []
for edge in edges:
is_found = False
for ugc in unique_group_channels:
if edge.is_same(ugc):
is_found = True
if not is_found:
unique_group_channels.append(edge)
from operator import itemgetter
edge_timings_by_group = sorted(edge_timings, key=itemgetter(2))
edge_timings_by_group_list = []
for edge in edge_timings_by_group:
edge_timings_by_group_list.append(edge.tolist())
#print edge_timings
for p in edge_timings_by_group_list: print p
unique_times = []
for edge in edges:
is_found = False
for t in unique_times:
if edge.time == t.time:
is_found = True
if not is_found:
unique_times.append(edge)
#pdb.set_trace()
for ugc in unique_group_channels:
s = ugc.name.rjust(longest_name)
current_edge = edges[0]
previous_edge = edges[0]
is_first = True
for t in unique_times:
is_found = False
for edge in edges:
if edge.timing_group == ugc.timing_group and edge.channel_number == ugc.channel_number and edge.time == t.time:
is_found = True
current_edge = edge
if is_first:
s = s + '|' + str('%7s' % str(current_edge.initial_value))
is_first = False
previous_edge.value = current_edge.initial_value
if previous_edge.value == '
|
None ':
previous_edge.value = 0
if is_found:
if current_edge.value > previous_edge.value:
s += '^' + str('%7s' % str(current_edge.value))
else:
s += 'v' + str('%7s' % str(current_edge.value))
|
previous_edge = current_edge
else:
s += '|' + '.'*7
s = s + '|' + str('%7s' % str(current_edge.holding_value))
print s
s = "Time (ms)".rjust(longest_name) + '|' + str('%7s' % str("Initial"))
for t in unique_times:
s += '|' + str('%7s' % str(t.time))
s = s + '|' + str('%7s' % str("Holding"))
print s
|
wolverineav/neutron
|
neutron/_i18n.py
|
Python
|
apache-2.0
| 1,355
| 0
|
# All Rights Reserved.
#
# Licensed under the Apache Licens
|
e, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required
|
by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import oslo_i18n
DOMAIN = "neutron"
_translators = oslo_i18n.TranslatorFactory(domain=DOMAIN)
# The primary translation function using the well-known name "_"
_ = _translators.primary
# The contextual translation function using the name "_C"
_C = _translators.contextual_form
# The plural translation function using the name "_P"
_P = _translators.plural_form
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
def get_available_languages():
return oslo_i18n.get_available_languages(DOMAIN)
|
cts-admin/cts
|
cts/home/migrations/0005_auto_20170524_0700.py
|
Python
|
gpl-3.0
| 708
| 0.001412
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-05-24 07:00
from __future__ import unicode_literals
from django.db import migrations
import wa
|
gtail.core.blocks
import wagtail.core.fields
class Migration(migrations.Migration):
dependencies = [
('home', '0004_auto_20170524_0608'),
]
operations = [
migrations.RemoveField(
model_name='homepage',
name='content',
),
migrations.AddField(
model_name='homepage',
name='body',
field=wagtail.core.fields.StreamField((('motto', wagtail.core.blo
|
cks.CharBlock()), ('paragraph', wagtail.core.blocks.RichTextBlock())), blank=True),
),
]
|
geekoftheweek/disk-treemap
|
treemap.py
|
Python
|
mit
| 2,734
| 0.001829
|
#!/usr/bin/env python
'''
Creates an html treemap of disk usage, using the Google Charts API
'''
import json
import os
import subprocess
import sys
def memoize(fn):
stored_results = {}
def memoized(*args):
try:
return stored_results[args]
except KeyError:
result = stored_results[args] = fn(*args)
return result
return memoized
@memoize
def get_folder_size(folder):
total_size = os.path.getsize(folder)
for item in os.listdir(folder):
itempath = os.path.join(folder, item)
if os.path.isfile(itempath):
total_size += os.path.getsize(itempath)
elif os.path.isdir(itempath):
total_size += get_folder_size(itempath)
return total_size
def usage_iter(root):
root = os.path.abspath(root)
root_size = get_folder_size(root)
root_string = "{0}\n{1}".format(root, root_size)
yield [root_string, None, root_size]
for parent, dirs, files in os.walk(root):
for dirname in dirs:
fullpath = os.path.join(parent, dirname)
try:
this_size = get_folder_size(fullpath)
parent_size = get_folder_size(parent)
this_string = "{0}\n{1}".format(fullpath, this_size)
parent_string = "{0}\n{1}".format(parent, parent_size)
yield [this_string, parent_string, this_size]
except OSError:
continue
def json_usage(root):
root = os.path.abspath(root)
result = [['Path', 'Parent', 'Usage']]
result.extend(entry for entry in usage_iter(r
|
oot))
return json.dumps(result)
def main(args):
'''Populates an html template using JSON-formatted output from the
Linux 'du' utility and prints the result'''
html = '''
<html>
<head>
<script type="text/javascript"
|
src="https://www.google.com/jsapi"></script>
<script type="text/javascript">
google.load("visualization", "1", {packages:["treemap"]});
google.setOnLoadCallback(drawChart);
function drawChart() {
// Create and populate the data table.
var data = google.visualization.arrayToDataTable(%s);
// Create and draw the visualization.
var tree = new google.visualization.TreeMap(document.getElementById('chart_div'));
tree.draw(data, { headerHeight: 15, fontColor: 'black' });
}
</script>
</head>
<body>
<div id="chart_div" style="width: 900px; height: 500px;"></div>
<p style="text-align: center">Click to descend. Right-click to ascend.</p>
</body>
</html>
''' % json_usage(args[0])
# ''' % du2json(get_usage(args[0]))
print html
if __name__ == "__main__":
main(sys.argv[1:] or ['.'])
|
GNOME/orca
|
test/keystrokes/firefox/ui_role_tree.py
|
Python
|
lgpl-2.1
| 5,957
| 0.002686
|
#!/usr/bin/python
"""Test of tree output using Firefox."""
from macaroon.playback import *
import utils
sequence = MacroSequence()
sequence.append(PauseAction(3000))
sequence.append(KeyComboAction("<Alt>b"))
sequence.append(KeyComboAction("Return"))
sequence.append(KeyComboAction("Tab"))
sequence.append(KeyComboAction("Tab"))
sequence.append(KeyComboAction("Up"))
sequence.append(KeyComboAction("Up"))
sequence.append(KeyComboAction("Up"))
sequence.append(KeyComboAction("Tab"))
sequence.append(PauseAction(3000))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("<Shift>Tab"))
sequence.append(utils.AssertPresentationAction(
"1. Shift Tab for tree",
["BRAILLE LINE: 'Firefox application Library frame All Bookmarks expanded TREE LEVEL 1'",
" VISIBLE: 'All Bookmarks expanded TREE LEVE', cursor=1",
"SPEECH OUTPUT: 'All Bookmarks.'",
"SPEECH OUTPUT: 'expanded.'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Down"))
sequence.append(utils.AssertPresentationAction(
"2. Down Arrow in tree",
["BRAILLE LINE: 'Firefox application Library frame Bookmarks Toolbar TREE LEVEL 2'",
" VISIBLE: 'Bookmarks Toolbar TREE LEVEL 2', cursor=1",
"SPEECH OUTPUT: 'Bookmarks Toolbar.'",
"SPEECH OUTPUT: 'tree level 2'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Down"))
sequence.append(utils.AssertPresentationAction(
"3. Down Arrow in tree",
["BRAILLE LINE: 'Firefox application Library frame Bookmarks Menu collapsed TREE LEVEL 2'",
" VISIBLE: 'Bookmarks Menu collapsed TREE LE', cursor=1",
"SPEECH OUTPUT: 'Bookmarks Menu.'",
"SPEECH OUTPUT: 'collapsed.'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("KP_Enter"))
sequence.append(utils.AssertPresentationAction(
"4. Basic Where Am I",
["BRAILLE LINE: 'Firefox application Library frame Bookmarks Menu collapsed TREE LEVEL 2'",
" VISIBLE: 'Bookmarks Menu collapsed TREE LE', cursor=1",
"SPEECH OUTPUT: 'Bookmarks Menu tree item.'",
"SPEECH OUTPUT: '2 of 3.'",
"SPEECH OUTPUT: 'collapsed tree level 2'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Right"))
sequence.append(utils.AssertPresentationAction(
"5. Right Arrow to expand fol
|
der",
["BRAILLE LINE: 'Firefox application Library frame Book
|
marks Menu expanded TREE LEVEL 2'",
" VISIBLE: 'Bookmarks Menu expanded TREE LEV', cursor=1",
"SPEECH OUTPUT: 'expanded'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("KP_Enter"))
sequence.append(utils.AssertPresentationAction(
"6. Basic Where Am I",
["BRAILLE LINE: 'Firefox application Library frame Bookmarks Menu expanded TREE LEVEL 2'",
" VISIBLE: 'Bookmarks Menu expanded TREE LEV', cursor=1",
"SPEECH OUTPUT: 'Bookmarks Menu tree item.'",
"SPEECH OUTPUT: '2 of 3.'",
"SPEECH OUTPUT: 'expanded tree level 2'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Down"))
sequence.append(utils.AssertPresentationAction(
"7. Down Arrow in tree",
["BRAILLE LINE: 'Firefox application Library frame GNOME TREE LEVEL 3'",
" VISIBLE: 'GNOME TREE LEVEL 3', cursor=1",
"SPEECH OUTPUT: 'GNOME.'",
"SPEECH OUTPUT: 'tree level 3'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("KP_Enter"))
sequence.append(utils.AssertPresentationAction(
"8. Basic Where Am I",
["BRAILLE LINE: 'Firefox application Library frame GNOME TREE LEVEL 3'",
" VISIBLE: 'GNOME TREE LEVEL 3', cursor=1",
"SPEECH OUTPUT: 'GNOME tree item.'",
"SPEECH OUTPUT: '1 of 2.'",
"SPEECH OUTPUT: 'tree level 3'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Up"))
sequence.append(utils.AssertPresentationAction(
"9. Up Arrow in tree",
["BRAILLE LINE: 'Firefox application Library frame Bookmarks Menu expanded TREE LEVEL 2'",
" VISIBLE: 'Bookmarks Menu expanded TREE LEV', cursor=1",
"SPEECH OUTPUT: 'Bookmarks Menu.'",
"SPEECH OUTPUT: 'expanded.'",
"SPEECH OUTPUT: 'tree level 2'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Left"))
sequence.append(utils.AssertPresentationAction(
"10. Left Arrow to collapse folder",
["BRAILLE LINE: 'Firefox application Library frame Bookmarks Menu collapsed TREE LEVEL 2'",
" VISIBLE: 'Bookmarks Menu collapsed TREE LE', cursor=1",
"SPEECH OUTPUT: 'collapsed'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Up"))
sequence.append(utils.AssertPresentationAction(
"11. Up Arrow in tree",
["BRAILLE LINE: 'Firefox application Library frame Bookmarks Toolbar TREE LEVEL 2'",
" VISIBLE: 'Bookmarks Toolbar TREE LEVEL 2', cursor=1",
"SPEECH OUTPUT: 'Bookmarks Toolbar.'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Up"))
sequence.append(utils.AssertPresentationAction(
"12. Up Arrow in tree",
["BRAILLE LINE: 'Firefox application Library frame All Bookmarks expanded TREE LEVEL 1'",
" VISIBLE: 'All Bookmarks expanded TREE LEVE', cursor=1",
"SPEECH OUTPUT: 'All Bookmarks.'",
"SPEECH OUTPUT: 'expanded.'",
"SPEECH OUTPUT: 'tree level 1'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("Tab"))
sequence.append(utils.AssertPresentationAction(
"13. Tab back to tree table",
["BRAILLE LINE: 'Firefox application Library frame tree table Bookmarks Toolbar table row TREE LEVEL 1'",
" VISIBLE: 'Bookmarks Toolbar table row TR', cursor=1",
"SPEECH OUTPUT: 'Bookmarks Toolbar '"]))
sequence.append(KeyComboAction("<Alt>F4"))
sequence.append(utils.AssertionSummaryAction())
sequence.start()
|
madzebra/BitSend
|
qa/rpc-tests/zmq_test.py
|
Python
|
mit
| 3,224
| 0.006514
|
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitsend Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test ZMQ interface
#
from test_framework.test_framework import BitsendTestFramework
from test_framework.util import *
import zmq
import struct
class ZMQTest (BitsendTestFramework):
    """Functional test for the ZMQ notification interface.

    Subscribes to the ``hashblock`` and ``hashtx`` topics on node 0 and
    checks that generated blocks and broadcast transactions arrive over
    ZMQ with the expected hashes and monotonically increasing sequence
    numbers.
    """

    # ZMQ publisher port; a class attribute so methods can read self.port.
    # (The corrupted source assigned a throwaway local `port` in __init__,
    # which would make every self.port access raise AttributeError.)
    port = 28332

    def __init__(self):
        super().__init__()
        self.num_nodes = 4

    def setup_nodes(self):
        """Create the SUB socket, subscribe to both topics, start the nodes
        with node 0 publishing on self.port."""
        self.zmqContext = zmq.Context()
        self.zmqSubSocket = self.zmqContext.socket(zmq.SUB)
        self.zmqSubSocket.setsockopt(zmq.SUBSCRIBE, b"hashblock")
        self.zmqSubSocket.setsockopt(zmq.SUBSCRIBE, b"hashtx")
        self.zmqSubSocket.connect("tcp://127.0.0.1:%i" % self.port)
        return start_nodes(self.num_nodes, self.options.tmpdir, extra_args=[
            ['-zmqpubhashtx=tcp://127.0.0.1:'+str(self.port), '-zmqpubhashblock=tcp://127.0.0.1:'+str(self.port)],
            [],
            [],
            []
        ])

    def run_test(self):
        self.sync_all()

        genhashes = self.nodes[0].generate(1)
        self.sync_all()

        print("listen...")
        # Each multipart message is [topic, body, 4-byte LE sequence number].
        msg = self.zmqSubSocket.recv_multipart()
        topic = msg[0]
        assert_equal(topic, b"hashtx")
        body = msg[1]
        nseq = msg[2]
        msgSequence = struct.unpack('<I', msg[-1])[-1]
        assert_equal(msgSequence, 0)  # must be sequence 0 on hashtx

        msg = self.zmqSubSocket.recv_multipart()
        topic = msg[0]
        body = msg[1]
        msgSequence = struct.unpack('<I', msg[-1])[-1]
        assert_equal(msgSequence, 0)  # must be sequence 0 on hashblock
        blkhash = bytes_to_hex_str(body)
        assert_equal(genhashes[0], blkhash)  # blockhash from generate must equal the hash received over zmq

        n = 10
        genhashes = self.nodes[1].generate(n)
        self.sync_all()

        zmqHashes = []
        blockcount = 0
        # Expect n hashblock and n hashtx (coinbase) messages, interleaved.
        for x in range(0, n * 2):
            msg = self.zmqSubSocket.recv_multipart()
            topic = msg[0]
            body = msg[1]
            if topic == b"hashblock":
                zmqHashes.append(bytes_to_hex_str(body))
                msgSequence = struct.unpack('<I', msg[-1])[-1]
                assert_equal(msgSequence, blockcount + 1)
                blockcount += 1

        for x in range(0, n):
            assert_equal(genhashes[x], zmqHashes[x])  # blockhash from generate must equal the hash received over zmq

        # test tx from a second node
        hashRPC = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1.0)
        self.sync_all()

        # now we should receive a zmq msg because the tx was broadcast
        msg = self.zmqSubSocket.recv_multipart()
        topic = msg[0]
        body = msg[1]
        hashZMQ = ""
        if topic == b"hashtx":
            hashZMQ = bytes_to_hex_str(body)
            msgSequence = struct.unpack('<I', msg[-1])[-1]
            assert_equal(msgSequence, blockcount + 1)

        assert_equal(hashRPC, hashZMQ)  # txid from the RPC must equal the hash received over zmq

if __name__ == '__main__':
    ZMQTest().main()
|
AustinHartman/randomPrograms
|
stringRep.py
|
Python
|
gpl-3.0
| 148
| 0.006757
|
# Shift selected letters forward by two (K->M, O->Q, E->G).
string = input("What is your string?\n")
# str.replace() returns a new string; the original discarded every result
# (no-ops) and indexed with the builtin `max` ('M'[max]), a TypeError.
# Chain the replacements and rebind instead.
string = string.replace('K', 'M').replace('O', 'Q').replace('E', 'G')
print(string)
|
mrnamingo/vix4-34-enigma2-bcm
|
lib/python/Components/ServiceScan.py
|
Python
|
gpl-2.0
| 7,567
| 0.037531
|
from enigma import eComponentScan, iDVBFrontend
from Components.NimManager import nimmanager as nimmgr
from Tools.Transponder import getChannelNumber
class ServiceScan:
Idle = 1
Running = 2
Done = 3
Error = 4
Errors = {
0: _("error starting scanning"),
1: _("error while scanning"),
2: _("no resource manager"),
3: _("no channel list")
}
def scanStatusChanged(self):
if self.state == self.Running:
self.progressbar.setValue(self.scan.getProgress())
self.lcd_summary.updateProgress(self.scan.getProgress())
if self.scan.isDone():
errcode = self.scan.getError()
if errcode == 0:
self.state = self.Done
self.servicelist.listAll()
else:
self.state = self.Error
self.errorcode = errcode
self.network.setText("")
self.transponder.setText("")
else:
result = self.foundServices + self.scan.getNumServices()
percentage = self.scan.getProgress()
if percentage > 99:
percentage = 99
#TRANSLATORS: The stb is performing a channel scan, progress percentage is printed in '%d' (and '%%' will show a single '%' symbol)
message = ngettext("Scanning - %d%% completed", "Scanning - %d%% completed", percentage) % percentage
message += ", "
#TRANSLATORS: Intermediate scanning result, '%d' channel(s) have been found so far
message += ngettext("%d channel found", "%d channels found", result) % result
self.text.setText(message)
transponder = self.scan.getCurrentTransponder()
network = ""
tp_text = ""
if transponder:
tp_type = transponder.getSystem()
if tp_type == iDVBFrontend.feSatellite:
network = _("Satellite")
tp = transponder.getDVBS()
orb_pos = tp.orbital_position
try:
sat_name = str(nimmgr.getSatDescription(orb_pos))
except KeyError:
sat_name = ""
if orb_pos > 1800: # west
orb_pos = 3600 - orb_pos
h = _("W")
else:
h = _("E")
if ("%d.%d" % (orb_pos/10, orb_pos%10)) in sat_name:
network = sat_name
else:
network = "%s %d.%d %s" % (sat_name, orb_pos / 10, orb_pos % 10, h)
tp_text = { tp.System_DVB_S : "DVB-S", tp.System_DVB_S2 : "DVB-S2" }.get(tp.system, "")
if tp_text == "DVB-S2":
tp_text = "%s %s" % ( tp_text,
{ tp.Modulation_Auto : "Auto", tp.Modulation_QPSK : "QPSK",
tp.Modulation_8PSK : "8PSK", tp.Modulation_QAM16 : "QAM16" }.get(tp.modulation, ""))
tp_text = "%s %d%c / %d / %s" % ( tp_text, tp.frequency/1000,
{ tp.Polarisation_Horizontal : 'H', tp.Polarisation_Vertical : 'V', tp.Polarisation_CircularLeft : 'L',
tp.Polarisation_CircularRight : 'R' }.get(tp.polarisation, ' '),
tp.symbol_rate/1000,
{ tp.FEC_Auto : "AUTO", tp.FEC_1_2 : "1/2", tp.FEC_2_3 : "2/3",
tp.FEC_3_4 : "3/4", tp.FEC_5_6 : "5/6", tp.FEC_7_8 : "7/8",
tp.FEC_8
|
_9 : "8/9", tp.FEC_3_5 : "3/5", tp.FEC_4_5 : "4/5",
tp.FEC_9_10 : "9/10", tp.FEC_None : "NONE" }.get(tp.fec, ""))
elif tp_type == iDVBFrontend.feCable:
network = _("Cable")
tp = transponder.getDVBC()
tp_text = "DVB-C %s %d / %d / %s" %( { tp.Modulation_Auto : "AUTO",
tp.Modulation_QAM16 : "QAM16", tp.Modulation_QAM32 : "QAM32",
tp.Modulation_QAM64 : "QAM64", tp.Modulation_QAM128 : "QAM128",
tp.Modulation_QAM256 : "QAM25
|
6" }.get(tp.modulation, ""),
tp.frequency,
tp.symbol_rate/1000,
{ tp.FEC_Auto : "AUTO", tp.FEC_1_2 : "1/2", tp.FEC_2_3 : "2/3",
tp.FEC_3_4 : "3/4", tp.FEC_5_6 : "5/6", tp.FEC_7_8 : "7/8",
tp.FEC_8_9 : "8/9", tp.FEC_3_5 : "3/5", tp.FEC_4_5 : "4/5", tp.FEC_9_10 : "9/10", tp.FEC_None : "NONE" }.get(tp.fec_inner, ""))
elif tp_type == iDVBFrontend.feTerrestrial:
network = _("Terrestrial")
tp = transponder.getDVBT()
channel = getChannelNumber(tp.frequency, self.scanList[self.run]["feid"])
if channel:
channel = _("CH") + "%s " % channel
freqMHz = "%0.1f MHz" % (tp.frequency/1000000.)
tp_text = "%s %s %s %s" %(
{
tp.System_DVB_T_T2 : "DVB-T/T2",
tp.System_DVB_T : "DVB-T",
tp.System_DVB_T2 : "DVB-T2"
}.get(tp.system, ""),
{
tp.Modulation_QPSK : "QPSK",
tp.Modulation_QAM16 : "QAM16", tp.Modulation_QAM64 : "QAM64",
tp.Modulation_Auto : "AUTO", tp.Modulation_QAM256 : "QAM256"
}.get(tp.modulation, ""),
"%s%s" % (channel, freqMHz.replace(".0","")),
{
tp.Bandwidth_8MHz : "Bw 8MHz", tp.Bandwidth_7MHz : "Bw 7MHz", tp.Bandwidth_6MHz : "Bw 6MHz",
tp.Bandwidth_Auto : "Bw Auto", tp.Bandwidth_5MHz : "Bw 5MHz",
tp.Bandwidth_1_712MHz : "Bw 1.712MHz", tp.Bandwidth_10MHz : "Bw 10MHz"
}.get(tp.bandwidth, ""))
else:
print "unknown transponder type in scanStatusChanged"
self.network.setText(network)
self.transponder.setText(tp_text)
if self.state == self.Done:
result = self.foundServices + self.scan.getNumServices()
self.text.setText(ngettext("Scanning completed, %d channel found", "Scanning completed, %d channels found", result) % result)
if self.state == self.Error:
self.text.setText(_("ERROR - failed to scan (%s)!") % (self.Errors[self.errorcode]) )
if self.state == self.Done or self.state == self.Error:
if self.run != len(self.scanList) - 1:
self.foundServices += self.scan.getNumServices()
self.execEnd()
self.run += 1
self.execBegin()
def __init__(self, progressbar, text, servicelist, passNumber, scanList, network, transponder, frontendInfo, lcd_summary):
self.foundServices = 0
self.progressbar = progressbar
self.text = text
self.servicelist = servicelist
self.passNumber = passNumber
self.scanList = scanList
self.frontendInfo = frontendInfo
self.transponder = transponder
self.network = network
self.run = 0
self.lcd_summary = lcd_summary
def doRun(self):
self.scan = eComponentScan()
self.frontendInfo.frontend_source = lambda : self.scan.getFrontend()
self.feid = self.scanList[self.run]["feid"]
self.flags = self.scanList[self.run]["flags"]
self.networkid = 0
if self.scanList[self.run].has_key("networkid"):
self.networkid = self.scanList[self.run]["networkid"]
self.state = self.Idle
self.scanStatusChanged()
for x in self.scanList[self.run]["transponders"]:
self.scan.addInitial(x)
def updatePass(self):
size = len(self.scanList)
if size > 1:
self.passNumber.setText(_("pass") + " " + str(self.run + 1) + "/" + str(size) + " (" + _("Tuner") + " " + str(self.scanList[self.run]["feid"]) + ")")
def execBegin(self):
self.doRun()
self.updatePass()
self.scan.statusChanged.get().append(self.scanStatusChanged)
self.scan.newService.get().append(self.newService)
self.servicelist.clear()
self.state = self.Running
err = self.scan.start(self.feid, self.flags, self.networkid)
self.frontendInfo.updateFrontendData()
if err:
self.state = self.Error
self.errorcode = 0
self.scanStatusChanged()
def execEnd(self):
self.scan.statusChanged.get().remove(self.scanStatusChanged)
self.scan.newService.get().remove(self.newService)
if not self.isDone():
print "*** warning *** scan was not finished!"
del self.scan
def isDone(self):
return self.state == self.Done or self.state == self.Error
def newService(self):
newServiceName = self.scan.getLastServiceName()
newServiceRef = self.scan.getLastServiceRef()
self.servicelist.addItem((newServiceName, newServiceRef))
self.lcd_summary.updateService(newServiceName)
def destroy(self):
pass
|
numerai/submission-criteria
|
tests/benchmark_base.py
|
Python
|
apache-2.0
| 1,174
| 0.000852
|
import statistics
from datetime import datetime
class Benchmark():
    """Base class for simple timing benchmarks.

    Subclasses implement ``benchmark()``; ``start()`` logs a banner and
    invokes it. Helpers format and log per-iteration timing statistics.
    """

    def __init__(self, n_runs: int = 5, print_checkpoint: bool = True):
        self.n_runs = n_runs
        # stored for subclasses to consult; not read by this base class
        self.print_checkpoint = print_checkpoint

    @staticmethod
    def log(message: str) -> None:
        """Print *message* prefixed with the current timestamp."""
        print('[%s] - %s' % (datetime.now(), message))

    def log_stats(self, times: list, unit: str = 'ms') -> None:
        """Log formatted statistics for the timings collected so far."""
        self.log(
            '[iteration %s/%s] %s' % (len(times), self.n_runs,
                                      self.format_stats(times, unit=unit)))

    @staticmethod
    def format_stats(times: list, unit: str) -> str:
        """Return 'median/mean/stdev/max/min' summary of *times* with *unit*
        appended to every value except stdev."""
        return 'median: %.2f%s, mean: %.2f%s, stdev: %.2f, max: %.2f%s, min: %.2f%s' % (
            statistics.median(times), unit, statistics.mean(times), unit,
            statistics.stdev(times), max(times), unit, min(times), unit)

    def start(self, suffix: str = None) -> None:
        """Log a start banner (optionally suffixed) and run the benchmark."""
        if suffix is None:
            suffix = '...'
        else:
            suffix = ': ' + suffix
        self.log('starting benchmark%s' % suffix)
        self.benchmark()

    def benchmark(self):
        """Hook for subclasses; must be overridden."""
        raise NotImplementedError('method benchmark() not implemented yet')
|
twitterdev/twitter-python-ads-sdk
|
twitter_ads/targeting.py
|
Python
|
mit
| 998
| 0
|
# Copyright (C) 2015 Twitter, Inc.
"""Container for all targeting related logic used by the Ads API SDK."""
from twitter_ads.http import Request
from twitter_ads.resou
|
rce import resource_property, Resource, Persistence
from twitter_ads import API_VERSION
from twitter_ads.utils import FlattenParams
import json
class AudienceEstimate(Resource, Persistence):
    """Audience size estimation resource (POST-only endpoint)."""

    PROPERTIES = {}

    RESOURCE = '/' + API_VERSION + '/accounts/{account_id}/audience_estimate'

    @classmethod
    @FlattenParams
    def load(klass, account, params):
        """POST *params* as JSON for *account* and return a populated instance."""
        resource = klass.RESOURCE.format(account_id=account.id)
        headers = {'Content-Type': 'application/json'}
        response = Request(account.client,
                           'post',
                           resource,
                           headers=headers,
                           body=json.dumps(params)).perform()
        return klass(account).from_response(response.body['data'])


resource_property(AudienceEstimate, 'audience_size')
|
TecnoSalta/bg
|
mezzanine/utils/device.py
|
Python
|
bsd-2-clause
| 2,013
| 0
|
from __future__ import unicode_literals
def device_from_request(request):
    """
    Determines the device name from the request by first looking for an
    overriding cookie, and if not found then matching the user agent.
    Used at both the template level for choosing the template to load and
    also at the cache level as a cache key prefix. Returns "" when no
    device matches.
    """
    from mezzanine.conf import settings
    try:
        # If a device was set via cookie, match available devices.
        for (device, _) in settings.DEVICE_USER_AGENTS:
            if device == request.COOKIES["mezzanine-device"]:
                return device
    except KeyError:
        # If a device wasn't set via cookie, match user agent.
        try:
            user_agent = request.META["HTTP_USER_AGENT"].lower()
        except KeyError:
            pass
        else:
            try:
                # bytes under some servers/Python 2; str already has no .decode
                user_agent = user_agent.decode("utf-8")
            except AttributeError:
                pass
            for (device, ua_strings) in settings.DEVICE_USER_AGENTS:
                for ua_string in ua_strings:
                    if ua_string.lower() in user_agent:
                        return device
    return ""
def templates_for_device(request, templates):
    """
    Given a template name (or list of them), returns the template names
    as a list, with each name prefixed with the device directory
    inserted before its associated default in the list.
    """
    from mezzanine.conf import settings
    if not isinstance(templates, (list, tuple)):
        templates = [templates]
    device = device_from_request(request)
    device_templates = []
    for template in templates:
        if device:
            device_templates.append("%s/%s" % (device, template))
        # fall back to the configured default device's template, then the
        # plain (undecorated) template name
        if settings.DEVICE_DEFAULT and settings.DEVICE_DEFAULT != device:
            default = "%s/%s" % (settings.DEVICE_DEFAULT, template)
            device_templates.append(default)
        device_templates.append(template)
    return device_templates
|
rocktavious/pyversion
|
setup.py
|
Python
|
mit
| 107
| 0
|
# Minimal pbr-driven setup: all packaging metadata lives in setup.cfg,
# and the version is derived from git tags via pbr.
from setuptools import setup

setup(
    setup_requires=['pbr', ],
    pbr=True,
    auto_version="PBR",
)
|
maui-packages/calamares
|
src/modules/fstab/main.py
|
Python
|
gpl-3.0
| 5,216
| 0.000767
|
#!/usr/bin/env python3
# encoding: utf-8
# === This file is part of Calamares - <http://github.com/calamares> ===
#
# Copyright 2014, Aurélien Gâteau <agateau@kde.org>
#
# Calamares is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Calamares is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Calamares. If not, see <http://www.gnu.org/licenses/>.
import os
import re
import libcalamares
HEADER = """# /etc/fstab: static file system information.
#
# Use 'blkid' to print the universally unique identifier for a device; this may
# be used with UUID= as a more robust way to name devices that works even if
# disks are added and removed. See fstab(5).
#
# <file system> <mount point> <type> <options> <dump> <pass>"""
# Turn Parted filesystem names into fstab names
FS_MAP = {
"fat16": "vfat",
"fat32": "vfat",
"linuxswap": "swap",
}
def mkdir_p(path):
    """Create *path* (and any missing parents); no-op if it already exists.

    Uses exist_ok instead of the original check-then-create pair, which
    closes the race where the directory appears between the exists() check
    and makedirs().
    """
    os.makedirs(path, exist_ok=True)
def is_ssd_disk(disk_name):
    """Return True when sysfs marks *disk_name* as non-rotational (an SSD)."""
    rotational_path = os.path.join("/sys/block", disk_name, "queue/rotational")
    if not os.path.exists(rotational_path):
        # Should not happen unless sysfs changes, but better safe than sorry
        return False
    with open(rotational_path) as rotational_file:
        return rotational_file.read() == "0\n"
def disk_name_for_partition(partition):
    """Return the parent disk name for a partition dict, e.g.
    {"device": "/dev/sda1"} -> "sda".

    NOTE(review): stripping trailing digits is wrong for NVMe-style names
    ("nvme0n1p1" -> "nvme0n1p", not "nvme0n1") -- confirm against the
    device naming scheme Calamares passes in here.
    """
    name = os.path.basename(partition["device"])
    return re.sub("[0-9]+$", "", name)
class FstabGenerator(object):
    """Writes /etc/fstab inside the target root and creates the mount point
    directories for the configured partitions.

    SSD-backed filesystems get extra mount options, and when the root
    filesystem lives on an SSD, /tmp is additionally mounted as tmpfs.
    """

    def __init__(self, partitions, root_mount_point, mount_options,
                 ssd_extra_mount_options):
        # partitions: list of dicts with "device", "fs", "mountPoint", "uuid"
        self.partitions = partitions
        self.root_mount_point = root_mount_point
        self.mount_options = mount_options
        self.ssd_extra_mount_options = ssd_extra_mount_options
        self.ssd_disks = set()
        self.root_is_ssd = False

    def run(self):
        """Full job: detect SSDs, write fstab, create mount points."""
        self.find_ssd_disks()
        self.generate_fstab()
        self.create_mount_points()
        return None

    def find_ssd_disks(self):
        """Record which of the involved disks are SSDs."""
        disks = {disk_name_for_partition(x) for x in self.partitions}
        self.ssd_disks = {x for x in disks if is_ssd_disk(x)}

    def generate_fstab(self):
        """Write <root>/etc/fstab with one line per mountable partition."""
        # Create fstab
        mkdir_p(os.path.join(self.root_mount_point, "etc"))
        fstab_path = os.path.join(self.root_mount_point, "etc", "fstab")

        with open(fstab_path, "w") as fl:
            print(HEADER, file=fl)
            # NOTE: generate_fstab_line_info() also sets self.root_is_ssd as a
            # side effect, so the loop must run before the tmpfs check below.
            for partition in self.partitions:
                dct = self.generate_fstab_line_info(partition)
                if dct:
                    self.print_fstab_line(dct, file=fl)

            if self.root_is_ssd:
                # Mount /tmp on a tmpfs
                dct = dict(
                    device="tmpfs",
                    mount_point="/tmp",
                    fs="tmpfs",
                    options="defaults,noatime,mode=1777",
                    check=0,
                )
                self.print_fstab_line(dct, file=fl)

    def generate_fstab_line_info(self, partition):
        """Return a dict describing *partition*'s fstab line, or None when the
        partition is neither mountable nor swap."""
        fs = partition["fs"]
        mount_point = partition["mountPoint"]
        disk_name = disk_name_for_partition(partition)
        is_ssd = disk_name in self.ssd_disks
        # translate Parted's filesystem names to fstab names (fat32 -> vfat, ...)
        fs = FS_MAP.get(fs, fs)

        if not mount_point and not fs == "swap":
            return None

        options = self.mount_options.get(fs, self.mount_options["default"])
        if is_ssd:
            extra = self.ssd_extra_mount_options.get(fs)
            if extra:
                options += "," + extra

        # fsck pass number: 1 for root, 2 for other mounts, 0 for swap
        if mount_point == "/":
            check = 1
        elif mount_point:
            check = 2
        else:
            check = 0

        if mount_point == "/":
            self.root_is_ssd = is_ssd

        return dict(
            device="UUID=" + partition["uuid"],
            mount_point=mount_point or "none",
            fs=fs,
            options=options,
            check=check)

    def print_fstab_line(self, dct, file=None):
        """Write one column-aligned fstab line to *file*."""
        line = "{:41} {:<14} {:<7} {:<10} 0 {}".format(
            dct["device"],
            dct["mount_point"],
            dct["fs"],
            dct["options"],
            dct["check"])
        print(line, file=file)

    def create_mount_points(self):
        """Create each partition's mount point under the target root."""
        for partition in self.partitions:
            if partition["mountPoint"]:
                mkdir_p(self.root_mount_point + partition["mountPoint"])
def run():
    """Module entry point called by Calamares: generate /etc/fstab and the
    mount point directories for the partitions in global storage."""
    global_storage = libcalamares.globalstorage
    job_conf = libcalamares.job.configuration
    generator = FstabGenerator(
        global_storage.value("partitions"),
        global_storage.value("rootMountPoint"),
        job_conf["mountOptions"],
        job_conf.get("ssdExtraMountOptions", {}),
    )
    return generator.run()
|
manojgudi/sandhi
|
modules/gr36/gnuradio-core/src/lib/filter/generate_gr_fir_sysconfig_generic.py
|
Python
|
gpl-3.0
| 4,373
| 0.011891
|
#!/bin/env python
# -*- python -*-
#
# Copyright 2003,2009 Free Software Foundation, Inc
|
.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or
|
modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from generate_utils import *
# ----------------------------------------------------------------
def make_gr_fir_sysconfig_generic_h ():
    """Emit gr_fir_sysconfig_generic.h: declares the generic sysconfig
    subclass with one create_* factory and one get_*_info query per FIR
    signature in fir_signatures (from generate_utils)."""
    out = open_and_log_name ('gr_fir_sysconfig_generic.h', 'w')
    if not out:
        return

    out.write (copyright)

    out.write (
'''
/*
 * WARNING: This file is automatically generated by
 * generate_gr_fir_sysconfig_generic.py.
 *
 * Any changes made to this file will be overwritten.
 */

#ifndef _GR_FIR_SYSCONFIG_GENERIC_H_
#define _GR_FIR_SYSCONFIG_GENERIC_H_

#include <gr_fir_sysconfig.h>

''')

    out.write (
'''
class gr_fir_sysconfig_generic : public gr_fir_sysconfig {
public:
''')

    # one factory declaration per signature (e.g. gr_fir_ccf)
    for sig in fir_signatures:
        out.write (('  virtual gr_fir_%s *create_gr_fir_%s (const std::vector<%s> &taps);\n' %
                    (sig, sig, tap_type (sig))))

    out.write ('\n')

    # one info-query declaration per signature
    for sig in fir_signatures:
        out.write (('  virtual void get_gr_fir_%s_info (std::vector<gr_fir_%s_info> *info);\n' %
                    (sig, sig)))

    out.write (
'''
};

#endif /* _GR_FIR_SYSCONFIG_GENERIC_H_ */
''')

    out.close ()
# ----------------------------------------------------------------
def make_constructor (sig, out):
    """Write a file-scope factory returning a new generic FIR for *sig*."""
    out.write ('''
static gr_fir_%s *
make_gr_fir_%s (const std::vector<%s> &taps)
{
  return new gr_fir_%s_generic (taps);
}
''' % (sig, sig, tap_type (sig), sig))
def make_creator (sig, out):
    """Write the create_gr_fir_<sig> member that forwards to the factory."""
    out.write ('''
gr_fir_%s *
gr_fir_sysconfig_generic::create_gr_fir_%s (const std::vector<%s> &taps)
{
  return make_gr_fir_%s (taps);
}
''' % (sig, sig, tap_type (sig), sig))
def make_info (sig, out):
    """Write get_gr_fir_<sig>_info: registers the single "generic" impl."""
    out.write ('''
void
gr_fir_sysconfig_generic::get_gr_fir_%s_info (std::vector<gr_fir_%s_info> *info)
{
  info->resize (1);
  (*info)[0].name = "generic";
  (*info)[0].create = make_gr_fir_%s;
}
''' % (sig, sig, sig))
# ----------------------------------------------------------------
def make_gr_fir_sysconfig_generic_cc ():
    """Emit gr_fir_sysconfig_generic.cc: static make_* factories plus the
    create_* / get_*_info implementations for every FIR signature."""
    out = open_and_log_name ('gr_fir_sysconfig_generic.cc', 'w')
    if not out:
        return

    out.write (copyright)

    out.write (
'''
/*
 * WARNING: This file is automatically generated by
 * generate_gr_fir_sysconfig_generic.py.
 *
 * Any changes made to this file will be overwritten.
 */

#ifdef HAVE_CONFIG_H
#include <config.h>
#endif

#include <gr_fir_sysconfig_generic.h>
''')

    # one #include per concrete generic implementation
    for sig in fir_signatures:
        out.write ('#include <gr_fir_%s_generic.h>\n' % (sig))

    out.write (
'''
/*
 * ----------------------------------------------------------------
 * static functions that serve as constructors returned by info
 * ----------------------------------------------------------------
 */
''')

    for sig in fir_signatures:
        make_constructor (sig, out)

    out.write (
'''
/*
 * ----------------------------------------------------------------
 * return instances of the generic C++ versions of these classes.
 * ----------------------------------------------------------------
 */
''')

    for sig in fir_signatures:
        make_creator (sig, out)

    out.write (
'''
/*
 * Return info about available implementations.
 *
 * This is the bottom of the concrete hierarchy, so we set the
 * size of the vector to 1, and install our info.  Classes derived
 * from us invoke us first, then append their own info.
 */
''')

    for sig in fir_signatures:
        make_info (sig, out)

    out.close ()
# ----------------------------------------------------------------
def generate ():
    """Generate both the header and the implementation file."""
    make_gr_fir_sysconfig_generic_h ()
    make_gr_fir_sysconfig_generic_cc ()

if __name__ == '__main__':
    generate ()
|
microelly2/geodata
|
geodat/import_osm.py
|
Python
|
lgpl-3.0
| 23,641
| 0.05022
|
# -*- coding: utf-8 -*-
#-------------------------------------------------
#-- osm map importer
#--
#-- microelly 2016 v 0.4
#--
#-- GNU Lesser General Public License (LGPL)
#-------------------------------------------------
'''import data from openstreetmap'''
#http://api.openstreetmap.org/api/0.6/map?bbox=11.74182,50.16413,11.74586,50.16561
#http://api.openstreetmap.org/api/0.6/way/384013089
#http://api.openstreetmap.org/api/0.6/node/3873106739
#\cond
from geodat.say import *
import time, json, os
import sys
if sys.version_info[0] !=2:
from importlib import reload
import urllib.request
from say import *
import time, json, os
try:
import urllib2
except:
import urllib
import pivy
from pivy import coin
import geodat.my_xmlparser
reload (geodat.my_xmlparser)
import geodat.transversmercator
from geodat.transversmercator import TransverseMercator
import geodat.inventortools as inventortools
import geodat.xmltodict
from geodat.xml
|
todict import parse
#\endcond
#------------------------------
#
# microelly 2016 ..
#
#------------------------------
import time
## get the elevation height of a single point
def getHeight(b, l):
    '''get height of a single point with latitude b, longitude l

    Returns the elevation in millimeters (the API reports meters; the value
    is multiplied by 1000 and rounded to 2 decimals), or None when the
    service returns no results. Retries up to 4 times when rate-limited.
    '''
    anz = 0
    ans = None
    while anz < 4:
        source = "https://maps.googleapis.com/maps/api/elevation/json?locations=" + str(b) + ',' + str(l)
        try:
            response = urllib2.urlopen(source)
        except:
            response = urllib.request.urlopen(source)
        ans = response.read()
        if not isinstance(ans, str):
            # py3: urlopen returns bytes; decode before substring test / json
            ans = ans.decode("utf-8")
        # BUGFIX: the original tested `if ans.find("OVER_QUERY_LIMIT"):`,
        # which is truthy for -1 (substring absent) -- so it slept 5 s and
        # retried even on successful responses. Use a membership test.
        if "OVER_QUERY_LIMIT" in ans:
            anz += 1
            time.sleep(5)
        else:
            break
    s = json.loads(ans)
    res = s['results']
    for r in res:
        return round(r['elevation']*1000, 2)
## get the heights for a list of points
def getHeights(points):
    '''get heights for a list of points

    Queries the elevation API in batches of 20 points with a 1 s pause
    between batches, and returns a dict mapping "lat lng" (both "%0.7f")
    to elevation. Each point is indexed as p[1]=lat, p[2]=lng (strings) --
    TODO confirm against callers.
    '''
    i = 0
    size = len(points)
    while i < size:
        source = "https://maps.googleapis.com/maps/api/elevation/json?locations="
        ii = 0
        if i > 0:
            # stay under the API rate limit between batches
            time.sleep(1)
        while ii < 20 and i < size:
            p = points[i]
            ss = p[1]+','+p[2] + '|'
            source += ss
            i += 1
            ii += 1
        # trailing dummy coordinate terminates the '|'-separated list
        source += "60.0,10.0"
        response = urllib.request.urlopen(source)
        ans = response.read()
        s = json.loads(ans)
        res = s['results']
        # NOTE(review): heights is re-initialized for every batch, so only the
        # last batch's results are returned -- looks like a bug; confirm intent.
        heights = {}
        for r in res:
            key = "%0.7f" %(r['location']['lat']) + " " + "%0.7f" %(r['location']['lng'])
            heights[key] = r['elevation']
    return heights
def organize():
    '''create groups for the different object types
    GRP_highways, GRP_building, GRP_landuse
    '''
    # one App::DocumentObjectGroup per category, created in the active document
    highways=App.activeDocument().addObject("App::DocumentObjectGroup","GRP_highways")
    landuse=App.activeDocument().addObject("App::DocumentObjectGroup","GRP_landuse")
    buildings=App.activeDocument().addObject("App::DocumentObjectGroup","GRP_building")
    pathes=App.activeDocument().addObject("App::DocumentObjectGroup","GRP_pathes")

    # Sort objects into groups by label prefix. The `if`s are independent on
    # purpose; buildings stay visible, all other groups are hidden.
    for oj in App.activeDocument().Objects:
        if oj.Label.startswith('building'):
            buildings.addObject(oj)
            # oj.ViewObject.Visibility=False

        if oj.Label.startswith('highway') or oj.Label.startswith('way'):
            highways.addObject(oj)
            oj.ViewObject.Visibility=False

        if oj.Label.startswith('landuse'):
            landuse.addObject(oj)
            oj.ViewObject.Visibility=False

        if oj.Label.startswith('w_'):
            pathes.addObject(oj)
            oj.ViewObject.Visibility=False
#---------------------
from geodat.say import *
import re
#fn='/home/thomas/.FreeCAD//geodat3/50.340722-11.232647-0.015'
#fn='/home/thomas/.FreeCAD/system.cfg'
debug=False
#--------------------
## core method to download and import the data
#
#def import_osm(b,l,bk,progressbar,status):
# import_osm2(b,l,bk,progressbar,status,False)
def import_osm2(b,l,bk,progressbar,status,elevation):
dialog=False
debug=False
if progressbar:
progressbar.setValue(0)
if status:
status.setText("get data from openstreetmap.org ...")
FreeCADGui.updateGui()
content=''
bk=0.5*bk
dn=FreeCAD.ConfigGet("UserAppData") + "/geodat3/"
fn=dn+str(b)+'-'+str(l)+'-'+str(bk)
import os
if not os.path.isdir(dn):
os.makedirs(dn)
try:
say("I try to read data from cache file ... ")
say(fn)
f=open(fn,"r")
content=f.read()
# say(content)
# raise Exception("to debug:force load from internet")
except:
sayW("no cache file, so I connect to openstreetmap.org...")
lk=bk #
b1=b-bk/1113*10
l1=l-lk/713*10
b2=b+bk/1113*10
l2=l+lk/713*10
source='http://api.openstreetmap.org/api/0.6/map?bbox='+str(l1)+','+str(b1)+','+str(l2)+','+str(b2)
say(source)
import requests
response = requests.get(source)
data = response.text
lines=response.text.split('\n')
FreeCAD.t=response
f=open(fn,"w")
# f.write(response.text)
if response.status_code == 200:
with open(fn, 'wb') as f:
for chunk in response.iter_content(1024):
f.write(chunk)
f.close()
# print("huhu");return
if 0:
try:
say("read--")
response = urllib.request.urlopen(source)
#import ssl
#ssl._create_default_https_context = ssl._create_unverified_context
#response = urllib.request.urlopen(source)
# import requests
# response = requests.get(source)
say(response)
say("2huu")
first=True
content=''
f=open(fn,"w")
l=0
z=0
ct=0
say("2wkkw")
#say(response.text)
# lines=response.text.split('\n')
# say(len(lines))
say("ll")
# for line in lines:
for line in response:
print ("Y",line)
if status:
if z>5000:
status.setText("read data ..." + str(l))
z=0
FreeCADGui.updateGui()
l+=1
z+=1
if first:
first=False
else:
content += line
f.write(line)
f.close()
if status:
status.setText("FILE CLOSED ..." + str(l))
FreeCADGui.updateGui()
response.close()
except:
sayErr( "Fehler beim Lesen")
if status:
status.setText("got data from openstreetmap.org ...")
FreeCADGui.updateGui()
sayW("Beeenden - im zweiten versuch daten auswerten")
return False
if elevation:
baseheight=getHeight(b,l)
else:
baseheight=0
if debug:
say( "-------Data---------")
say(content)
if status:
status.setText("parse data ...")
FreeCADGui.updateGui()
say("------------------------------")
say(fn)
# fn='/home/thomas/.FreeCAD//geodat3/50.340722-11.232647-0.015'
say(fn)
tree=geodat.my_xmlparser.getData(fn)
# for element in tree.getiterator('node'):
# say(element.params)
# say("ways")
# for element in tree.getiterator('way'):
# say(element.params)
# say("relations")
# for element in tree.getiterator('relation'):
# say(element.params)
if 0:
try:
sd=parse(content)
except:
sayexc("Problem parsing data - abort")
status.setText("Problem parsing data - aborted, for details see Report view")
return
if debug: say(json.dumps(sd, indent=4))
if status:
status.setText("transform data ...")
FreeCADGui.updateGui()
relations=tree.getiterator('relation')
nodes=tree.getiterator('node')
ways=tree.getiterator('way')
bounds=tree.getiterator('bounds')[0]
# center of the scene
minlat=float(bounds.params['minlat'])
minlon=float(bounds.params['minlon'])
maxlat=float(bounds.params['maxlat'])
maxlon=float(bounds.params['maxlon'])
tm=TransverseMercator()
tm.lat=0.5*(minlat+maxlat)
tm.lon=0.5*(minlon+maxlon)
center=tm.fromGeographic(tm.lat,tm.lon)
corner=tm.fromGeographic(minlat,minlon)
size=[center[0]-corner[0],center[1]-corner[1]]
# map all points to xy-plane
points={}
nodesbyid={}
for n in nodes:
nodesbyid[n.params['id']]=n
ll=tm.fromGeographic(float(n.params['lat']),float(n.params['lon']))
points[str(n.params['id'])]=FreeCAD.Vector(ll[0]-center[0],ll[1]-center[1],0.0)
# say(points)
# say("abbruch3 -hier daten uebernehmen !!");return
# hack to catch deutsche umlaute
def beaustring(string):
res=''
for tk in zz:
try:
res += str(tk)
except:
if ord(tk)==223:
res += 'ß'
elif ord(tk)==246:
res += 'ö'
elif ord(tk)==196:
res += 'Ä'
elif ord(tk)==228:
res += 'ä'
elif ord(tk)==242:
res += 'ü'
else:
sayErr(["error sign",tk,ord(tk),string])
res +="#"
return res
if status:
status.setText("create visualizations ...")
FreeCADGui.updateGui()
App.newDocument("OSM Map")
say("Datei erzeugt")
area=App.ActiveDocument.add
|
mkacik/bcc
|
tools/uobjnew.py
|
Python
|
apache-2.0
| 5,131
| 0.000974
|
#!/usr/bin/python
# @lint-avoid-python-3-compatibility-imports
#
#
|
uobjnew Summarize object allocations in high-level languages.
# For Linux, uses BCC, eBPF.
#
# USAGE: uobjnew [-h] [-T TOP] [-v] {java,ruby,c} pid [interval]
#
# Copyright 2016 Sasha
|
Goldshtein
# Licensed under the Apache License, Version 2.0 (the "License")
#
# 25-Oct-2016 Sasha Goldshtein Created this.
from __future__ import print_function
import argparse
from bcc import BPF, USDT
from time import sleep
examples = """examples:
./uobjnew java 145 # summarize Java allocations in process 145
./uobjnew c 2020 1 # grab malloc() sizes and print every second
./uobjnew ruby 6712 -C 10 # top 10 Ruby types by number of allocations
./uobjnew ruby 6712 -S 10 # top 10 Ruby types by total size
"""
parser = argparse.ArgumentParser(
description="Summarize object allocations in high-level languages.",
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=examples)
parser.add_argument("language", choices=["java", "ruby", "c"],
help="language to trace")
parser.add_argument("pid", type=int, help="process id to attach to")
parser.add_argument("interval", type=int, nargs='?',
help="print every specified number of seconds")
parser.add_argument("-C", "--top-count", type=int,
help="number of most frequently allocated types to print")
parser.add_argument("-S", "--top-size", type=int,
help="number of largest types by allocated bytes to print")
parser.add_argument("-v", "--verbose", action="store_true",
help="verbose mode: print the BPF program (for debugging purposes)")
args = parser.parse_args()
program = """
#include <linux/ptrace.h>
struct key_t {
#if MALLOC_TRACING
u64 size;
#else
char name[50];
#endif
};
struct val_t {
u64 total_size;
u64 num_allocs;
};
BPF_HASH(allocs, struct key_t, struct val_t);
""".replace("MALLOC_TRACING", "1" if args.language == "c" else "0")
usdt = USDT(pid=args.pid)
#
# Java
#
if args.language == "java":
program += """
int alloc_entry(struct pt_regs *ctx) {
struct key_t key = {};
struct val_t *valp, zero = {};
u64 classptr = 0, size = 0;
bpf_usdt_readarg(2, ctx, &classptr);
bpf_usdt_readarg(4, ctx, &size);
bpf_probe_read(&key.name, sizeof(key.name), (void *)classptr);
valp = allocs.lookup_or_init(&key, &zero);
valp->total_size += size;
valp->num_allocs += 1;
return 0;
}
"""
usdt.enable_probe("object__alloc", "alloc_entry")
#
# Ruby
#
elif args.language == "ruby":
create_template = """
int THETHING_alloc_entry(struct pt_regs *ctx) {
struct key_t key = { .name = "THETHING" };
struct val_t *valp, zero = {};
u64 size = 0;
bpf_usdt_readarg(1, ctx, &size);
valp = allocs.lookup_or_init(&key, &zero);
valp->total_size += size;
valp->num_allocs += 1;
return 0;
}
"""
program += """
int object_alloc_entry(struct pt_regs *ctx) {
struct key_t key = {};
struct val_t *valp, zero = {};
u64 classptr = 0;
bpf_usdt_readarg(1, ctx, &classptr);
bpf_probe_read(&key.name, sizeof(key.name), (void *)classptr);
valp = allocs.lookup_or_init(&key, &zero);
valp->num_allocs += 1; // We don't know the size, unfortunately
return 0;
}
"""
usdt.enable_probe("object__create", "object_alloc_entry")
for thing in ["string", "hash", "array"]:
program += create_template.replace("THETHING", thing)
usdt.enable_probe("%s__create" % thing, "%s_alloc_entry" % thing)
#
# C
#
elif args.language == "c":
program += """
int alloc_entry(struct pt_regs *ctx, size_t size) {
struct key_t key = {};
struct val_t *valp, zero = {};
key.size = size;
valp = allocs.lookup_or_init(&key, &zero);
valp->total_size += size;
valp->num_allocs += 1;
return 0;
}
"""
if args.verbose:
    # Dump the USDT probe argument-reader stubs and the final BPF C program
    # so the generated code can be inspected when debugging.
    print(usdt.get_text())
    print(program)
bpf = BPF(text=program, usdt_contexts=[usdt])
if args.language == "c":
    # C has no USDT allocation probe; attach a uprobe on libc malloc() instead.
    bpf.attach_uprobe(name="c", sym="malloc", fn_name="alloc_entry",
        pid=args.pid)
exit_signaled = False
print("Tracing allocations in process %d (language: %s)... Ctrl-C to quit." %
    (args.pid, args.language or "none"))
while True:
try:
sleep(args.interval or 99999999)
except KeyboardInterrupt:
exit_signaled = True
print()
data = bpf["allocs"]
if args.top_count:
data = sorted(data.items(), key=lambda (k, v): v.num_allocs)
data = data[-args.top_count:]
elif args.top_size:
data = sorted(data.items(), key=lambda (k, v): v.total_size)
data = data[-args.top_size:]
else:
data = sorted(data.items(), key=lambda (k, v): v.total_size)
print("%-30s %8s %12s" % ("TYPE", "# ALLOCS", "# BYTES"))
for key, value in data:
if args.language == "c":
obj_type = "block size %d" % key.size
else:
obj_type = key.name
print("%-30s %8d %12d" %
(obj_type, value.num_allocs, value.total_size))
if args.interval and not exit_signaled:
bpf["allocs"].clear()
else:
exit()
|
marshmallow-code/apispec
|
tests/test_yaml_utils.py
|
Python
|
mit
| 908
| 0.001116
|
import pytest
from apispec import yaml_utils
def test_load_yaml_from_docstring():
    """The YAML block after the '---' marker is parsed into a dict."""
    def sample():
        """
        Foo
        bar
        baz quux
        ---
        herp: 1
        derp: 2
        """
    loaded = yaml_utils.load_yaml_from_docstring(sample.__doc__)
    assert loaded == {"herp": 1, "derp": 2}
@pytest.mark.parametrize("docstring", (None, "", "---"))
def test_load_yaml_from_docstring_empty_docstring(docstring):
    # Missing, empty, or delimiter-only docstrings must yield an empty dict.
    assert yaml_utils.load_yaml_from_docstring(docstring) == {}
@pytest.mark.parametrize("docstring", (None, "", "---"))
def test_load_operations_from_docstring_empty_docstring(docstring):
    # Missing, empty, or delimiter-only docstrings must yield no operations.
    assert yaml_utils.load_operations_from_docstring(docstring) == {}
def test_dict_to_yaml_unicode():
    """Non-ASCII keys/values are escaped unless allow_unicode is passed."""
    escaped = yaml_utils.dict_to_yaml({"가": "나"})
    assert escaped == '"\\uAC00": "\\uB098"\n'
    verbatim = yaml_utils.dict_to_yaml({"가": "나"}, {"allow_unicode": True})
    assert verbatim == "가: 나\n"
|
Advait-M/Tetros
|
src/Tetros.py
|
Python
|
gpl-3.0
| 64,805
| 0.001836
|
#!/usr/bin/env python
"""This is a game called Tetros made with Tkinter graphics (quite similar to Tetris)."""
# Import modules
from tkinter import *
from tkinter import filedialog
from tkinter import messagebox
from tkinter.ttk import *
import random
import math
import time
import cmath
import copy
import sys
import winsound
__author__ = "Advait Maybhate"
__copyright__ = "Copyright 2016, The Final Project"
__credits__ = [
"Jason Schattman",
"Huzaifa Arshad",
"Gaurav Iyer",
"Leon Fattakhov",
"Zach Chapman"]
__license__ = "GPL"
__version__ = "20"
__maintainer__ = "Advait Maybhate"
__status__ = "Stable Release"
# Create root in order to use tkinter
root = Tk()
Style().configure("TButton", padding=6, relief="flat", background="#33cc33")
root.title(string="Tetros") # Title window with game name
instructions = Canvas(
root,
width=800,
height=600,
background="white") # Make instructions canvas
# Make text box for user to enter speed at which tetrominoes should fall
eText = Combobox(root, font="Times 20 bold", values=["Easy - 0.5", "Medium - 0.3", "Hard - 0.1"])
# Make button for user to click in order to advance to the game screen
okayB = Button(
root,
text="Begin!",
command=lambda: getDifficulty())
screen = Canvas(
root,
width=600,
height=525,
background="white") # Make main game canvas
# Make button for quitting Tetros (present in the final game statistic
|
s screen)
quitB = Button(
root,
text="Quit Tetros",
command=lambda: endAll())
res
|
tartB = Button(
root,
text="Restart Tetros",
command=lambda: restart())
# Initialize variables and objects needed for the instructions screen
menubar = Menu(root)
menuB = Menu(menubar, tearoff=0)
menuB.add_command(label="Save Progress", command=lambda: save())
menuB.add_command(label="Load From File", command=lambda: loadSave())
menuB.add_command(label="Restart", command=lambda: restart())
menuB.add_command(label="Exit", command=lambda: exitB())
menubar.add_cascade(label="File", menu=menuB)
root.config(menu=menubar)
string = -1
def exitB():
    """Function called when the exit button is pressed to end the game.

    First press ends the current game via endGame(); a second press (or a
    press before any game state exists) shuts the application down via
    endAll().
    """
    global qPressed
    try:
        # qPressed only exists once setInitialValues() has run; if it is
        # already True the player confirmed quitting, so tear down fully.
        if qPressed:
            endAll()
        endGame()
        qPressed = True
    except NameError:
        # No game in progress yet (qPressed never initialised): quit outright.
        endAll()
def setInitialValues():
    """Initializes many variables used later on in the game.

    Resets all module-level game state (score, piece bookkeeping, flags)
    and picks the background-music tempo file that matches the chosen fall
    delay ``s`` (smaller s = faster game = faster song).
    """
    global length, clearedRows, blocks3d, blockCoords, blocks, paused, predictShape, qPressed, centres, colours, floor, counter, functions, s, score, scoreP, tetrisSong
    counter = -1  # Keeps track of how many pieces have been dropped
    length = 25  # Length of a single block
    blockCoords = []  # List that holds all block coordinates
    blocks = []  # List that holds all block objects (using create_polygon)
    qPressed = False  # Keeps track of whether q/Q/quit button has been pressed
    centres = []  # List that holds all of the centres to the tetrominoes
    colours = []  # List that holds all of the colours of the tetrominoes
    floor = 500  # The y coordinate of the bottom side of the Tetros box
    score = 0  # Keeps track of the score
    scoreP = 0  # Actual text object of the score being displayed on the screen
    # Adjust background song tempo according to difficulty
    if 0.2 <= s:
        tetrisSong = "0%.wav"
    elif 0.1 < s < 0.2:
        tetrisSong = "50%.wav"
    elif s == 0.1:
        tetrisSong = "100%.wav"
    elif 0.05 <= s < 0.1:
        tetrisSong = "150%.wav"
    else:
        tetrisSong = "200%.wav"
    # List of functions to make tetrominoes
    functions = [makei, makej, makel, makeo, makes, maket, makez]
    # Initializing a variable to assign to the next shape tetromino (top right
    # of the interface)
    predictShape = 0
    paused = False  # Keeps track of whether the pause button has been pressed
    clearedRows = 0  # Keeps track of how many rows have been cleared
    blocks3d = PhotoImage(file="End.gif")  # Final game screen background image
def hexadecimal():
    """Returns a random hexadecimal (used for a random colour), e.g. '#3FA09C'.

    Each of the six digits is drawn uniformly from 0-9/A-F.  Replaces the
    original rejection-sampling loop over raw ASCII codes (48-70 minus the
    punctuation gap 58-64) with the equivalent, clearer random.choice form.
    """
    return "#" + "".join(random.choice("0123456789ABCDEF") for _ in range(6))
# MAKE GRID OVERLAY (only enable if developing)
def overlay():
    """Makes a grid or dot overlay.

    Developer aid: optionally draws a labelled coordinate grid and/or a dot
    lattice on the game canvas so piece positions can be checked visually.
    Which overlays appear is controlled by the gridOverlay / dotOverlay
    flags hard-coded below.
    """
    global gridOverlay, dotOverlay
    # Boolean that controls whether grid overlay should be present (used for
    # developing)
    gridOverlay = False
    if gridOverlay:
        spacing = 25  # Spacing between grid lines
        for x in range(0, 600, spacing):  # Draw vertical lines
            screen.create_line(x, 10, x, 800, fill="black")
            screen.create_text(
                x,
                0,
                text=str(x),
                font="Times 8",
                anchor=N)  # Label lines with coordinates
        for y in range(0, 525, spacing):  # Draw horizontal lines
            screen.create_line(20, y, 800, y, fill="black")
            screen.create_text(
                4,
                y,
                text=str(y),
                font="Times 8",
                anchor=W)  # Label lines with coordinates
    dotOverlay = True  # Boolean that controls whether dot overlay should be present
    if dotOverlay:
        spacing = 25  # Spacing between dots
        # Draw dot grid on Tetros box
        for x in range(25, 300, spacing):
            for y in range(0, 525, spacing):
                screen.create_oval(x - 1, y - 1, x + 1, y + 1, fill="black")
        # Draw dot grid on "Next Shape" box
        for x in range(400, 525, spacing):
            for y in range(125, 200, spacing):
                screen.create_oval(x - 1, y - 1, x + 1, y + 1, fill="black")
def rotatePoint(point, centre, thetaDegrees):
    """Rotates given point around the given centre by the given angle.

    Treats the plane as the complex plane: multiplying by e^(i*theta)
    rotates a complex number about the origin, so the point is shifted so
    the pivot sits at the origin, rotated, and shifted back.
    Returns the rotated coordinates as a two-element list [x, y].
    """
    # Unit rotor for the requested (counter-clockwise) rotation angle
    rotor = cmath.exp(math.radians(thetaDegrees) * 1j)
    # Pivot and point expressed as complex numbers
    pivot = complex(centre[0], centre[1])
    moved = rotor * (complex(point[0], point[1]) - pivot) + pivot
    # Real part is the new x, imaginary part the new y
    return [moved.real, moved.imag]
def makeWholeCoords():
    """Deletes all objects on screen and redraws them using the coordinates list.

    Rebuilds every canvas polygon in ``blocks`` from the saved corner
    coordinates in ``blockCoords`` so no stale canvas items linger after
    the board changes.
    """
    global blockCoords, blocks, colours
    # Delete all objects on the screen (to make sure all objects get updated)
    screen.delete(ALL)
    # Go through blockCoords and redraw all the blocks that it contains the
    # coordinates for
    # NOTE(review): the range stops at len(blockCoords) - 1, so the LAST
    # tetromino is not redrawn here -- presumably the active falling piece
    # is drawn elsewhere; confirm this off-by-one is intentional.
    for i in range(0, len(blockCoords) - 1):
        for g in range(0, len(blockCoords[i])):
            coords = []
            # Collect the four corner points of block g of tetromino i
            for p in range(0, 4):
                coords.append(blockCoords[i][g][p])
            blocks[i][g] = screen.create_polygon(coords, fill=colours[i], outline="black", width="2")
def rotatePolygon(polygon, centre, angleDegrees):
    """Rotates given polygon around given centre by given angle.

    Mutates *polygon* in place (each vertex is replaced with its rotated
    counterpart via rotatePoint) and also returns it for convenience.
    """
    for idx, vertex in enumerate(polygon):
        polygon[idx] = rotatePoint(vertex, centre, angleDegrees)
    return polygon
def makeCoords(x, y, size=25):
    """Returns the coordinates of a square block with (x, y) as its top left corner.

    The corners are listed clockwise: top-left, top-right, bottom-right,
    bottom-left.  ``size`` defaults to 25 (the game's block edge length,
    see ``length`` in setInitialValues) but is now a parameter so callers
    can build blocks of other sizes.
    """
    return [[x, y], [x + size, y], [x + size, y + size], [x, y + size]]
def makePolygon(coords, colour):
"""Draws four blocks using given coordinates and given colour."""
block1 = screen.create_polygon(
coords[0],
fill=colour,
|
EricHripko/TheQuestOfTin
|
tqot/animation.py
|
Python
|
gpl-3.0
| 2,600
| 0.000385
|
import pygame.time
class Animation:
    """
    Class that defines simple looped frame-by-frame animations
    on art-assets and plays them when prompted to.
    """
    def __init__(self, sprite):
        """
        Create a new animation.
        :param sprite: Asset sprite that will play the animation.
        :return: An empty animation instance.
        """
        # Store the associated sprite
        self.sprite = sprite
        # Initialise the animation as empty by default
        self.frames = []
        # Current frame index
        self.current = 0
        # Ticks when the frame got shown (0 doubles as "not playing")
        self.duration = 0
    def add_frame(self, state, duration):
        """
        Add a new frame to the animation.
        :param state: State the art asset should be in.
        :param duration: Duration of the frame in milliseconds.
        """
        self.frames.append((state, duration))
    def is_playing(self):
        """
        Identify whether the animation is currently playing.
        :return: True if animation is active, false otherwise.
        """
        return self.duration != 0
    def play(self):
        """
        Play the animation. Takes care of setting the animation off,
        measuring all the timings, changing states and looping. Similar
        to update() method of a sprite.
        """
        # Just started playing the animation: record the start tick
        if self.duration == 0:
            self.duration = pygame.time.get_ticks()
        # Retrieve the current frame information (must happen on every call,
        # not only on start, so `duration` below is always defined)
        (state, duration) = self.frames[self.current]
        self.sprite.set_state(state)
        # Check whether the state needs changing
        elapsed = pygame.time.get_ticks() - self.duration
        if elapsed > duration:
            self.current += 1
            self.duration = pygame.time.get_ticks()
            # Check whether the loop is needed
            if self.current == len(self.frames):
                self.current = 0
    def invalidate(self):
        """
        Update the current frame displayed by the animation.
        Internal routine necessary to be carried out after the
        animation flow was manually altered.
        """
        (state, duration) = self.frames[self.current]
        self.sprite.set_state(state)
    def stop(self):
        """
        Stop playing the animation and reset its state.
        """
        # Do not trigger reset if animation was not being played
        if not self.is_playing():
            return
        # Reset the animation state
        self.current = 0
        self.duration = 0
        self.invalidate()
|
valentingalea/vinyl-shelf-finder
|
pantilthat/finder.py
|
Python
|
mit
| 2,511
| 0.024691
|
#!/usr/bin/env python
import pantilthat
import time
import sys
import math
import servo_ranges
def tick():
time.sleep(0.010)
class Shelf(object):
    """One physical shelf of records plus the servo angles that span it."""

    def __init__(self, num, start, end, tilt):
        self.count = num        # number of records on the shelf
        self.pan_start = start  # pan angle at the left edge, degrees (+)
        self.pan_end = end      # pan angle at the right edge, degrees (-)
        self.tilt_pos = tilt    # tilt angle aiming at this shelf, degrees

    def map_pos_to_angles(self, pos):
        """Map a 1-based record position to a pan angle in degrees.

        Returns 0 for out-of-range positions (pos <= 0 or pos > count).
        """
        if pos <= 0 or pos > self.count:
            return 0
        # naive algorithm: just lerp the range of angles
        # it works well enough
        pan_range = abs(self.pan_start) + abs(self.pan_end)
        incr = float(pan_range) / self.count
        return int(self.pan_start - pos * incr)
        # a better algorithm: get the angle based on physical
        # measurements - but somehow behaves very poorly
        # dist = 700. #mm
        # record_thick = 10. #mm
        # error = .5 #mm
        # offset = (self.count / 2. - pos) * record_thick + error
        # print offset
        # angle = math.atan2(offset, dist)
        # return int(math.degrees(angle))
max_shelves = 5
shelves = [
Shelf(42, 24, -29, -68),
Shelf(68, 24, -28, -40),
Shelf(80, 26, -25, 0),
Shelf(88, 25, -26, +40),
Shelf(68, 26, -26, +65)
]
# sanity checks
if len(sy
|
s.argv) != 3:
print "Usage: <shelf id> <shelf pos>\n"
exit()
# setup
servo_ranges.calibrate()
# read last cmd
orig_pan = pantilthat.get_pan()
orig_tilt = pantilthat.get_tilt()
print "found pan: %i; tilt: %i" % (orig_pan, orig_tilt)
# get args
in_id = int(sys.argv[1])
in_id = (in_id - 1) % max_shelves # convert to C array notation
in_pos = int(sys.argv[2])
print "searching: %i %i" % (in_id, in_pos)
# find
new_pan = shelves[in_id].map_pos_to_angles(in_pos)
new_tilt = shelves[in_id].tilt_pos
# debug
print "output: %i %i" % (new_pan, new_tilt)
#exit()
# start laser
pantilthat.light_mode(pantilthat.PWM)
pantilthat.brightness(128)
# do the requests
pan = orig_pan
pan_incr = 1 if new_pan > orig_pan else -1
while pan != new_pan:
pan = pan + pan_incr
#print pan
pantilthat.pan(pan)
tick()
tilt = orig_tilt
tilt_incr = 1 if new_tilt > orig_tilt else -1
while tilt != new_tilt:
tilt = tilt + tilt_incr
#print tilt
pantilthat.tilt(tilt)
tick()
# because the servos are so shit
# do a dance to hide the horrible inaccuracy
a = 0.
while a < (12 * math.pi):
a += math.pi / 20.
r = int(math.sin(a) * 5.)
pantilthat.pan(new_pan + r)
time.sleep(0.005)
# sec; to allow the servos to move before they are auto shut down on exit
print "waiting:"
for t in range(0, 3):
time.sleep(1)
print "."
# turn off the laser on the way out
pantilthat.brightness(0)
|
gangadharkadam/v5_erp
|
erpnext/setup/page/setup_wizard/install_fixtures.py
|
Python
|
agpl-3.0
| 12,019
| 0.018055
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
def install(country=None):
records = [
# address template
{'doctype':"Address Template", "country": country},
# item group
{'doctype': 'Item Group', 'item_group_name': _('All Item Groups'),
'is_group': 'Yes', 'parent_item_group': ''},
{'doctype': 'Item Group', 'item_group_name': _('Products'),
'is_group': 'No', 'parent_item_group': _('All Item Groups'), "show_in_website": 1 },
{'doctype': 'Item Group', 'item_group_name': _('Raw Material'),
'is_group': 'No', 'parent_item_group': _('All Item Groups') },
{'doctype': 'Item Group', 'item_group_name': _('Services'),
'is_group': 'No', 'parent_item_group': _('All Item Groups') },
{'doctype': 'Item Group', 'item_group_name': _('Sub Assemblies'),
'is_group': 'No', 'parent_item_group': _('All Item Groups') },
{'doctype': 'Item Group', 'item_group_name': _('Consumable'),
'is_group': 'No', 'parent_item_group': _('All Item Groups') },
# deduction type
{'doctype': 'Deduction Type', 'name': _('Income Tax'), 'description': _('Income Tax'), 'deduction_name': _('Income Tax')},
# earning type
{'doctype': 'Earning Type', 'name': _('Basic'), 'description': _('Basic'), 'earning_name': _('Basic'), 'taxable': 'Yes'},
# expense claim type
{'doctype': 'Expense Claim Type', 'name': _('Calls'), 'expense_type': _('Calls')},
{'doctype': 'Expense Claim Type', 'name': _('Food'), 'expense_type': _('Food')},
{'doctype': 'Expense Claim Type', 'name': _('Medical'), 'expense_type': _('Medical')},
{'doctype': 'Expense Claim Type', 'name': _('Others'), 'expense_type': _('Others')},
{'doctype': 'Expense Claim Type', 'name': _('Travel'), 'expense_type': _('Travel')},
# leave type
{'doctype': 'Leave Type', 'leave_type_name': _('Casual Leave'), 'name': _('Casual Leave'), 'is_encash': 1, 'is_carry_forward': 1, 'max_days_allowed': '3', },
{'doctype': 'Leave Type', 'leave_type_name': _('Compensatory Off'), 'name': _('Compensatory Off'), 'is_encash': 0, 'is_carry_forward': 0, },
{'doctype': 'Leave Type', 'leave_type_name': _('Sick Leave'), 'name': _('Sick Leave'), 'is_encash': 0, 'is_carry_forward': 0, },
{'doctype': 'Leave Type', 'leave_type_name': _('Privilege Leave'), 'name': _('Privilege Leave'), 'is_encash': 0, 'is_carry_forward': 0, },
{'doctype': 'Leave Type', 'leave_type_name': _('Leave Without Pay'), 'name': _('Leave Without Pay'), 'is_encash': 0, 'is_carry_forward': 0, 'is_lwp':1},
# Employment Type
{'doctype': 'Employment Type', 'employee_type_name': _('Full-time')},
{'doctype': 'Employment Type', 'employee_type_name': _('Part-time')},
{'doctype': 'Employment Type', 'employee_type_name': _('Probation')},
{'doctype': 'Employment Type', 'employee_type_name': _('Contract')},
{'doctype': 'Employment Type', 'employee_type_name': _('Commission')},
{'doctype': 'Employment Type', 'employee_type_name': _('Piecework')},
{'doctype': 'Employment Type', 'employee_type_name': _('Intern')},
{'doctype': 'Employment Type', 'employee_type_name': _('Apprentice')},
# Department
{'doctype': 'Department'
|
, 'department_name': _('Accounts')},
{'doctype': 'Department', 'department_name': _('Marketing')},
{'doctype': 'Department', 'department_name': _('Sale
|
s')},
{'doctype': 'Department', 'department_name': _('Purchase')},
{'doctype': 'Department', 'department_name': _('Operations')},
{'doctype': 'Department', 'department_name': _('Production')},
{'doctype': 'Department', 'department_name': _('Dispatch')},
{'doctype': 'Department', 'department_name': _('Customer Service')},
{'doctype': 'Department', 'department_name': _('Human Resources')},
{'doctype': 'Department', 'department_name': _('Management')},
{'doctype': 'Department', 'department_name': _('Quality Management')},
{'doctype': 'Department', 'department_name': _('Research & Development')},
{'doctype': 'Department', 'department_name': _('Legal')},
# Designation
{'doctype': 'Designation', 'designation_name': _('CEO')},
{'doctype': 'Designation', 'designation_name': _('Manager')},
{'doctype': 'Designation', 'designation_name': _('Analyst')},
{'doctype': 'Designation', 'designation_name': _('Engineer')},
{'doctype': 'Designation', 'designation_name': _('Accountant')},
{'doctype': 'Designation', 'designation_name': _('Secretary')},
{'doctype': 'Designation', 'designation_name': _('Associate')},
{'doctype': 'Designation', 'designation_name': _('Administrative Officer')},
{'doctype': 'Designation', 'designation_name': _('Business Development Manager')},
{'doctype': 'Designation', 'designation_name': _('HR Manager')},
{'doctype': 'Designation', 'designation_name': _('Project Manager')},
{'doctype': 'Designation', 'designation_name': _('Head of Marketing and Sales')},
{'doctype': 'Designation', 'designation_name': _('Software Developer')},
{'doctype': 'Designation', 'designation_name': _('Designer')},
{'doctype': 'Designation', 'designation_name': _('Assistant')},
{'doctype': 'Designation', 'designation_name': _('Researcher')},
# territory
{'doctype': 'Territory', 'territory_name': _('All Territories'), 'is_group': 'Yes', 'name': _('All Territories'), 'parent_territory': ''},
# customer group
{'doctype': 'Customer Group', 'customer_group_name': _('All Customer Groups'), 'is_group': 'Yes', 'name': _('All Customer Groups'), 'parent_customer_group': ''},
{'doctype': 'Customer Group', 'customer_group_name': _('Individual'), 'is_group': 'No', 'parent_customer_group': _('All Customer Groups')},
{'doctype': 'Customer Group', 'customer_group_name': _('Commercial'), 'is_group': 'No', 'parent_customer_group': _('All Customer Groups')},
{'doctype': 'Customer Group', 'customer_group_name': _('Non Profit'), 'is_group': 'No', 'parent_customer_group': _('All Customer Groups')},
{'doctype': 'Customer Group', 'customer_group_name': _('Government'), 'is_group': 'No', 'parent_customer_group': _('All Customer Groups')},
# supplier type
{'doctype': 'Supplier Type', 'supplier_type': _('Services')},
{'doctype': 'Supplier Type', 'supplier_type': _('Local')},
{'doctype': 'Supplier Type', 'supplier_type': _('Raw Material')},
{'doctype': 'Supplier Type', 'supplier_type': _('Electrical')},
{'doctype': 'Supplier Type', 'supplier_type': _('Hardware')},
{'doctype': 'Supplier Type', 'supplier_type': _('Pharmaceutical')},
{'doctype': 'Supplier Type', 'supplier_type': _('Distributor')},
# Sales Person
{'doctype': 'Sales Person', 'sales_person_name': _('Sales Team'), 'is_group': "Yes", "parent_sales_person": ""},
# UOM
{'uom_name': _('Unit'), 'doctype': 'UOM', 'name': _('Unit'), "must_be_whole_number": 1},
{'uom_name': _('Box'), 'doctype': 'UOM', 'name': _('Box'), "must_be_whole_number": 1},
{'uom_name': _('Kg'), 'doctype': 'UOM', 'name': _('Kg')},
{'uom_name': _('Nos'), 'doctype': 'UOM', 'name': _('Nos'), "must_be_whole_number": 1},
{'uom_name': _('Pair'), 'doctype': 'UOM', 'name': _('Pair'), "must_be_whole_number": 1},
{'uom_name': _('Set'), 'doctype': 'UOM', 'name': _('Set'), "must_be_whole_number": 1},
{'uom_name': _('Hour'), 'doctype': 'UOM', 'name': _('Hour')},
{'uom_name': _('Minute'), 'doctype': 'UOM', 'name': _('Minute')},
# Mode of Payment
{'doctype': 'Mode of Payment', 'mode_of_payment': 'Check' if country=="United States" else _('Cheque')},
{'doctype': 'Mode of Payment', 'mode_of_payment': _('Cash')},
{'doctype': 'Mode of Payment', 'mode_of_payment': _('Credit Card')},
{'doctype': 'Mode of Payment', 'mode_of_payment': _('Wire Transfer')},
{'doctype': 'Mode of Payment', 'mode_of_payment': _('Bank Draft')},
# Activity Type
{'doctype': 'Activity Type', 'activity_type': _('Planning')},
{'doctype': 'Activity Type', 'activity_type': _('Research')},
{'doctype': 'Activity Type', 'activity_type': _('Proposal Writing')},
{'doctype': 'Activity Type', 'activity_type': _('Execution')},
{'doctype': 'Activity Type', 'activity_type': _('Communication')},
# Industry Type
{'doctype'
|
DailyActie/Surrogate-Model
|
01-codes/numpy-master/doc/source/conf.py
|
Python
|
mit
| 9,985
| 0.001202
|
# -*- coding: utf-8 -*-
from __future__ import division, absolute_import, print_function
import os
import re
import sys
# Check Sphinx version
import sphinx
if sphinx.__version__ < "1.0.1":
raise RuntimeError("Sphinx 1.0.1 or newer required")
needs_sphinx = '1.0'
# -----------------------------------------------------------------------------
# General configuration
# -----------------------------------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
sys.path.insert(0, os.path.abspath('../sphinxext'))
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.pngmath', 'numpydoc',
'sphinx.ext.intersphinx', 'sphinx.ext.coverage',
'sphinx.ext.doctest', 'sphinx.ext.autosummary',
'matplotlib.sphinxext.plot_directive']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# General substitutions.
project = 'NumPy'
copyright = '2008-2009, The Scipy community'
# The default replacements for |version| and |release|, also used in various
# other places throughout the built documents.
#
import numpy
# The short X.Y version (including .devXXXX, rcX, b1 suffixes if present)
version = re.sub(r'(\d+\.\d+)\.\d+(.*)', r'\1\2', numpy.__version__)
version = re.sub(r'(\.dev\d+).*?$', r'\1', version)
# The full version, including alpha/beta/rc tags.
release = numpy.__version__
print("%s %s" % (version, release))
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
# unused_docs = []
# The reST default role (used for this markup: `text`) to use for all documents.
default_role = "autolink"
# List of directories, relative to source directories, that shouldn't be searched
# for source files.
exclude_dirs = []
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = False
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -----------------------------------------------------------------------------
# HTML output
# -----------------------------------------------------------------------------
themedir = os.path.join(os.pardir, 'scipy-sphinx-theme', '_theme')
if not os.path.isdir(themedir):
raise RuntimeError("Get the scipy-sphinx-theme first, "
"via git submodule init && git submodule update")
html_theme = 'scipy'
html_theme_path = [themedir]
if 'scipyorg' in tags:
# Build for the scipy.org web
html_theme_options = {
"edit_link": True,
"sidebar": "right",
"scipy_org_logo": True,
"rootlinks": [("http://scipy.org/", "Scipy.org"),
("http://docs.scipy.org/", "Docs")]
}
else:
# Default build
html_theme_options = {
"edit_link": False,
"sidebar": "left",
"scipy_org_logo": False,
"rootlinks": []
}
html_sidebars = {'index': 'indexsidebar.html'}
html_additional_pages = {
'index': 'indexcontent.html',
}
html_title = "%s v%s Manual" % (project, version)
html_static_path = ['_static']
html_last_updated_fmt = '%b %d, %Y'
html_use_modindex = True
html_copy_source = False
html_domain_indices = False
html_file_suffix = '.html'
htmlhelp_basename = 'numpy'
pngmath_use_preview = True
pngmath_dvipng_args = ['-gamma', '1.5', '-D', '96', '-bg', 'Transparent']
# -----------------------------------------------------------------------------
# LaTeX output
# -----------------------------------------------------------------------------
# The paper size ('letter' or 'a4').
# latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
# latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
_stdautho
|
r = 'Written by the NumPy community'
latex_documents = [
('reference/index', 'numpy-ref.tex', 'NumPy Reference',
_stdauthor, 'manual'),
('user/index', 'numpy-user.tex', 'NumPy User Guide',
_stdauthor, 'manual'),
]
# The name of an image file (relative to this di
|
rectory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# Additional stuff for the LaTeX preamble.
latex_preamble = r'''
\usepackage{amsmath}
\DeclareUnicodeCharacter{00A0}{\nobreakspace}
% In the parameters section, place a newline after the Parameters
% header
\usepackage{expdlist}
\let\latexdescription=\description
\def\description{\latexdescription{}{} \breaklabel}
% Make Examples/etc section headers smaller and more compact
\makeatletter
\titleformat{\paragraph}{\normalsize\py@HeaderFamily}%
{\py@TitleColor}{0em}{\py@TitleColor}{\py@NormalColor}
\titlespacing*{\paragraph}{0pt}{1ex}{0pt}
\makeatother
% Fix footer/header
\renewcommand{\chaptermark}[1]{\markboth{\MakeUppercase{\thechapter.\ #1}}{}}
\renewcommand{\sectionmark}[1]{\markright{\MakeUppercase{\thesection.\ #1}}}
'''
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
latex_use_modindex = False
# -----------------------------------------------------------------------------
# Texinfo output
# -----------------------------------------------------------------------------
texinfo_documents = [
("contents", 'numpy', 'NumPy Documentation', _stdauthor, 'NumPy',
"NumPy: array processing for numbers, strings, records, and objects.",
'Programming',
1),
]
# -----------------------------------------------------------------------------
# Intersphinx configuration
# -----------------------------------------------------------------------------
intersphinx_mapping = {
'python': ('https://docs.python.org/dev', None),
'scipy': ('https://docs.scipy.org/doc/scipy/reference', None),
'matplotlib': ('http://matplotlib.org', None)
}
# -----------------------------------------------------------------------------
# NumPy extensions
# -----------------------------------------------------------------------------
# If we want to do a phantom import from an XML file for all autodocs
phantom_import_file = 'dump.xml'
# Make numpydoc to generate plots for example sections
numpydoc_use_plots = True
# -----------------------------------------------------------------------------
# Autosummary
# -----------------------------------------------------------------------------
import glob
autosummary_generate = glob.glob("reference/*.rst")
# -----------------------------------------------------------------------------
# Coverage checker
# -----------------------------------------------------------------------------
coverage_ignore_modules = r"""
""".split()
coverage_ignore_functions = r"""
test($|_) (some|all)true bitwise_not cumproduct pkgload
generic\.
""".split()
coverage_ignore_classes = r"""
""".split()
coverage_c_path = []
coverage_c_regexes = {}
coverage_ignore_c_items = {}
# -----------------------------------------------------------------------------
# Plots
# -----------------------------------------------------------------------------
plot_pre_code = """
import numpy as np
np.random.seed(0)
"""
plot_include_source = True
plot_formats = [('png', 100), 'pdf']
import math
phi = (math.sqrt(5) + 1) / 2
plot_rcparams = {
'font.size': 8,
'axes.titlesize': 8,
'axes.labelsize': 8,
'xtick.labelsize': 8,
'ytick.labelsize': 8,
'legend.fontsize': 8,
|
kbrebanov/ansible
|
lib/ansible/plugins/action/ce.py
|
Python
|
gpl-3.0
| 3,950
| 0.002025
|
#
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
import copy
from ansible import constants as C
from ansible.module_utils._text import to_text
from ansible.module_utils.connection import Connection
from ansible.plugins.action.normal import ActionModule as _ActionModule
from ansible.module_utils.network.cloudengine.ce import ce_provider_spec
from ansible.module_utils.network.common.utils import load_provider
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class ActionModule(_ActionModule):
def run(self, tmp=None, task_vars=None):
if self._play_context.connection != 'local':
return dict(
failed=True,
msg='invalid connection specified, expected connection=local, '
'got %s' % self._play_context.connection
)
provider = load_provider(ce_provider_spec, self._task.args)
transport = provider['transport'] or 'cli'
display.vvvv('connection transport is %s' % transport, self._play_context.remote_addr)
if transport == 'cli':
pc = copy.deepcopy(self._play_context)
pc.connection = 'network_cli'
pc.network_os = 'ce'
pc.remote_addr = provider['host'] or self._play_context.remote_addr
pc.port = int(provider['port'] or self._play_context.port or 22)
pc.remote_user = provider['username'] or self._play_context.connection_user
pc.password = provider['password'] or self._play_context.password
pc.timeout = int(provider['timeout'] or C.PERSISTENT_COMMAND_TIMEOUT)
self._task.args['provider'] = provider.update(
host=pc.remote_addr,
port=pc.port,
username=pc.remote_user,
password=pc.password,
ssh_keyfile=pc.private_key_file
)
display.vvv('using
|
connection plugin %s' % pc.connection, pc.remote_addr)
connection = self._shared_loader_obj.connection_loader.get('persistent', pc, sys.stdin)
socket_path = connection.run()
display.vvvv('socket_path: %s' % socket_pat
|
h, pc.remote_addr)
if not socket_path:
return {'failed': True,
'msg': 'unable to open shell. Please see: ' +
'https://docs.ansible.com/ansible/network_debug_troubleshooting.html#unable-to-open-shell'}
# make sure we are in the right cli context which should be
# enable mode and not config module
conn = Connection(socket_path)
out = conn.get_prompt()
while to_text(out, errors='surrogate_then_replace').strip().endswith(']'):
display.vvvv('wrong context, sending exit to device', self._play_context.remote_addr)
conn.send_command('exit')
out = conn.get_prompt()
task_vars['ansible_socket'] = socket_path
# make sure a transport value is set in args
self._task.args['transport'] = transport
result = super(ActionModule, self).run(tmp, task_vars)
return result
|
damianmoore/photo-manager
|
tests/factories.py
|
Python
|
agpl-3.0
| 2,156
| 0.000464
|
from pathlib import Path
from django.utils import timezone
import factory
from photonix.accounts.models import User
from photonix.photos.models import Library, LibraryUser, Photo, PhotoFile, Tag, PhotoTag, Task
class UserFactory(factory.django.DjangoModelFactory):
    """Factory for ``User`` accounts that have completed all onboarding steps."""
    class Meta:
        model = User
    username = 'test'
    email = 'test@example.com'
    # Onboarding flags; the attribute name mirrors the model field spelling
    # (including the "persional" typo) -- do not "fix" it here.
    has_config_persional_info = True
    has_created_library = True
    has_configured_importing = True
    has_configured_image_analysis = True
class LibraryFactory(factory.django.DjangoModelFactory):
    """Factory for ``Library`` objects with every classifier enabled and setup done."""
    class Meta:
        model = Library
    # Unique per instance: "Test Library 0", "Test Library 1", ...
    name = factory.Sequence(lambda n: f'Test Library {n}')
    classification_color_enabled = True
    classification_location_enabled = True
    classification_style_enabled = True
    classification_object_enabled = True
    classification_face_enabled = True
    setup_stage_completed = True
class LibraryUserFactory(factory.django.DjangoModelFactory):
    """Factory for the Library<->User through model; the user is the owner."""
    class Meta:
        model = LibraryUser
    library = factory.SubFactory(LibraryFactory)
    user = factory.SubFactory(UserFactory)
    owner = True
class PhotoFactory(factory.django.DjangoModelFactory):
    """Factory for ``Photo`` rows attached to a fresh ``Library``."""
    class Meta:
        model = Photo
    library = factory.SubFactory(LibraryFactory)
class PhotoFileFactory(factory.django.DjangoModelFactory):
class Meta:
model = PhotoFile
photo = factory.SubFactory(PhotoFactory)
path = str(Path(__file__).parent / 'photos' / 'snow.jpg')
mimetype = 'image/jpeg'
bytes = 1000
file_modified_at = factory.LazyAttribute(lamb
|
da o: timezone.now())
class TagFactory(factory.django.DjangoModelFactory):
class Meta:
model = Ta
|
g
library = factory.SubFactory(LibraryFactory)
name = factory.Sequence(lambda n: f'Tag {n}')
class PhotoTagFactory(factory.django.DjangoModelFactory):
    """Factory linking a ``Photo`` to a ``Tag`` via the ``PhotoTag`` through model."""
    class Meta:
        model = PhotoTag
    photo = factory.SubFactory(PhotoFactory)
    tag = factory.SubFactory(TagFactory)
class TaskFactory(factory.django.DjangoModelFactory):
    """Factory for style-classification ``Task`` rows."""
    class Meta:
        model = Task
    type = 'classify.style'
    status = 'P'  # presumably "pending" -- confirm against Task.status choices
    library = factory.SubFactory(LibraryFactory)
|
atlab/attorch
|
attorch/regularizers.py
|
Python
|
mit
| 3,598
| 0.002779
|
import numpy as np
import torch
import torch.nn as nn
from itertools import product
from torch.nn import functional as F
#import pytorch_fft.fft as fft
# def laplace():
# return np.array([[0.25, 0.5, 0.25], [0.5, -3.0, 0.5], [0.25, 0.5, 0.25]]).astype(np.float32)[None, None, ...]
def laplace():
    """Return a 3x3 discrete Laplacian kernel shaped (1, 1, 3, 3), float32."""
    kernel = np.zeros((3, 3), dtype=np.float32)
    kernel[1, 1] = 4.0
    # Four face-adjacent neighbours of the centre.
    kernel[0, 1] = kernel[2, 1] = kernel[1, 0] = kernel[1, 2] = -1.0
    return kernel[None, None, ...]
def laplace3d():
    """Return a 3x3x3 discrete Laplacian kernel shaped (1, 1, 3, 3, 3), float32."""
    kernel = np.zeros((3, 3, 3), dtype=np.float32)
    kernel[1, 1, 1] = -6.0
    # The six face-adjacent neighbours of the centre are 1.
    for axis in range(3):
        index = [1, 1, 1]
        for edge in (0, 2):
            index[axis] = edge
            kernel[tuple(index)] = 1.0
    return kernel[None, None, ...]
#def fft_smooth(grad, factor=1/4):
# """
# Tones down the gradient with (1/f)**(2 * factor) filter in the Fourier domain.
# Equivalent to low-pass filtering in the spatial domain.
#
# `grad` is an at least 2D CUDA Tensor, where the last two dimensions are treated
# as images to apply smoothening transformation.
#
# `factor` controls the strength of the fall off.
# """
# h, w = grad.size()[-2:]
# tw = np.minimum(np.arange(0, w), np.arange(w, 0, -1), dtype=np.float32)#[-(w+2)//2:]
# th = np.minimum(np.arange(0, h), np.arange(h, 0, -1), dtype=np.float32)
# t = 1 / np.maximum(1.0, (tw[None,:] ** 2 + th[:,None] ** 2) ** (factor))
# F = torch.Tensor(t / t.mean()).cuda()
# rp, ip = fft.fft2(grad.data, torch.zeros_like(grad.data))
# return Variable(fft.ifft2(rp * F, ip * F)[0])
class Laplace(nn.Module):
    """
    Applies the fixed 3x3 Laplace kernel (see ``laplace``) to a batch of
    single-channel 2D maps via ``conv2d``.
    """
    def __init__(self, padding=0):
        super().__init__()
        self._padding = padding
        # Non-trainable kernel; register_buffer makes it follow .to()/.cuda().
        self.register_buffer('filter', torch.from_numpy(laplace()))
    def forward(self, x):
        # x is expected to be (N, 1, H, W) to match the (1, 1, 3, 3) kernel.
        return F.conv2d(x, self.filter, padding=self._padding, bias=None)
class Laplace3d(nn.Module):
    """
    Applies the fixed 3x3x3 Laplace kernel (see ``laplace3d``) to a batch of
    single-channel 3D volumes via ``conv3d``.
    """
    def __init__(self):
        super().__init__()
        # Non-trainable kernel; register_buffer makes it follow .to()/.cuda().
        self.register_buffer('filter', torch.from_numpy(laplace3d()))
    def forward(self, x):
        # x is expected to be (N, 1, D, H, W) to match the (1, 1, 3, 3, 3) kernel.
        return F.conv3d(x, self.filter, bias=None)
class LaplaceL2(nn.Module):
"""
Laplace regularizer for a 2D convolutional layer.
"""
def __init__(self, padding=0):
super().__init__()
self.laplace = Laplace(padding=padding)
def forward(self, x, weights=None):
ic, oc, k1, k2 = x.size()
if weights is None:
weights = 1.0
return (self.laplace(x.view(ic * oc, 1, k1, k2)).view(ic, oc,
|
k1, k2).pow(2) * weights).mean() / 2
class LaplaceL23d(nn.Module):
"""
Laplace r
|
egularizer for a 2D convolutional layer.
"""
def __init__(self):
super().__init__()
self.laplace = Laplace3d()
def forward(self, x):
ic, oc, k1, k2, k3 = x.size()
return self.laplace(x.view(ic * oc, 1, k1, k2, k3)).pow(2).mean() / 2
class FlatLaplaceL23d(nn.Module):
    """
    L2 Laplace regularizer for 3D conv weights whose time dimension is 1:
    applies the 2D Laplace filter over the two spatial axes only.
    """
    def __init__(self):
        super().__init__()
        self.laplace = Laplace()
    def forward(self, x):
        # x: 5D filter bank (ic, oc, k1, k2, k3); k1 is the time axis.
        ic, oc, k1, k2, k3 = x.size()
        assert k1 == 1, 'time dimension must be one'
        # Drop the singleton time axis and penalize the mean squared Laplacian.
        return self.laplace(x.view(ic * oc, 1, k2, k3)).pow(2).mean() / 2
class LaplaceL1(nn.Module):
    """
    L1 Laplace regularizer for a 2D convolutional layer: mean absolute
    Laplacian of the filters.
    """
    def __init__(self, padding=0):
        super().__init__()
        self.laplace = Laplace(padding=padding)
    def forward(self, x):
        # Flatten so each filter is treated as an independent single-channel
        # image; the channel-dimension order is irrelevant after flattening.
        ic, oc, k1, k2 = x.size()
        return self.laplace(x.view(ic * oc, 1, k1, k2)).abs().mean()
|
Therp/stock-logistics-warehouse
|
__unported__/stock_available_immediately/__openerp__.py
|
Python
|
agpl-3.0
| 1,363
| 0
|
# -*- coding: utf-8 -*-
#
#
# Author: Guewen Baconnier
# Copyright 2010-2012 Camptocamp SA
# Copyright (C) 2011 Akretion Sébastien BEAU <sebastien.beau@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
{
"name": "Immediately Usable Stock Quantity",
"version": "1.0",
"depends": ["product", "stock", ],
"author": "Camptocamp",
"license": "AGPL-3",
"description": """
Compute the immediately usable
|
stock.
Immediately usable is computed : Quantity on Hand - Outgoing Stock.
""",
"website": "http://tinyerp.com/module_account.html",
"category": "Generic Modules/Stock",
"data": ["product_view.xml",
],
"active": False,
'installable'
|
: False
}
|
msanatan/organise
|
run.py
|
Python
|
mit
| 36
| 0
|
from organise i
|
mport app
app.
|
run()
|
aurex-linux/virt-manager
|
virtManager/engine.py
|
Python
|
gpl-2.0
| 41,820
| 0.001124
|
#
# Copyright (C) 2006, 2013 Red Hat, Inc.
# Copyright (C) 2006 Daniel P. Berrange <berrange@redhat.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301 USA.
#
# pylint: disable=E0611
from gi.repository import GLib
from gi.repository import GObject
from gi.repository import Gtk
# pylint: enable=E0611
import logging
import re
import Queue
import threading
import libvirt
from virtinst import util
from virtManager import packageutils
from virtManager.about import vmmAbout
from virtManager.baseclass import vmmGObject
from virtManager.clone import vmmCloneVM
from virtManager.connect import vmmConnect
from virtManager.connection import vmmConnection
from virtManager.preferences import vmmPreferences
from virtManager.manager import vmmManager
from virtManager.migrate import vmmMigrateDialog
from virtManager.details import vmmDetails
from virtManager.asyncjob import vmmAsyncJob
from virtManager.create import vmmCreate
from virtManager.host import vmmHost
from virtManager.error import vmmErrorDialog
from virtManager.systray import vmmSystray
from virtManager.delete import vmmDeleteDialog
# Enable this to get a report of leaked objects on app shutdown
# gtk3/pygobject has issues here as of Fedora 18
debug_ref_leaks = False
DETAILS_PERF = 1
DETAILS_CONFIG = 2
DETAILS_CONSOLE = 3
(PRIO_HIGH,
PRIO_LOW) = range(1, 3)
class vmmEngine(vmmGObject):
__gsignals__ = {
"conn-added": (GObject.SignalFlags.RUN_FIRST, None, [object]),
"conn-removed": (GObject.SignalFlags.RUN_FIRST, None, [str]),
}
def __init__(self):
vmmGObject.__init__(self)
self.windowConnect = None
self.windowPreferences = None
self.windowAbout = None
self.windowCreate = None
self.windowManager = None
self.windowMigrate = None
self.conns = {}
self.err = vmmErrorDialog()
self.t
|
imer = None
self.last_timeout = 0
self.systray = None
self.delete_dialog = None
self.application = Gtk.Application(
application_id="com.redhat.virt-manager",
|
flags=0)
self.application.connect("activate", self._activate)
self._appwindow = Gtk.Window()
self._tick_counter = 0
self._tick_thread_slow = False
self._tick_thread = threading.Thread(name="Tick thread",
target=self._handle_tick_queue,
args=())
self._tick_thread.daemon = True
self._tick_queue = Queue.PriorityQueue(100)
self.inspection = None
self._create_inspection_thread()
# Counter keeping track of how many manager and details windows
# are open. When it is decremented to 0, close the app or
# keep running in system tray if enabled
self.windows = 0
# Public bits set by virt-manager cli
self.skip_autostart = False
self.uri_at_startup = None
self.uri_cb = None
self.show_manager_window = True
self.init_systray()
self.add_gconf_handle(
self.config.on_stats_update_interval_changed(self.reschedule_timer))
self.add_gconf_handle(
self.config.on_view_system_tray_changed(self.system_tray_changed))
self.schedule_timer()
self.load_stored_uris()
self._tick_thread.start()
self.tick()
    def _activate(self, ignore):
        """Gtk.Application 'activate' handler: show the UI and open initial conns."""
        if self.show_manager_window:
            self.show_manager()
        else:
            # Instantiate the manager without showing it (tray-only start).
            self.get_manager()
        # Anchor the hidden root window so the application stays alive.
        self.application.add_window(self._appwindow)
        if self.uri_at_startup:
            conn = self.make_conn(self.uri_at_startup)
            self.register_conn(conn, skip_config=True)
            if conn and self.uri_cb:
                # Fire the CLI-provided callback once stats are first sampled.
                conn.connect_opt_out("resources-sampled", self.uri_cb)
            self.connect_to_uri(self.uri_at_startup)
        if not self.skip_autostart:
            self.autostart_conns()
def init_systray(self):
if self.systray:
return
self.systray = vmmSystray(self)
self.systray.connect("action-toggle-manager", self._do_toggle_manager)
self.systray.connect("action-suspend-domain", self._do_suspend_domain)
self.systray.connect("action-resume-domain", self._do_resume_domain)
self.systray.connect("action-run-domain", self._do_run_domain)
self.systray.connect("action-shutdown-domain", self._do_shutdown_domain)
self.systray.connect("action-reboot-domain", self._do_reboot_domain)
self.systray.connect("action-destroy-domain", self._do_destroy_domain)
self.systray.connect("action-reset-domain", self._do_reset_domain)
self.systray.connect("action-save-domain", self._do_save_domain)
self.systray.connect("action-show-domain", self._do_show_vm)
self.systray.connect("action-migrate-domain", self._do_show_migrate)
self.systray.connect("action-delete-domain", self._do_delete_domain)
self.systray.connect("action-clone-domain", self._do_show_clone)
self.systray.connect("action-exit-app", self.exit_app)
def system_tray_changed(self, *ignore):
systray_enabled = self.config.get_view_system_tray()
if self.windows == 0 and not systray_enabled:
# Show the manager so that the user can control the application
self.show_manager()
    def add_default_conn(self, manager):
        """Schedule detection of a default hypervisor connection.

        Does nothing when connections are already stored in config; otherwise
        defers the probe by one second so the manager window is up first.
        """
        # Only add default if no connections are currently known
        if self.config.get_conn_uris():
            return
        self.timeout_add(1000, self._add_default_conn, manager)
def _add_default_conn(self, manager):
# Manager fail message
msg = _("Could not detect a default hypervisor. Make\n"
"sure the appropriate virtualization packages\n"
"are installed (kvm, qemu, libvirt, etc.), and\n"
"that libvirtd is running.\n\n"
"A hypervisor connection can be manually\n"
"added via File->Add Connection")
logging.debug("Determining default libvirt URI")
ret = None
try:
libvirt_packages = self.config.libvirt_packages
packages = self.config.hv_packages + libvirt_packages
ret = packageutils.check_packagekit(manager, manager.err, packages)
except:
logging.exception("Error talking to PackageKit")
if ret:
tryuri = "qemu:///system"
else:
tryuri = vmmConnect.default_uri(always_system=True)
if tryuri is None:
manager.set_startup_error(msg)
return
warnmsg = _("The 'libvirtd' service will need to be started.\n\n"
"After that, virt-manager will connect to libvirt on\n"
"the next application start up.")
# Do the initial connection in an idle callback, so the
# packagekit async dialog has a chance to go away
def idle_connect():
do_start = packageutils.start_libvirtd()
connected = self.connect_to_uri(tryuri,
autoconnect=True, do_start=do_start)
if not connected and do_start:
manager.err.ok(_("Libvirt service must be started"), warnmsg)
self.idle_add(idle_connect)
def load_stored_uris(self):
uris = self.config.get_conn_uris()
if not uris:
return
logging.debug("About to
|
keelerm84/powerline
|
powerline/segments/plugin/ctrlp.py
|
Python
|
mit
| 2,722
| 0.031227
|
# vim:fileencoding=utf-8:noet
try:
import vim
except ImportError:
vim = object() # NOQA
from powerline.bindings.vim import getbufvar
from powerline.segments.vim import window_cached
@window_cached
def ctrlp(pl, side):
	'''
	Highlight groups used: ``ctrlp.regex`` or ``background``, ``ctrlp.prev`` or ``background``, ``ctrlp.item`` or ``file_name``, ``ctrlp.next`` or ``background``, ``ctrlp.marked`` or ``background``, ``ctrlp.focus`` or ``background``, ``ctrlp.byfname`` or ``background``, ``ctrlp.progress`` or ``file_name``, ``ctrlp.progress`` or ``file_name``.
	'''
	# CtrlP stores the statusline type and arguments as buffer variables;
	# dispatch to the matching module-level ctrlp_stl_<side>_<type> renderer.
	buffer_type = getbufvar('%', 'powerline_ctrlp_type')
	buffer_args = getbufvar('%', 'powerline_ctrlp_args')
	renderer = globals()['ctrlp_stl_{0}_{1}'.format(side, buffer_type)]
	return renderer(pl, *buffer_args)
def ctrlp_stl_left_main(pl, focus, byfname, regex, prev, item, next, marked):
'''
Highlight groups used: ``ctrlp.regex`` or ``background``, ``ctrlp.prev`` or ``background``, ``ctrlp.item`` or ``file_name``, ``ctrlp.next`` or ``background``, ``ctrlp.marked`` or ``background``.
'''
marked = marked[2:-1]
segments = []
if int(regex):
segments.append({
'contents':
|
'regex',
'highlight_group': ['ctrlp.regex', 'background'],
})
segments += [
{
'contents': prev + ' ',
'highlight_group': ['ctrlp.prev', 'background'],
'draw_inner_divider': True,
'priority': 40,
},
{
'contents': item,
'highlight_group': ['ctrlp.item', 'file_name'],
'draw_inner_divider': True,
'width': 10,
'align': 'c',
},
{
'contents': ' ' + next,
'highlight_group': ['ctrlp.next', 'background'],
'draw_inner_divider': True,
'priority': 40,
|
},
]
if marked != '-':
segments.append({
'contents': marked,
'highlight_group': ['ctrlp.marked', 'background'],
'draw_inner_divider': True,
})
return segments
def ctrlp_stl_right_main(pl, focus, byfname, regex, prev, item, next, marked):
	'''
	Highlight groups used: ``ctrlp.focus`` or ``background``, ``ctrlp.byfname`` or ``background``.
	'''
	# Right side of the main CtrlP statusline: current focus and search mode.
	focus_segment = {
		'contents': focus,
		'highlight_group': ['ctrlp.focus', 'background'],
		'draw_inner_divider': True,
		'priority': 50,
	}
	byfname_segment = {
		'contents': byfname,
		'highlight_group': ['ctrlp.byfname', 'background'],
		'priority': 50,
	}
	return [focus_segment, byfname_segment]
def ctrlp_stl_left_prog(pl, progress):
	'''
	Highlight groups used: ``ctrlp.progress`` or ``file_name``.
	'''
	# Left side shows a static indicator while CtrlP is scanning.
	segment = {
		'contents': 'Loading...',
		'highlight_group': ['ctrlp.progress', 'file_name'],
	}
	return [segment]
def ctrlp_stl_right_prog(pl, progress):
	'''
	Highlight groups used: ``ctrlp.progress`` or ``file_name``.
	'''
	# Right side mirrors CtrlP's own progress string.
	return [{
		'contents': progress,
		'highlight_group': ['ctrlp.progress', 'file_name'],
	}]
|
domi-id/across
|
across/res.py
|
Python
|
mit
| 2,378
| 0.002103
|
#!/usr/bin/env python3
import os
from construct import Adapter, Const, GreedyBytes, Int32ul, Struct, this
from .common import ZeroString, PreallocatedArray, test_folder, mkdir_p
from .encryption import EncryptedBlock
RES_ENCRYPTION = 23, 9782, 3391, 31  # cipher parameters for the file table -- passed to EncryptedBlock
# noinspection PyPep8,PyUnresolvedReferences
# One directory entry: fixed 16-byte name plus offset/size into the data area.
ResourceEntry = Struct(
    "name" / ZeroString(16),
    "offset" / Int32ul,
    "size" / Int32ul
)
# noinspection PyPep8,PyUnresolvedReferences
# Whole .res file: count, encrypted 150-slot entry table, magic word, payload.
RawResourceFile = Struct(
    "files_num" / Int32ul,
    "file_table" / EncryptedBlock(RES_ENCRYPTION,
                                  PreallocatedArray(150, this.files_num, ResourceEntry)),
    Const(Int32ul, 0x1490ff),
    "raw_data" / GreedyBytes
)
class ResourceFileAdapter(Adapter):
"""
Discards header and returns a dict of {file_name: file_data}
"""
HEADER_SIZE = 3608 # 4 + 150 * 24 + 4
def _decode(self, obj, context):
shift = self.HEADER_SIZE
return {f.name: obj.raw_data[f.offset - shift:f.offset + f.size - shift]
for f in obj.file_table}
def _encode(self, files, context):
file_table = []
raw_data = b""
last_offset = self.HEADER_SIZE
for file_name, file_data in files.items():
file_table.append({"name": file_name,
"offset": last_offset,
"size": len(file_data)})
last_offset += len(file_data)
raw_data += file_data
return {"files_num": len(files),
"file_table": file_table,
"raw_data": raw_data}
ResourceFile = Resourc
|
eFileAdapter(RawResourceFile)
def unpack_res(file_path, dir_path):
with open(file_path) as f:
|
data = f.read()
res = ResourceFile.parse(data)
mkdir_p(dir_path)
for file_name, file_data in res.items():
with open(os.path.join(dir_path, file_name), "wb") as f:
f.write(file_data)
def pack_res(dir_path, file_path):
    """Pack the top-level files of *dir_path* into a .res archive at *file_path*.

    Only files directly inside *dir_path* are packed (the walk breaks after
    the first level, matching the original behaviour).
    """
    res_content = {}
    for root, dirs, files in os.walk(dir_path):
        for name in files:
            # Resource payloads are binary: read as bytes so ResourceFile.build
            # can concatenate them into raw_data (text mode would fail or
            # corrupt the data on Python 3).
            with open(os.path.join(root, name), "rb") as f:
                res_content[name] = f.read()
        break
    with open(file_path, "wb") as f:
        f.write(ResourceFile.build(res_content))
if __name__ == "__main__":
import sys
test_folder(sys.argv[1], ".res", ResourceFile)
|
DailyActie/Surrogate-Model
|
01-codes/OpenMDAO-Framework-dev/openmdao.test/src/openmdao/test/plugins/foo2/foo.py
|
Python
|
mit
| 162
| 0
|
cl
|
ass Comp1Plugin(object):
def __init__(self):
self.version = '1.4'
class Comp2Plugin(ob
|
ject):
def __init__(self):
self.version = '1.4'
|
JustinWingChungHui/electionleaflets
|
electionleaflets/apps/people/models.py
|
Python
|
mit
| 1,603
| 0.001871
|
from django.db import models
from constituencies.models import Constituency
from uk_political_parties.models import Party
from elections.models import Election
class Person(models.Model):
name = models.CharField(blank=False, max_length=255)
remote_id = models.CharField(blank=True, max_length=255, null=True)
source_url = models.URLField(b
|
lank=True, null=True)
source_name = models.CharField(blank=True, max_length=100)
image_url = models.URLField(blank=True, null=True)
elections = models.ManyToManyField(Election)
parties = models.ManyToManyField(Party, through='PartyMemberships')
constituencies = models.ManyToManyField(Constituency, through='PersonConstituenci
|
es')
    @property
    def current_party(self):
        """First open-ended party membership, or None if there is none."""
        parties = self.partymemberships_set.filter(membership_end=None)
        if parties:
            return parties[0]
        # Implicitly returns None when no current membership exists.
    @property
    def current_election(self):
        """First active election; raises IndexError when none is active."""
        return self.elections.filter(active=True)[0]
    @property
    def current_constituency(self):
        """Constituency linked to this person for the current election.

        Raises IndexError when no such link (or no active election) exists.
        """
        return self.constituencies.filter(
            personconstituencies__election=self.current_election)[0]
    def __unicode__(self):
        # Python 2 string representation, e.g. "Jane Doe (1234)".
        return "%s (%s)" % (self.name, self.remote_id)
class PartyMemberships(models.Model):
    """Through model for Person<->Party with a membership date range."""
    person = models.ForeignKey(Person)
    party = models.ForeignKey(Party)
    membership_start = models.DateField()
    # A null end date marks a membership that is still current.
    membership_end = models.DateField(null=True)
class PersonConstituencies(models.Model):
    """Through model tying a Person to a Constituency for one Election."""
    person = models.ForeignKey(Person)
    constituency = models.ForeignKey(Constituency)
    election = models.ForeignKey(Election)
|
fedorlol/Tolyan
|
bot/__main__.py
|
Python
|
gpl-3.0
| 36
| 0
|
from bot.server impo
|
rt main
main
|
()
|
saisankargochhayat/algo_quest
|
hackerearth/segment_tree_problem.py
|
Python
|
apache-2.0
| 1,757
| 0.036426
|
import math
class segment_tree():
    def __init__(self,a):
        """Build a min segment tree over list *a* (kept as ``self.a``)."""
        self.a = a
        self.root = self.build(0,len(a)-1)
def build(self,left,right):
if left == right:
node = {}
node['value'] = self.a[left]
node['left'] = None
node['right'] = None
return node
else:
node = {}
mid = int((left+right)/2)
node['left'] = self.build(left,mid)
node['right'] = self.build(mid+1,right)
node['value'] = min(node['left']['value'],node['right']['value'])
return node
def update(self,node,index,new_value,left,right):
if left==right:
self.a[index] = new_value
node['value'] = new_value
else:
mid = int((left+right)/2)
if left<=index and index<=mid:
self.update(node['left'],index,new_value,left,mid)
else:
self.update(node['right'],index,new
|
_value,mid+1,right)
node['value'] = min(node['left']['value'] , node['right']['value'])
def query(self,root,start,end,left,right):
if start>right or end<left:
return float('inf')
if start<=left and right<=end:
return root['value']
mid = int((left+right)/2)
return min(self.query(root['left'],start,end,left,mid),self.query(root['right'],start,end,mid+1,right))
n,q = input().split(' ')
n,q = int(n),int(q)
a = list(map(int,input().sp
|
lit(' ')))
s = segment_tree(a)
for i in range(q):
query,left,right = input().split(' ')
left,right = int(left)-1,int(right)-1
if query == 'q':
print(s.query(s.root,left,right,0,n-1))
else:
s.update(s.root,left,right+1,0,n-1)
|
yanchen036/tensorflow
|
tensorflow/contrib/distributions/python/ops/bijectors/weibull.py
|
Python
|
apache-2.0
| 5,266
| 0.003608
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Weibull bijector."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.distributions import bijector
__all__ = [
"Weibull",
]
class Weibull(bijector.Bijector):
"""Compute `Y = g(X) = 1 - exp((-X / scale) ** concentration), X >= 0`.
This bijector maps inputs from `[0, inf]` to [0, 1]`. The inverse of the
bijector applied to a uniform random variable `X ~ U(0, 1) gives back a
random variable with the
[Weibull distribution](https://en.wikipedia.org/wiki/Weibull_distribution):
```none
Y ~ Weibull(scale, concentration)
pdf(y; scale, concentration, y >= 0) = (scale / concentration) * (
scale / concentration) ** (concentration - 1) * exp(
-(y / scale) ** concentration)
```
"""
def __init__(self,
scale=1.,
concentration=1.,
validate_args=False,
name="weibull"):
"""Instantiates the `Weibull` bijector.
Args:
scale: Positive Float-type `Tensor` that is the same dtype and is
broadcastable with `concentration`.
This is `l` in `Y = g(X) = 1 - exp((-x / l) ** k)`.
concentration: Positive Float-type `Tensor` that
|
is the same dtype and is
broadcastable with `scale`.
This is `k` in `Y = g(X) = 1 - exp((-x / l) ** k)`.
validate_args: Python `bool` indicating whether arguments should be
checked for correctness.
name: Python `str` name given to ops managed by this object.
"""
self._graph_parents = []
self._name = name
self._validate_args = validate_args
w
|
ith self._name_scope("init", values=[scale, concentration]):
self._scale = ops.convert_to_tensor(scale, name="scale")
self._concentration = ops.convert_to_tensor(
concentration, name="concentration")
check_ops.assert_same_float_dtype([self._scale, self._concentration])
if validate_args:
self._scale = control_flow_ops.with_dependencies([
check_ops.assert_positive(
self._scale,
message="Argument scale was not positive")
], self._scale)
self._concentration = control_flow_ops.with_dependencies([
check_ops.assert_positive(
self._concentration,
message="Argument concentration was not positive")
], self._concentration)
super(Weibull, self).__init__(
forward_min_event_ndims=0,
validate_args=validate_args,
name=name)
  @property
  def scale(self):
    """The `l` in `Y = g(X) = 1 - exp((-x / l) ** k)` (`Tensor`; checked
    positive when `validate_args` is set)."""
    return self._scale
  @property
  def concentration(self):
    """The `k` in `Y = g(X) = 1 - exp((-x / l) ** k)` (`Tensor`; checked
    positive when `validate_args` is set)."""
    return self._concentration
  def _forward(self, x):
    # Weibull CDF: 1 - exp(-(x / scale) ** concentration); expm1 keeps
    # precision when the exponent is near zero.
    x = self._maybe_assert_valid_x(x)
    return -math_ops.expm1(-((x / self.scale) ** self.concentration))
  def _inverse(self, y):
    # Inverse CDF: scale * (-log(1 - y)) ** (1 / concentration); log1p keeps
    # precision for small y.
    y = self._maybe_assert_valid_y(y)
    return self.scale * (-math_ops.log1p(-y)) ** (1 / self.concentration)
  def _inverse_log_det_jacobian(self, y):
    # log |d inverse(y) / dy|, expanded term-by-term in log space for
    # numerical stability.
    y = self._maybe_assert_valid_y(y)
    return (
        -math_ops.log1p(-y) +
        (1 / self.concentration - 1) * math_ops.log(-math_ops.log1p(-y)) +
        math_ops.log(self.scale / self.concentration))
  def _forward_log_det_jacobian(self, x):
    # log |d forward(x) / dx|, expanded term-by-term in log space for
    # numerical stability.
    x = self._maybe_assert_valid_x(x)
    return (
        -(x / self.scale) ** self.concentration +
        (self.concentration - 1) * math_ops.log(x) +
        math_ops.log(self.concentration) +
        -self.concentration * math_ops.log(self.scale))
  def _maybe_assert_valid_x(self, x):
    """Return `x`, with a non-negativity assertion attached when validating."""
    if not self.validate_args:
      return x
    is_valid = check_ops.assert_non_negative(
        x,
        message="Forward transformation input must be at least 0.")
    return control_flow_ops.with_dependencies([is_valid], x)
def _maybe_assert_valid_y(self, y):
if not self.validate_args:
return y
is_positive = check_ops.assert_non_negative(
y, message="Inverse transformation input must be greater than 0.")
less_than_one = check_ops.assert_less_equal(
y, constant_op.constant(1., y.dtype),
message="Inverse transformation input must be less than or equal to 1.")
return control_flow_ops.with_dependencies([is_positive, less_than_one], y)
|
jaymin-panchal/zang-python
|
zang/inboundxml/elements/play.py
|
Python
|
mit
| 599
| 0
|
# -*- coding: utf-8 -*-
"""
zang.inboundxm
|
l.elemen
|
ts.play
~~~~~~~~~~~~~~~~~~~
Module containing `Play` inbound xml element
"""
from zang.inboundxml.elements.base_node import BaseNode
class Play(BaseNode):
    """``<Play>`` InboundXML element: plays the audio file at *url*.

    ``loop`` optionally sets the repeat count (passed through verbatim).
    """
    _allowedContentClass = ()  # <Play> may not contain nested elements
    def __init__(self, url, loop=None):
        # A Play element is meaningless without a URL.
        if url is None:
            raise TypeError
        self._value = url
        self.loop = loop
        self._content = None
    @property
    def url(self):
        """The audio URL (stored as the element's value)."""
        return self._value
    @url.setter
    def url(self, value):
        # Mirror the constructor's validation: the URL may not be None.
        if value is None:
            raise TypeError
        self._value = value
|
skycucumber/Messaging-Gateway
|
webapp/venv/lib/python2.7/site-packages/twisted/words/test/test_jabbersasl.py
|
Python
|
gpl-2.0
| 8,748
| 0.002743
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
from zope.interface import implements
from twisted.internet import defer
from twisted.trial import unittest
from twisted.words.protocols.jabber import sasl, sasl_mechanisms, xmlstream, jid
from twisted.words.xish import domish
NS_XMPP_SASL = 'urn:ietf:params:xml:ns:xmpp-sasl'
class DummySASLMechanism(object):
    """
    Dummy SASL mechanism.
    This just returns the initialResponse passed on creation, stores any
    challenges and replies with an empty response.
    @ivar challenge: Last received challenge.
    @type challenge: C{unicode}.
    @ivar initialResponse: Initial response to be returned when requested
        via C{getInitialResponse} or C{None}.
    @type initialResponse: C{unicode}
    """
    implements(sasl_mechanisms.ISASLMechanism)
    challenge = None
    name = "DUMMY"
    def __init__(self, initialResponse):
        self.initialResponse = initialResponse
    def getInitialResponse(self):
        # ISASLMechanism: payload for the initial <auth/> element.
        return self.initialResponse
    def getResponse(self, challenge):
        # Record the challenge for test inspection; always answer empty.
        self.challenge = challenge
        return ""
class DummySASLInitiatingInitializer(sasl.SASLInitiatingInitializer):
    """
    Dummy SASL Initializer for initiating entities.
    This hardwires the SASL mechanism to L{DummySASLMechanism}, that is
    instantiated with the value of C{initialResponse}.
    @ivar initialResponse: The initial response to be returned by the
        dummy SASL mechanism or C{None}.
    @type initialResponse: C{unicode}.
    """
    initialResponse = None
    def setMechanism(self):
        # Skip mechanism negotiation entirely; always use the dummy.
        self.mechanism = DummySASLMechanism(self.initialResponse)
class SASLInitiatingInitializerTest(unittest.TestCase):
"""
Tests for L{sasl.SASLInitiatingInitializer}
"""
def setUp(self):
self.output = []
self.authenticator = xmlstream.Authenticator()
self.xmlstream = xmlstream.XmlStream(self.authenticator)
self.xmlstream.send = self.output.append
self.xmlstream.connectionMade()
|
self.xmlstream.dataReceived("<stream:stream xmlns='jabber:client' "
"xmlns:stream='http://etherx.jabber.org/st
|
reams' "
"from='example.com' id='12345' version='1.0'>")
self.init = DummySASLInitiatingInitializer(self.xmlstream)
    def test_onFailure(self):
        """
        Test that the SASL error condition is correctly extracted.
        """
        # Build a <failure/> element carrying a <not-authorized/> condition.
        failure = domish.Element(('urn:ietf:params:xml:ns:xmpp-sasl',
                                  'failure'))
        failure.addElement('not-authorized')
        self.init._deferred = defer.Deferred()
        self.init.onFailure(failure)
        self.assertFailure(self.init._deferred, sasl.SASLAuthError)
        # The raised SASLAuthError exposes the child element name as condition.
        self.init._deferred.addCallback(lambda e:
                                        self.assertEqual('not-authorized',
                                                         e.condition))
        return self.init._deferred
def test_sendAuthInitialResponse(self):
"""
Test starting authentication with an initial response.
"""
self.init.initialResponse = "dummy"
self.init.start()
auth = self.output[0]
self.assertEqual(NS_XMPP_SASL, auth.uri)
self.assertEqual('auth', auth.name)
self.assertEqual('DUMMY', auth['mechanism'])
self.assertEqual('ZHVtbXk=', str(auth))
def test_sendAuthNoInitialResponse(self):
"""
Test starting authentication without an initial response.
"""
self.init.initialResponse = None
self.init.start()
auth = self.output[0]
self.assertEqual('', str(auth))
def test_sendAuthEmptyInitialResponse(self):
"""
Test starting authentication where the initial response is empty.
"""
self.init.initialResponse = ""
self.init.start()
auth = self.output[0]
self.assertEqual('=', str(auth))
def test_onChallenge(self):
"""
Test receiving a challenge message.
"""
d = self.init.start()
challenge = domish.Element((NS_XMPP_SASL, 'challenge'))
challenge.addContent('bXkgY2hhbGxlbmdl')
self.init.onChallenge(challenge)
self.assertEqual('my challenge', self.init.mechanism.challenge)
self.init.onSuccess(None)
return d
def test_onChallengeEmpty(self):
"""
Test receiving an empty challenge message.
"""
d = self.init.start()
challenge = domish.Element((NS_XMPP_SASL, 'challenge'))
self.init.onChallenge(challenge)
self.assertEqual('', self.init.mechanism.challenge)
self.init.onSuccess(None)
return d
def test_onChallengeIllegalPadding(self):
"""
Test receiving a challenge message with illegal padding.
"""
d = self.init.start()
challenge = domish.Element((NS_XMPP_SASL, 'challenge'))
challenge.addContent('bXkg=Y2hhbGxlbmdl')
self.init.onChallenge(challenge)
self.assertFailure(d, sasl.SASLIncorrectEncodingError)
return d
def test_onChallengeIllegalCharacters(self):
"""
Test receiving a challenge message with illegal characters.
"""
d = self.init.start()
challenge = domish.Element((NS_XMPP_SASL, 'challenge'))
challenge.addContent('bXkg*Y2hhbGxlbmdl')
self.init.onChallenge(challenge)
self.assertFailure(d, sasl.SASLIncorrectEncodingError)
return d
def test_onChallengeMalformed(self):
"""
Test receiving a malformed challenge message.
"""
d = self.init.start()
challenge = domish.Element((NS_XMPP_SASL, 'challenge'))
challenge.addContent('a')
self.init.onChallenge(challenge)
self.assertFailure(d, sasl.SASLIncorrectEncodingError)
return d
class SASLInitiatingInitializerSetMechanismTest(unittest.TestCase):
"""
Test for L{sasl.SASLInitiatingInitializer.setMechanism}.
"""
def setUp(self):
    # Build an initiating XmlStream with a captured send() and an open
    # stream header, then attach the real initializer under test.
    self.output = []
    self.authenticator = xmlstream.Authenticator()
    self.xmlstream = xmlstream.XmlStream(self.authenticator)
    self.xmlstream.send = self.output.append
    self.xmlstream.connectionMade()
    self.xmlstream.dataReceived("<stream:stream xmlns='jabber:client' "
                                "xmlns:stream='http://etherx.jabber.org/streams' "
                                "from='example.com' id='12345' version='1.0'>")
    self.init = sasl.SASLInitiatingInitializer(self.xmlstream)
def _setMechanism(self, name):
    """
    Set up the XML Stream to have a SASL feature with the given mechanism.
    """
    # Advertise the mechanism as a stream feature, let the initializer
    # pick one, and report back which mechanism it selected.
    feature = domish.Element((NS_XMPP_SASL, 'mechanisms'))
    feature.addElement('mechanism', content=name)
    self.xmlstream.features[(feature.uri, feature.name)] = feature
    self.init.setMechanism()
    return self.init.mechanism.name
def test_anonymous(self):
    """
    Test setting ANONYMOUS as the authentication mechanism.
    """
    # Bare domain JID and no password: ANONYMOUS should be selected.
    self.authenticator.jid = jid.JID('example.com')
    self.authenticator.password = None
    name = "ANONYMOUS"
    self.assertEqual(name, self._setMechanism(name))
def test_plain(self):
    """
    Test setting PLAIN as the authentication mechanism.
    """
    # Full JID plus password: PLAIN is acceptable when advertised.
    self.authenticator.jid = jid.JID('test@example.com')
    self.authenticator.password = 'secret'
    name = "PLAIN"
    self.assertEqual(name, self._setMechanism(name))
def test_digest(self):
    """
    Test setting DIGEST-MD5 as the authentication mechanism.
    """
    # Full JID plus password: DIGEST-MD5 is acceptable when advertised.
    self.authenticator.jid = jid.JID('test@example.com')
    self.authenticator.password = 'secret'
    name = "DIGEST-MD5"
    self.assertEqual(name, self._setMechanism(name))
def test_notAcceptable(self):
"""
Test using an unacceptable SASL authentication mechanism.
"""
self.authenticator.jid = jid.JID('test@example
|
LuisUrrutia/hackerrank
|
python/introduction/python-arithmetic-operators.py
|
Python
|
mit
| 126
| 0.007937
|
if __name_
|
_ == '__main__':
a = int(raw_input())
b = int(raw_input())
print a + b
print a - b
|
print a * b
|
wcmitchell/insights-core
|
insights/parsers/manila_conf.py
|
Python
|
apache-2.0
| 1,598
| 0.001252
|
"""
Manila configuration - file ``/etc/manila/manila.conf``
=======================================================
The Manila configuration file is a standard '.ini' file and this parser uses
the ``IniConfigFile`` class to read it.
Sample configuration::
[DEFAULT]
osapi_max_limit = 1000
osapi_share_base_URL = <None>
use_forwarded_for = false
api_paste_config = api-paste.ini
state_path = /var/lib/manila
scheduler_topic = manila-scheduler
share_topic = manila-share
share_driver = manila.share.drivers.generic.GenericShareDriver
enable_v1_api = false
enable_v2_api = false
[cors]
allowed_origin = <None>
allow_credentials = true
expose_headers = Content-Type,Cache-Control,Content-Language,Expires,Last-Modified,Pragma
allow_methods = GET,POST,PUT,DELETE,OPTIONS
allow_headers = Content-Type,Cache-Control,Content-Language,Expires,Last-Modified,Pragma
Examples:
>>> conf = shared[ManilaConf]
>>> conf.sections()
['DEFAULT', 'cors']
>>> 'cors' in conf
True
>>> conf.has_option('DEFAULT', 'share_topic')
True
>>> conf.get("DEFAULT", "share_topic")
"manila-share"
>>> conf.get("DEFAULT", "enable_v2_api")
"false"
>>> conf.getboolean("DEFAULT", "enable_v2_api")
False
>>> conf.getint("DEFAULT", "osapi_max_limit")
1000
"""
from .. import parser, IniConfigFile
from insights.specs import manila_conf
@parser(manila_conf)
class ManilaConf(IniConfigFile):
    """
    Manila configuration parser class, based on the ``IniConfigFile`` class.

    All ini-parsing behaviour (``sections``, ``get``, ``getboolean``,
    ``getint``, membership tests, ...) is inherited unchanged from
    ``IniConfigFile``.
    """
    pass
|
fightingwalrus/gerbmerge
|
gerbmerge/jobs.py
|
Python
|
gpl-3.0
| 52,296
| 0.015049
|
#!/usr/bin/env python
"""
This module reads all Gerber and Excellon files and stores the
data for each job.
--------------------------------------------------------------------
This program is licensed under the GNU General Public License (GPL)
Version 3. See http://www.fsf.org for details of the license.
Rugged Circuits LLC
http://ruggedcircuits.com/gerbmerge
"""
import sys
import re
import string
import __builtin__
import copy
import types
import aptable
import config
import makestroke
import amacro
import geometry
import util
# Parsing Gerber/Excellon files is currently very brittle. A more robust
# RS274X/Excellon parser would be a good idea and allow this program to work
# robustly with more than just Eagle CAM files.
# Reminder to self:
#
# D01 -- move and draw line with exposure on
# D02 -- move with exposure off
# D03 -- flash aperture
# Patterns for Gerber RS274X file interpretation.
# (Reconstructed: drawX_pat and the "Circular interpolation" comment were
# split mid-token by extraction garbling.)
apdef_pat = re.compile(r'^%AD(D\d+)([^*$]+)\*%$')    # Aperture definition
apmdef_pat = re.compile(r'^%AM([^*$]+)\*$')          # Aperture macro definition
comment_pat = re.compile(r'G0?4[^*]*\*')             # Comment (GerbTool comment omits the 0)
tool_pat = re.compile(r'(D\d+)\*')                   # Aperture selection
gcode_pat = re.compile(r'G(\d{1,2})\*?')             # G-codes
drawXY_pat = re.compile(r'X([+-]?\d+)Y([+-]?\d+)D0?([123])\*')   # Drawing command
drawX_pat = re.compile(r'X([+-]?\d+)D0?([123])\*')   # Drawing command, Y is implied
drawY_pat = re.compile(r'Y([+-]?\d+)D0?([123])\*')   # Drawing command, X is implied
format_pat = re.compile(r'%FS(L|T)?(A|I)(N\d+)?(X\d\d)(Y\d\d)\*%')  # Format statement
layerpol_pat = re.compile(r'^%LP[CD]\*%')            # Layer polarity (D=dark, C=clear)

# Circular interpolation drawing commands (from Protel)
cdrawXY_pat = re.compile(r'X([+-]?\d+)Y([+-]?\d+)I([+-]?\d+)J([+-]?\d+)D0?([123])\*')
cdrawX_pat = re.compile(r'X([+-]?\d+)I([+-]?\d+)J([+-]?\d+)D0?([123])\*')  # Y is implied
cdrawY_pat = re.compile(r'Y([+-]?\d+)I([+-]?\d+)J([+-]?\d+)D0?([123])\*')  # X is implied
IgnoreList = ( \
# These are for Eagle, and RS274X files in general
re.compile(r'^%OFA0B0\*%$'),
re.compile(r'^%IPPOS\*%'),
re.compile(r'^%AMOC8\*$'), # Eagle's octagon defined by macro with a $1 parameter
re.compile(r'^5,1,8,0,0,1\.08239X\$1,22\.5\*$'), # Eagle's octagon, 22.5 degree rotation
re.compile(r'^5,1,8,0,0,1\.08239X\$1,0\.0\*$'), # Eagle's octagon, 0.0 degree rotation
re.compile(r'^\*?%$'),
re.compile(r'^M0?2\*$'),
# These additional ones are for Orcad Layout, PCB, Protel, etc.
re.compile(r'\*'), # Empty statement
re.compile(r'^%IN.*\*%'),
re.compile(r'^%ICAS\*%'), # Not in RS274X spec.
re.compile(r'^%MOIN\*%'),
re.compile(r'^%ASAXBY\*%'),
re.compile(r'^%AD\*%'), # GerbTool empty aperture definition
re.compile(r'^%LN.*\*%') # Layer name
)
# Patterns for Excellon interpretation
xtool_pat = re.compile(r'^(T\d+)$') # Tool selection
xydraw_pat = re.compile(r'^X([+-]?\d+)Y([+-]?\d+)$') # Plunge command
xdraw_pat = re.compile(r'^X([+-]?\d+)$') # Plunge command, repeat last Y value
ydraw_pat = re.compile(r'^Y([+-]?\d+)$') # Plunge command, repeat last X value
xtdef_pat = re.compile(r'^(T\d+)(?:F\d+)?(?:S\d+)?C([0-9.]+)$') # Tool+diameter definition with optional
# feed/speed (for Protel)
xtdef2_pat = re.compile(r'^(T\d+)C([0-9.]+)(?:F\d+)?(?:S\d+)?$') # Tool+diameter definition with optional
# feed/speed at the end (for OrCAD)
xzsup_pat = re.compile(r'^INCH,([LT])Z$') # Leading/trailing zeros INCLUDED
XIgnoreList = ( \
re.compile(r'^%$'),
re.compile(r'^M30$'), # End of job
re.compile(r'^M48$'), # Program header to first %
re.compile(r'^M72$') # Inches
)
# A Job is a single input board. It is expected to have:
# - a board outline file in RS274X format
# - several (at least one) Gerber files in RS274X format
# - a drill file in Excellon format
#
# The board outline and Excellon filenames must be given separately.
# The board outline file determines the extents of the job.
class Job:
def __init__(self, name):
self.name = name
# Minimum and maximum (X,Y) absolute co-ordinates encountered
# in GERBER data only (not Excellon). Note that coordinates
# are stored in hundred-thousandsths of an inch so 9999999 is 99.99999
# inches.
self.maxx = self.maxy = -9999999 # in the case all coordinates are < 0, this will prevent maxx and maxy from defaulting to 0
self.minx = self.miny = 9999999
# Aperture translation table relative to GAT. This dictionary
# has as each key a layer name for the job. Each key's value
# is itself a dictionary where each key is an aperture in the file.
# The value is the key in the GAT. Example:
# apxlat['TopCopper']['D10'] = 'D12'
# apxlat['TopCopper']['D11'] = 'D15'
# apxlat['BottomCopper']['D10'] = 'D15'
self.apxlat = {}
# Aperture macro translation table relative to GAMT. This dictionary
# has as each key a layer name for the job. Each key's value
# is itself a dictionary where each key is an aperture macro name in the file.
# The value is the key in the GAMT. Example:
# apxlat['TopCopper']['THD10X'] = 'M1'
# apxlat['BottomCopper']['AND10'] = 'M5'
self.apmxlat = {}
# Commands are one of:
# A. strings for:
# - aperture changes like "D12"
# - G-code commands like "G36"
# - RS-274X commands like "%LPD*%" that begin with '%'
# B. (X,Y,D) triples comprising X,Y integers in the range 0 through 999999
# and draw commands that are either D01, D02, or D03. The character
# D in the triple above is the integer 1, 2, or 3.
# C. (X,Y,I,J,D,s) 6-tuples comprising X,Y,I,J integers in the range 0 through 999999
# and D as with (X,Y,D) triples. The 's' integer is non-zero to indicate that
# the (I,J) tuple is a SIGNED offset (for multi-quadrant circular interpolation)
# else the tuple is unsigned.
#
# This variable is, as for apxlat, a dictionary keyed by layer name.
self.commands = {}
# This dictionary stores all GLOBAL apertures actually needed by this
# layer, i.e., apertures specified prior to draw commands. The dictionary
# is indexed by layer name, and each dictionary entry is a list of aperture
# code strings, like 'D12'. This dictionary helps us to figure out the
# minimum number of apertures that need to be written out in the Gerber
# header of the merged file. Once again, the list of apertures refers to
# GLOBAL aperture codes in the GAT, not ones local to this layer.
self.apertures = {}
# Excellon commands are grouped by tool number in a dictionary.
# This is to help sorting all jobs and writing out all plunge
# commands for a single tool.
#
# The key to this dictionary is the full tool name, e.g., T03
# as a string. Each command is an (X,Y) integer tuple.
self.xcommands = {}
# This is a dictionary mapping LOCAL tool names (e.g., T03) to diameters
# in inches for THIS JOB. This dictionary will be initially empty
# for old-style Excellon files with no embedded tool sizes. The
# main program will construct this dictionary from the global tool
# table in this case, once all jobs have been read in.
self.xdiam = {}
# This is a mapping from tool name to diameter for THIS JOB
self.ToolList = None
# How many times to replicate this job if using auto-placement
self.Repeat = 1
# How many decimal digits of precision there are in the Excellon file.
# A value greater than 0 overrides the global ExcellonDecimals setting
# for this file, allowing jobs with different Excellon decimal settings
# to be combined.
self.ExcellonDecimals = 0 # 0 means global value prevails
def width_in(self):
"Return width in INCHES"
return float
|
sailfish-sdk/sailfish-qtcreator
|
tests/system/suite_tools/tst_codepasting/test.py
|
Python
|
gpl-3.0
| 11,988
| 0.004922
|
############################################################################
#
# Copyright (C) 2016 The Qt Company Ltd.
# Contact: https://www.qt.io/licensing/
#
# This file is part of Qt Creator.
#
# Commercial License Usage
# Licensees holding valid commercial Qt licenses may use this file in
# accordance with the commercial license agreement provided with the
# Software or, alternatively, in accordance with the terms contained in
# a written agreement between you and The Qt Company. For licensing terms
# and conditions see https://www.qt.io/terms-conditions. For further
# information use the contact form at https://www.qt.io/contact-us.
#
# GNU General Public License Usage
# Alternatively, this file may be used under the terms of the GNU
# General Public License version 3 as published by the Free Software
# Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
# included in the packaging of this file. Please review the following
# information to ensure the GNU General Public License requirements will
# be met: https://www.gnu.org/licenses/gpl-3.0.html.
#
############################################################################
source("../../shared/qtcreator.py")
import random
from datetime import date
def __platformToBeRunToday__():
    # Rotate the platform allowed to paste today based on the day of the
    # month, so each OS family gets every third day. Note: ('Linux') and
    # ('Darwin') are plain strings, not tuples — only the Windows entry
    # is a real tuple.
    return (('Linux'), ('Darwin'), ('Microsoft', 'Windows'))[date.today().day % 3]
# Be careful with Pastebin.Com, there are only 10 pastes per 24h
# for all machines using the same IP-address like you.
skipPastingToPastebinCom = platform.system() not in __platformToBeRunToday__()

# Display names of the supported code pasting services, as shown in the
# protocol combo box of the paste/fetch dialogs.
NAME_KDE = "Paste.KDE.Org"
NAME_PBCA = "Pastebin.Ca"
NAME_PBCOM = "Pastebin.Com"
NAME_PCXYZ = "Pastecode.Xyz"

# Message used when the remote service itself misbehaves.
serverProblems = "Server side problems."
def invalidPasteId(protocol):
    """Return the id value that marks a failed paste for the given protocol."""
    # Paste.KDE.Org reports failures as None; every other service uses -1.
    return None if protocol == NAME_KDE else -1
def closeHTTPStatusAndPasterDialog(protocol, pasterDialog):
    # Dismiss an HTTP-status message box raised for the given protocol and
    # cancel the paster dialog. Returns True when the box reported
    # 'Service Unavailable' (server-side problem), False otherwise.
    try:
        mBoxStr = "{type='QMessageBox' unnamed='1' visible='1' windowTitle?='%s *'}" % protocol
        mBox = waitForObject(mBoxStr, 1000)
        text = str(mBox.text)
        # close message box and paster window
        clickButton("{type='QPushButton' text='Cancel' visible='1' window=%s}" % mBoxStr)
        clickButton("{type='QPushButton' text='Cancel' visible='1' window='%s'}" % pasterDialog)
        if 'Service Unavailable' in text:
            test.warning(text)
            return True
    except:
        t,v = sys.exc_info()[:2]
        test.warning("An exception occurred in closeHTTPStatusAndPasterDialog(): %s(%s)"
                     % (str(t), str(v)))
    # NOTE(review): if waitForObject() above raised before 'text' was bound,
    # this log call would itself raise NameError — confirm intended flow.
    test.log("Closed dialog without expected error.", text)
    return False
def pasteFile(sourceFile, protocol):
    # Open sourceFile in Qt Creator, tag it with a unique timestamp comment,
    # and paste it to the given code pasting service. Returns a
    # (pasteId, description, pastedText) triple; pasteId is None or a
    # protocol-specific invalid id (see invalidPasteId) when pasting failed.
    # Raises Exception(serverProblems) on server-side failures.
    def resetFiles():
        # Revert the modified editor content and close all open documents.
        clickButton(waitForObject(":*Qt Creator.Clear_QToolButton"))
        invokeMenuItem('File', 'Revert "main.cpp" to Saved')
        clickButton(waitForObject(":Revert to Saved.Proceed_QPushButton"))
        snooze(1) # "Close All" might be disabled
        invokeMenuItem("File", "Close All")
    aut = currentApplicationContext()
    invokeMenuItem("File", "Open File or Project...")
    selectFromFileDialog(sourceFile)
    editor = waitForObject(":Qt Creator_CppEditor::Internal::CPPEditorWidget")
    jumpToFirstLine(editor)
    # Timestamp comment makes each run's pasted content unique.
    typeLines(editor, "// tst_codepasting %s" % datetime.utcnow())
    sourceText = editor.plainText
    invokeMenuItem("Tools", "Code Pasting", "Paste Snippet...")
    selectFromCombo(":Send to Codepaster.protocolBox_QComboBox", protocol)
    pasteEditor = waitForObject(":stackedWidget.plainTextEdit_QPlainTextEdit")
    test.compare(pasteEditor.plainText, sourceText, "Verify that dialog shows text from the editor")
    description = "Description %s" % datetime.utcnow()
    type(waitForObject(":uiDescription_QLineEdit"), description)
    typeLines(pasteEditor, "// tst_codepasting %s" % datetime.utcnow())
    pastedText = str(pasteEditor.plainText)
    expiry = waitForObject(":Send to Codepaster.qt_spinbox_lineedit_QLineEdit")
    expiryDays = random.randint(1, 10)
    replaceEditorContent(expiry, "%d" % expiryDays)
    test.log("Using expiry of %d days." % expiryDays)
    # make sure to read all former errors (they won't get read twice)
    aut.readStderr()
    clickButton(waitForObject(":Send to Codepaster.Paste_QPushButton"))
    try:
        outputWindow = waitForObject(":Qt Creator_Core::OutputWindow")
        # The service echoes the paste URL to the output pane on success.
        waitFor("'https://' in str(outputWindow.plainText)", 20000)
        output = str(outputWindow.plainText).splitlines()[-1]
    except:
        output = ""
    if closeHTTPStatusAndPasterDialog(protocol, ':Send to Codepaster_CodePaster::PasteView'):
        resetFiles()
        raise Exception(serverProblems)
    stdErrOut = aut.readStderr()
    match = re.search("^%s protocol error: (.*)$" % protocol, stdErrOut, re.MULTILINE)
    if match:
        pasteId = invalidPasteId(protocol)
        if "Internal Server Error" in match.group(1):
            test.warning("Server Error - trying to continue...")
        else:
            test.fail("%s protocol error: %s" % (protocol, match.group(1)))
    elif output.strip() == "":
        pasteId = invalidPasteId(protocol)
    elif "FAIL:There was an error communicating with the database" in output:
        resetFiles()
        raise Exception(serverProblems)
    elif "Post limit, maximum pastes per 24h reached" in output:
        test.warning("Maximum pastes per day exceeded.")
        pasteId = None
    else:
        # Paste URL looks like https://host/<id>; keep the trailing id.
        pasteId = output.rsplit("/", 1)[1]
    resetFiles()
    return pasteId, description, pastedText
def fetchSnippet(protocol, description, pasteId, skippedPasting):
    # Fetch a pasted snippet back from the given code pasting service and
    # open it. Returns the paste id that was fetched, or -1 when the
    # snippet list could not be retrieved at all.
    # (Reconstructed: the test.verify() call below was split mid-identifier
    # by extraction garbling.)
    foundSnippet = True
    invokeMenuItem("Tools", "Code Pasting", "Fetch Snippet...")
    selectFromCombo(":PasteSelectDialog.protocolBox_QComboBox", protocol)
    try:
        pasteModel = waitForObject(":PasteSelectDialog.listWidget_QListWidget").model()
    except:
        closeHTTPStatusAndPasterDialog(protocol, ':PasteSelectDialog_CodePaster::PasteSelectDialog')
        return -1
    waitFor("pasteModel.rowCount() > 1", 20000)
    if (not skippedPasting and not protocol == NAME_PBCA
        and not any(map(lambda str:pasteId in str, dumpItems(pasteModel)))):
        # Paste not listed yet: give the server a moment and refresh once.
        test.warning("Fetching too fast for server of %s - waiting 3s and trying to refresh." % protocol)
        snooze(3)
        clickButton("{text='Refresh' type='QPushButton' unnamed='1' visible='1' "
                    "window=':PasteSelectDialog_CodePaster::PasteSelectDialog'}")
        waitFor("pasteModel.rowCount() == 1", 1000)
        waitFor("pasteModel.rowCount() > 1", 20000)
    if protocol == NAME_PBCA:
        # Pastebin.Ca truncates descriptions in its listing.
        description = description[:32]
    if pasteId == -1:
        # No known id: locate the paste by its description instead.
        try:
            pasteLine = filter(lambda str:description in str, dumpItems(pasteModel))[0]
            pasteId = pasteLine.split(" ", 1)[0]
        except:
            test.fail("Could not find description line in list of pastes from %s" % protocol)
            clickButton(waitForObject(":PasteSelectDialog.Cancel_QPushButton"))
            return pasteId
    else:
        try:
            pasteLine = filter(lambda str:pasteId in str, dumpItems(pasteModel))[0]
            if protocol in (NAME_PBCA, NAME_PBCOM):
                test.verify(description in pasteLine,
                            "Verify that line in list of pastes contains the description")
        except:
            if protocol == NAME_PBCA:
                test.xfail("%s does not list the pasted snippet on time" % NAME_PBCA)
            elif not skippedPasting:
                test.fail("Could not find id '%s' in list of pastes from %s" % (pasteId, protocol))
            foundSnippet = False
            replaceEditorContent(waitForObject(":PasteSelectDialog.pasteEdit_QLineEdit"), pasteId)
    if foundSnippet:
        # Escape dots so the item locator treats them literally.
        pasteLine = pasteLine.replace(".", "\\.")
        waitForObjectItem(":PasteSelectDialog.listWidget_QListWidget", pasteLine)
        clickItem(":PasteSelectDialog.listWidget_QListWidget", pasteLine, 5, 5, 0, Qt.LeftButton)
    clickButton(waitForObject(":PasteSelectDialog.OK_QPushButton"))
    return pasteId
def main():
startQC()
if not startedWithoutPluginError():
return
protocolsTo
|
olivierlemasle/murano
|
murano/tests/unit/dsl/test_statics.py
|
Python
|
apache-2.0
| 4,079
| 0
|
# Copyright (c) 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from murano.dsl import dsl_types
from murano.dsl import exceptions
from murano.tests.unit.dsl.foundation import object_model as om
from murano.tests.unit.dsl.foundation import test_case
class TestStatics(test_case.DslTestCase):
    """Tests for MuranoPL static methods and static properties.

    (Reconstructed: two statements were split mid-identifier by extraction
    garbling; all assertions are otherwise unchanged.)
    """

    def setUp(self):
        super(TestStatics, self).setUp()
        self._runner = self.new_runner(
            om.Object('test.TestStatics', staticProperty2='INVALID'))

    def test_call_static_method_on_object(self):
        self.assertEqual(123, self._runner.testCallStaticMethodOnObject())

    def test_call_static_method_on_class_name(self):
        self.assertEqual(123, self._runner.testCallStaticMethodOnClassName())

    def test_call_static_method_on_class_name_with_ns(self):
        self.assertEqual(
            123, self._runner.testCallStaticMethodOnClassNameWithNs())

    def test_call_static_method_from_another_method(self):
        self.assertEqual(
            123 * 5, self._runner.testCallStaticMethodFromAnotherMethod())

    def test_static_this(self):
        self.assertIsInstance(
            self._runner.testStaticThis(), dsl_types.MuranoTypeReference)

    def test_no_access_to_instance_properties(self):
        self.assertRaises(
            exceptions.NoPropertyFound,
            self._runner.testNoAccessToInstanceProperties)

    def test_access_static_property_from_instance_method(self):
        self.assertEqual(
            'xxx', self._runner.testAccessStaticPropertyFromInstanceMethod())

    def test_access_static_property_from_static_method(self):
        self.assertEqual(
            'xxx', self._runner.testAccessStaticPropertyFromStaticMethod())

    def test_modify_static_property_using_dollar(self):
        self.assertEqual(
            'qq', self._runner.testModifyStaticPropertyUsingDollar())

    def test_modify_static_property_using_this(self):
        self.assertEqual(
            'qq', self._runner.testModifyStaticPropertyUsingThis())

    def test_modify_static_property_using_class_name(self):
        self.assertEqual(
            'qq', self._runner.testModifyStaticPropertyUsingClassName())

    def test_modify_static_property_using_ns_class_name(self):
        self.assertEqual(
            'qq', self._runner.testModifyStaticPropertyUsingNsClassName())

    def test_modify_static_property_using_type_func(self):
        self.assertEqual(
            'qq', self._runner.testModifyStaticPropertyUsingTypeFunc())

    def test_property_is_static(self):
        self.assertEqual('qq', self._runner.testPropertyIsStatic())

    def test_static_properties_excluded_from_object_model(self):
        self.assertEqual(
            'staticProperty',
            self._runner.testStaticPropertisNotLoaded())

    def test_type_is_singleton(self):
        self.assertTrue(self._runner.testTypeIsSingleton())

    def test_static_property_inheritance(self):
        self.assertEqual(
            'baseStaticProperty' * 3,
            self._runner.testStaticPropertyInheritance())

    def test_static_property_override(self):
        self.assertEqual(
            [
                'conflictingStaticProperty-child',
                'conflictingStaticProperty-child',
                'conflictingStaticProperty-base',
                'conflictingStaticProperty-child',
                'conflictingStaticProperty-base'
            ], self._runner.testStaticPropertyOverride())

    def test_type_info_of_type(self):
        self.assertTrue(self._runner.testTypeinfoOfType())
|
RensaProject/nodebox_linguistics_extended
|
nodebox_linguistics_extended/parser/nltk_lite/contrib/paradigm.py
|
Python
|
gpl-2.0
| 24,313
| 0.00473
|
# Natural Language Toolkit: Paradigm Visualisation
#
# Copyright (C) 2005 University of Melbourne
# Author: Will Hardy
# URL: <http://nltk.sf.net>
# For license information, see LICENSE.TXT
# Front end to a Python implementation of David
# Penton's paradigm visualisation model.
# Author:
#
# Run: To run, first load a paradigm using
# >>> a = paradigm('paradigm.xml')
# And run the system to produce output
# >>> a.show('table(one, two, three)')
#
# Other methods:
# demo() # a quick demonstration
# a.setFormat('html') # output is formatted as HTML
# a.setFormat('text') # output is formatted as HTML
# a.setOutput('filename') # output is sent to filename
# a.setOutput('term') # output is sent to terminal
from xml.dom.ext.reader import Sax2
from paradigmquery import ParadigmQuery
import re, os
class Paradigm(object):
"""
Paradigm visualisation class
*Usage*
Simple usage of the system would be:
>>> from paradigm import Paradigm
>>> p = Paradigm('german.xml')
>>> p.show('table(case, gender/number, content)')
Here, a table is generated in HTML format and sent to the file ``output.html``.
The table can be viewed in a browser, and is updated for every new query.
A more advanced usage of the system is show below.
The user simply creates a paradigm p, changes the output format and location,
and calls a dedicated prompt to enter the query:
>>> from paradigm import Paradigm
>>> p = Paradigm('german.xml')
>>> p.setFormat('html')
>>> p.setOutput('test.html')
>>> p.setCSS('simple.css')
>>> p.prompt()
> table(case, gender/number, content)
Please note, however, that plain text tables have not yet been implemented.
"""
def __init__(self, p_filename):
    """
    Load the given paradigm

    p_filename is a string representing the filename of a paradigm xml file
    """
    # Store input paradigm filename
    self.loadParadigm(p_filename)
    # set default values (text output, to terminal)
    # NOTE(review): the comment above says text/terminal, but the defaults
    # below are HTML written to 'output.html' — confirm which is intended.
    self.format = "html"
    self.output = "output.html"
    self.css = "simple.css"
def prompt(self):
    """
    Changes to a dedicated prompt
    Type 'exit' or 'quit' to exit
    """
    s = ""
    while s != "exit":
        # Default to "exit" so an EOF (Ctrl-D) terminates the loop.
        s = "exit"
        try: s = raw_input(">")
        except EOFError:
            print s
        if s == "exit":
            return
        if s == "quit":
            return
        if s:
            # Strip trailing '!' / '.' before running the query.
            while s[-1] in "!.": s = s[:-1]
            self.show(s)
def show(self, p_string):
"""
Process and display the given query
"""
try:
# parse the query
parse = ParadigmQuery(p_string)
except:
print "Could not parse query."
return
try:
# Fetch the parsed tree and make presentation
result = Sentence(self, parse.getTree())
# Check that a presentation actually exists
if result == None:
raise Error
except:
print "Sorry, no result can be returned"
return
try:
# Print HTML output if format is set, otherwise plain text
if self.format == "html":
output = '<html>\n'
# Include CSS if we need to
if self.css <> None:
output += '<link rel="stylesheet" href="'
output += self.css
output += '" type="text/css" media="screen" />\n'
output += '<body>'
outpu
|
t += "<table cellspacing=\"0\" cellpadding=\"0\">"
output += result.getHTML()
output += "</table>\n"
output += '</body></html>\n'
els
|
e:
output = result.getText()
except:
output = None
print "--no output--"
return
# Print to terminal if output is set, otherwise to file
if self.output == "term":
print output
else:
print "Output written to file:", self.output
f = open(self.output, 'w')
f.write(output)
# Return happily
return
def setFormat(self, p_string=None):
    """
    Set the output format: "html" or "text"
    """
    # Default value
    if p_string == None:
        p_string = "text"
    # set to html if requested, otherwise text
    if p_string == "html":
        self.format = "html"
    elif p_string == "text":
        self.format = "text"
    else:
        # Unknown value: warn and fall back to plain text.
        print "Unknown format:", p_string
        print "Valid formats are: text, html"
        print "Setting format = text"
        self.format = "text"
def setCSS(self, p_string=None):
"""
Set the file location for a Cascading Stylesheet: None or filename
This allows for simple formatting
"""
if p_string <> None:
print "Using CSS file:", p_string
self.output = p_string
def setOutput(self, p_string=None):
    """
    Set the output location: "term" or filename
    """
    # Default
    if p_string == None:
        p_string = "term"
    # set to term if requested, otherwise filename
    if p_string == "term":
        print "Directing output to terminal"
    else:
        print "Directing output to file:", p_string
    # Note: the literal string "term" is stored as a sentinel checked by
    # show(); any other value is treated as a filename.
    self.output = p_string
def loadParadigm(self, p_filename ):
"""
Load the given paradigm (XML file)
Attributes are stored in self.attributes
Data are stored in self.data
They can be accessed as follows:
self.attributes['gender'] # list of genders
self.data[6]['gender'] # gender for the sixth data object
self.data[6]['content'] # content for the sixth data object
"""
from nodebox_linguistics_extended.parser.nltk_lite.corpora import get_basedir
basedir = get_basedir()
# Look for the file
try_filename = os.path.join(get_basedir(), "paradigms", p_filename)
try:
f = open(try_filename)
p_filename = try_filename
except IOError:
print "Cannot find file"
return None
f.close()
# These variables will be set by this method
self.attributes = {} # A new dictionary
self.data = [] # A new list
# XML admin: create Reader object, parse document
reader = Sax2.Reader()
doc = reader.fromStream(p_filename)
# Cycle through the given attributes and add them to self.attributes
# for <name> in <attributes>
attributes = doc.getElementsByTagName('attributes')[0]
for name in attributes.getElementsByTagName('name'):
# Setup a list of attribute values
tmp_list = []
# for each value under name, store in list
for value in name.getElementsByTagName('value'):
tmp_list.append(value.getAttribute('value'))
# Store list of values in dictionary
self.attributes[name.getAttribute('name')] = tmp_list
# Cycle through data objects and add them to self.data
# for <form> in <paradigm>
forms = doc.getElementsByTagName('paradigm')[0]
for form in forms.getElementsByTagName('form'):
# Initialise a temporary dictionary
tmp_dict = {}
for value in form.getElementsByTagName('attribute'):
tmp_dict[value.getAttribute('name')] = value.getAttribute('value')
# Add the new dictionary to the data list
self.data.append(tmp_dict)
# Talk to the user
print "Paradigm information successfully loaded from file:", p_filename
# State the number and print out a list of attributes
print " "*4 + str(len(self.attributes)) + " attributes imported:",
for att in self.attributes:
pri
|
aron-bordin/kivy
|
kivy/core/window/__init__.py
|
Python
|
mit
| 57,469
| 0.000331
|
# pylint: disable=W0611
# coding: utf-8
'''
Window
======
Core class for creating the default Kivy window. Kivy supports only one window
per application: please don't try to create more than one.
'''
__all__ = ('Keyboard', 'WindowBase', 'Window')
from os.path import join, exists
from os import getcwd
from kivy.core import core_select_lib
from kivy.clock import Clock
from kivy.config import Config
from kivy.logger import Logger
from kivy.base import EventLoop, stopTouchApp
from kivy.modules import Modules
from kivy.event import EventDispatcher
from kivy.properties import ListProperty, ObjectProperty, AliasProperty, \
NumericProperty, OptionProperty, StringProperty, BooleanProperty
from kivy.utils import platform, reify, deprecated
from kivy.context import get_current_context
from kivy.uix.behaviors import FocusBehavior
from kivy.setupconfig import USE_SDL2
from kivy.graphics.transformation import Matrix
# late import
VKeyboard = None
android = None
Animation = None
class Keyboard(EventDispatcher):
    '''Keyboard interface that is returned by
    :meth:`WindowBase.request_keyboard`. When you request a keyboard,
    you'll get an instance of this class. Whatever the keyboard input is
    (system or virtual keyboard), you'll receive events through this
    instance.

    :Events:
        `on_key_down`: keycode, text, modifiers
            Fired when a new key is pressed down
        `on_key_up`: keycode
            Fired when a key is released (up)

    Here is an example of how to request a Keyboard in accordance with the
    current configuration:

    .. include:: ../../examples/widgets/keyboardlistener.py
        :literal:

    '''

    # Keycodes mapping, between str <-> int. These keycodes are
    # currently taken from pygame.key. But when a new provider will be
    # used, it must do the translation to these keycodes too.
    keycodes = {
        # specials keys
        'backspace': 8, 'tab': 9, 'enter': 13, 'rshift': 303, 'shift': 304,
        'alt': 308, 'rctrl': 306, 'lctrl': 305,
        'super': 309, 'alt-gr': 307, 'compose': 311, 'pipe': 310,
        'capslock': 301, 'escape': 27, 'spacebar': 32, 'pageup': 280,
        'pagedown': 281, 'end': 279, 'home': 278, 'left': 276, 'up':
        273, 'right': 275, 'down': 274, 'insert': 277, 'delete': 127,
        'numlock': 300, 'print': 144, 'screenlock': 145, 'pause': 19,
        # a-z keys
        'a': 97, 'b': 98, 'c': 99, 'd': 100, 'e': 101, 'f': 102, 'g': 103,
        'h': 104, 'i': 105, 'j': 106, 'k': 107, 'l': 108, 'm': 109, 'n': 110,
        'o': 111, 'p': 112, 'q': 113, 'r': 114, 's': 115, 't': 116, 'u': 117,
        'v': 118, 'w': 119, 'x': 120, 'y': 121, 'z': 122,
        # 0-9 keys
        '0': 48, '1': 49, '2': 50, '3': 51, '4': 52,
        '5': 53, '6': 54, '7': 55, '8': 56, '9': 57,
        # numpad
        'numpad0': 256, 'numpad1': 257, 'numpad2': 258, 'numpad3': 259,
        'numpad4': 260, 'numpad5': 261, 'numpad6': 262, 'numpad7': 263,
        'numpad8': 264, 'numpad9': 265, 'numpaddecimal': 266,
        'numpaddivide': 267, 'numpadmul': 268, 'numpadsubstract': 269,
        'numpadadd': 270, 'numpadenter': 271,
        # F1-15
        'f1': 282, 'f2': 283, 'f3': 284, 'f4': 285, 'f5': 286, 'f6': 287,
        'f7': 288, 'f8': 289, 'f9': 290, 'f10': 291, 'f11': 292, 'f12': 293,
        'f13': 294, 'f14': 295, 'f15': 296,
        # other keys
        '(': 40, ')': 41,
        '[': 91, ']': 93,
        '{': 123, '}': 125,
        ':': 58, ';': 59,
        '=': 61, '+': 43,
        '-': 45, '_': 95,
        '/': 47, '*': 42,
        '?': 47,
        '`': 96, '~': 126,
        '´': 180, '¦': 166,
        '\\': 92, '|': 124,
        '"': 34, "'": 39,
        ',': 44, '.': 46,
        '<': 60, '>': 62,
        '@': 64, '!': 33,
        '#': 35, '$': 36,
        '%': 37, '^': 94,
        '&': 38, '¬': 172,
        '¨': 168, '…': 8230,
        'ù': 249, 'à': 224,
        'é': 233, 'è': 232,
    }

    __events__ = ('on_key_down', 'on_key_up', 'on_textinput')

    def __init__(self, **kwargs):
        super(Keyboard, self).__init__()

        #: Window which the keyboard is attached too
        self.window = kwargs.get('window', None)

        #: Callback that will be called when the keyboard is released
        self.callback = kwargs.get('callback', None)

        #: Target that have requested the keyboard
        self.target = kwargs.get('target', None)

        #: VKeyboard widget, if allowed by the configuration
        self.widget = kwargs.get('widget', None)

    def on_key_down(self, keycode, text, modifiers):
        # default event handler; bind or override to react to key presses
        pass

    def on_key_up(self, keycode):
        # default event handler; bind or override to react to key releases
        pass

    def on_textinput(self, text):
        # default event handler for raw text input (IME, virtual keyboards)
        pass

    def release(self):
        '''Call this method to release the current keyboard.
        This will ensure that the keyboard is no longer attached to your
        callback.'''
        if self.window:
            self.window.release_keyboard(self.target)

    def _on_window_textinput(self, instance, text):
        # forward a system-window textinput event to our own dispatcher
        return self.dispatch('on_textinput', text)

    def _on_window_key_down(self, instance, keycode, scancode, text,
                            modifiers):
        keycode = (keycode, self.keycode_to_string(keycode))
        if text == '\x04':
            # NOTE(review): '\x04' (EOT) triggers a keyboard-height refresh
            # on the global Window instead of being dispatched -- presumably
            # a platform-specific virtual-keyboard signal; confirm upstream.
            Window.trigger_keyboard_height()
            return
        return self.dispatch('on_key_down', keycode, text, modifiers)

    def _on_window_key_up(self, instance, keycode, *largs):
        keycode = (keycode, self.keycode_to_string(keycode))
        return self.dispatch('on_key_up', keycode)

    def _on_vkeyboard_key_down(self, instance, keycode, text, modifiers):
        if keycode is None:
            # virtual keyboards may send only text; derive the keycode name
            keycode = text.lower()
        keycode = (self.string_to_keycode(keycode), keycode)
        return self.dispatch('on_key_down', keycode, text, modifiers)

    def _on_vkeyboard_key_up(self, instance, keycode, text, modifiers):
        if keycode is None:
            keycode = text
        keycode = (self.string_to_keycode(keycode), keycode)
        return self.dispatch('on_key_up', keycode)

    def _on_vkeyboard_textinput(self, instance, text):
        return self.dispatch('on_textinput', text)

    def string_to_keycode(self, value):
        '''Convert a string to a keycode number according to the
        :attr:`Keyboard.keycodes`. If the value is not found in the
        keycodes, it will return -1.
        '''
        return Keyboard.keycodes.get(value, -1)

    def keycode_to_string(self, value):
        '''Convert a keycode number to a string according to the
        :attr:`Keyboard.keycodes`. If the value is not found in the
        keycodes, it will return ''.
        '''
        keycodes = list(Keyboard.keycodes.values())
        if value in keycodes:
            return list(Keyboard.keycodes.keys())[keycodes.index(value)]
        return ''
class WindowBase(EventDispatcher):
'''WindowBase is an abstract window widget for any window implementation.
:Parameters:
`borderless`: str, one of ('0', '1')
Set the window border state. Check the
:mod:`~kivy.config` documentation for a
more detailed explanation on the values.
`fullscreen`: str, one of ('0', '1', 'auto', 'fake')
Make the window fullscreen. Check the
:mod:`~kivy.config` documentation for a
more detailed explanation on the values.
`width`: int
Width of the window.
`height`: int
Height of the window.
`minimum_width`: int
Minimum width of the window (only works for sdl2 window provider).
`minimum_height`: int
Minimum height of the window (only works for sdl2 window provider).
:Events:
`on_motion`: etype, motionevent
Fired when a new :class:`~kivy.input.motionevent.MotionEvent` is
dispatched
`on_touch_down`:
Fired when a new touch event is initiated.
`on_touch_move`:
Fired when an existing touch event changes location.
`on_touch_up`:
Fired when an existing touch event is terminated.
`on_draw`:
Fired when the :class:`Wi
|
EDRN/CancerDataExpo
|
src/edrn.rdf/edrn/rdf/upgrades.py
|
Python
|
apache-2.0
| 2,788
| 0.002154
|
# encoding: utf-8
# Copyright 2013–2017 California Institute of Technology. ALL RIGHTS
# RESERVED. U.S. Government Sponsorship acknowledged.
from .setuphandlers import publish
from edrn.rdf import DEFAULT_PROFILE
from plone.dexterity.utils import createContentInContainer
from edrn.rdf.labcascollectionrdfgenerator import ILabCASCollectionRDFGenerator
import plone.api
def nullUpgradeStep(setupTool):
    '''A null step when a profile upgrade requires no custom activity.'''
    # Intentionally a no-op: registered for profile upgrades that carry no
    # migration work of their own.
def upgrade3to4(setupTool):
    '''Re-run the profile's typeinfo import step so content-type changes register.'''
    setupTool.runImportStepFromProfile(DEFAULT_PROFILE, 'typeinfo')
def upgrade4to5(setupTool):
    '''Add the Staff_Status and SiteID predicate handlers to the RDF generators.'''
    # Note that I (kelly) went ahead and added these through the web to the
    # running https://edrn.jpl.nasa.gov/cancerdataexpo/ so we could take
    # immediate advantage of the new data without cutting a new release.
    # This is provided just in case there is a disaster and we need to
    # re-release.
    portal = setupTool.getSite()
    if 'rdf-generators' in list(portal.keys()):
        rdfGenerators = portal['rdf-generators']
        if 'person-generator' in list(rdfGenerators.keys()):
            personGenerator = rdfGenerators['person-generator']
            # only add the predicate if it has not been created already
            if 'staff_status' not in list(personGenerator.keys()):
                predicate = createContentInContainer(
                    personGenerator,
                    'edrn.rdf.literalpredicatehandler',
                    title='Staff_Status',
                    description='''Maps from DMCC's Staff_Status to the EDRN-specific predicate for employmentActive.''',
                    predicateURI='http://edrn.nci.nih.gov/rdf/schema.rdf#employmentActive'
                )
                publish(predicate, plone.api.portal.get_tool('portal_workflow'))
        if 'publications-generator' in list(rdfGenerators.keys()):
            publicationsGenerator = rdfGenerators['publications-generator']
            if 'siteid' not in list(publicationsGenerator.keys()):
                predicate = createContentInContainer(
                    publicationsGenerator,
                    'edrn.rdf.referencepredicatehandler',
                    title='SiteID',
                    description='''Maps from the DMCC's SiteID to the EDRN-specific predicate for site ID.''',
                    predicateURI='http://edrn.nci.nih.gov/rdf/schema.rdf#site',
                    uriPrefix='http://edrn.nci.nih.gov/data/sites/'
                )
                publish(predicate, plone.api.portal.get_tool('portal_workflow'))
def upgrade5to6(setupTool):
    '''Point every LabCAS RDF generator at the new data-access API endpoint.'''
    catalog = plone.api.portal.get_tool('portal_catalog')
    # every object providing the LabCAS generator interface gets the new URL
    for brain in catalog(object_provides=ILabCASCollectionRDFGenerator.__identifier__):
        obj = brain.getObject()
        obj.labcasSolrURL = 'https://edrn-labcas.jpl.nasa.gov/data-access-api'
|
nihilus/src
|
pywraps/py_choose.py
|
Python
|
bsd-3-clause
| 1,595
| 0.016928
|
#<pycode(py_choose)>
class Choose:
    """
    Choose - class for choose() with callbacks
    """
    def __init__(self, list, title, flags=0, deflt=1, icon=37):
        self.list = list
        self.title = title
        self.flags = flags
        # -1 means "let IDA pick" for all geometry fields
        self.x0 = -1
        self.x1 = -1
        self.y0 = -1
        self.y1 = -1
        self.width = -1
        self.deflt = deflt
        self.icon = icon

        # HACK: Add a circular reference for non-modal choosers. This prevents
        # the GC from collecting the class object the callbacks need.
        # Unfortunately this means that the class will never be collected,
        # unless refhack is set to None explicitly.
        if (flags & Choose2.CH_MODAL) == 0:
            self.refhack = self

    def sizer(self):
        """
        Callback: sizer - returns the length of the list
        """
        return len(self.list)

    def getl(self, n):
        """
        Callback: getl - get one item from the list.

        n == 0 is the chooser title; 1..len(list) index into the list.
        """
        if n == 0:
            return self.title
        if n <= self.sizer():
            return str(self.list[n-1])
        else:
            return "<Empty>"

    def ins(self):
        pass

    def update(self, n):
        pass

    def edit(self, n):
        pass

    def enter(self, n):
        # parenthesized print: identical output on Python 2, valid on Python 3
        print("enter(%d) called" % n)

    def destroy(self):
        pass

    def get_icon(self, n):
        pass

    def choose(self):
        """
        choose - Display the choose dialogue
        """
        # disable the script timeout while the modal UI is up, restore after
        old = set_script_timeout(0)
        n = _idaapi.choose_choose(
            self,
            self.flags,
            self.x0,
            self.y0,
            self.x1,
            self.y1,
            self.width,
            self.deflt,
            self.icon)
        set_script_timeout(old)
        return n
#</pycode(py_choose)>
|
zhangyage/Python-oldboy
|
day13/Django_study/manage.py
|
Python
|
apache-2.0
| 255
| 0
|
#!/usr/bin/env python
"""Django command-line utility for administrative tasks."""
import os
import sys

if __name__ == "__main__":
    # point Django at this project's settings module before anything imports it
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Django_study.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
hawkphantomnet/leetcode
|
PathSum/Solution.py
|
Python
|
mit
| 625
| 0.0048
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
|
class Solution(object):
    def hasPathSum(self, root, sum):
        """
        Return True if some root-to-leaf path sums to *sum*.

        :type root: TreeNode
        :type sum: int
        :rtype: bool
        """
        # empty tree has no root-to-leaf path at all
        if root is None:
            return False
        # at a leaf the path is complete: succeed only on an exact match
        if root.left is None and root.right is None:
            return root.val == sum
        remaining = sum - root.val
        return (self.hasPathSum(root.left, remaining) or
                self.hasPathSum(root.right, remaining))
|
AmandaMoen/AmandaMoen
|
code/session06/run_html_render.py
|
Python
|
gpl-2.0
| 5,015
| 0.004786
|
#!/usr/bin/env python
"""
a simple script can run and test your html rendering classes.
Uncomment the steps as you add to your rendering.
"""
import codecs
import cStringIO
# importing the html_rendering code with a short name for easy typing.
import html_render as hr
## writing the file out:
def render(page, filename):
    """
    Render the tree of elements rooted at *page*.

    Uses cStringIO to render to memory, then dumps the markup to the
    console and writes it to *filename* as UTF-8 -- very handy!
    """
    f = cStringIO.StringIO()
    page.render(f)
    f.reset()
    # echo the rendered markup to stdout for quick eyeballing
    print f.read()
    # rewind and write the same content to disk
    f.reset()
    codecs.open(filename, 'w', encoding="utf-8").write( f.read() )
## Step 1
##########
# Step 1: exercise the bare Element with two text children.
page = hr.Element()
page.append(u"Here is a paragraph of text -- there could be more of them, but this is enough to show that we can do some text")
page.append(u"And here is another piece of text -- you should be able to add any number")
render(page, u"test_html_output1.html")
# ## Step 2
# ##########
# page = hr.Html()
# body = hr.Body()
# body.append(hr.P(u"Here is a paragraph of text -- there could be more of them, but this is enough to show that we can do some text"))
# body.append(hr.P(u"And here is another piece of text -- you should be able to add any number"))
# page.append(body)
# render(page, u"test_html_output2.html")
# # Step 3
# ##########
# page = hr.Html()
# head = hr.Head()
# head.append(hr.Title(u"PythonClass = Revision 1087:"))
# page.append(head)
# body = hr.Body()
# body.append(hr.P(u"Here is a paragraph of text -- there could be more of them, but this is enough to show that we can do some text"))
# body.append(hr.P(u"And here is another piece of text -- you should be able to add any number"))
# page.append(body)
# render(page, u"test_html_output3.html")
# # Step 4
# ##########
# page = hr.Html()
# head = hr.Head()
# head.append(hr.Title(u"PythonClass = Revision 1087:"))
# page.append(head)
# body = hr.Body()
# body.append(hr.P(u"Here is a paragraph of text -- there could be more of them, but this is enough to show that we can do some text",
# style=u"text-align: center; font-style: oblique;"))
# page.append(body)
# render(page, u"test_html_output4.html")
# # Step 5
# #########
# page = hr.Html()
# head = hr.Head()
# head.append(hr.Title(u"PythonClass = Revision 1087:"))
# page.append(head)
# body = hr.Body()
# body.append(hr.P(u"Here is a paragraph of text -- there could be more of them, but this is enough to show that we can do some text",
# style=u"text-align: center; font-style: oblique;"))
# body.append(hr.Hr())
# page.append(body)
# render(page, u"test_html_output5.html")
# # Step 6
# #########
# page = hr.Html()
# head = hr.Head()
# head.append(hr.Title(u"PythonClass = Revision 1087:"))
# page.append(head)
# body = hr.Body()
# body.append(hr.P(u"Here is a paragraph of text -- there could be more of them, but this is enough to show that we can do some text",
# style=u"text-align: center; font-style: oblique;"))
# body.append(hr.Hr())
# body.append(u"And this is a ")
# body.append( hr.A(u"http://google.com", "link") )
# body.append(u"to google")
# page.append(body)
# render(page, u"test_html_output6.html")
# # Step 7
# #########
# page = hr.Html()
# head = hr.Head()
# head.append(hr.Title(u"PythonClass = Revision 1087:"))
# page.append(head)
# body = hr.Body()
# body.append( hr.H(2, u"PythonClass - Class 6 example") )
# body.append(hr.P(u"Here is a paragraph of text -- there could be more of them, but this is enough to show that we can do some text",
# style=u"text-align: center; font-style: oblique;"))
# body.append(hr.Hr())
# list = hr.Ul(id=u"TheList", style=u"line-height:200%")
# list.append( hr.Li(u"The first item in a list") )
# list.append( hr.Li(u"This is the second item", style="color: red") )
# item = hr.Li()
# item.append(u"And this is a ")
# item.append( hr.A(u"http://google.com", u"link") )
# item.append(u"to google")
# list.append(item)
# body.append(list)
# page.append(body)
# render(page, u"test_html_output7.html")
# # Step 8
# ########
# page = hr.Html()
# head = hr.Head()
# head.append( hr.Meta(charset=u"UTF-8") )
# head.append(hr.Title(u"PythonClass = Revision 1087:"))
# page.append(head)
# body = hr.Body()
# body.append( hr.H(2, u"PythonClass - Class 6 example") )
# body.append(hr.P(u"Here is a paragraph of text -- there could be more of them, but this is enough to show that we can do some text",
# style=u"text-align: center; font-style: oblique;"))
# body.append(hr.Hr())
# list = hr.Ul(id=u"TheList", style=u"line-height:200%")
# list.append( hr.Li(u"The first item in a list") )
# list.append( hr.Li(u"This is the second item", style="color: red") )
# item = hr.Li()
# item.append(u"And this is a ")
# item.append( hr.A(u"http://google.com", "link") )
# item.append(u"to google")
# list.append(item)
# body.append(list)
# page.append(body)
# render(page, u"test_html_output8.html")
|
kinow-io/kinow-python-sdk
|
kinow_client/models/widget_home_rail.py
|
Python
|
apache-2.0
| 5,545
| 0.000361
|
# coding: utf-8
"""
Server API
Reference for Server API (REST/Json)
OpenAPI spec version: 2.0.6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class WidgetHomeRail(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    def __init__(self, id=None, label=None, type=None, type_id=None, visibility=None, position=None):
        """
        WidgetHomeRail - a model defined in Swagger

        :param dict swaggerTypes: The key is attribute name
                                  and the value is attribute type.
        :param dict attributeMap: The key is attribute name
                                  and the value is json key in definition.
        """
        # type metadata consumed by to_dict() for recursive serialization
        self.swagger_types = {
            'id': 'int',
            'label': 'list[I18nField]',
            'type': 'str',
            'type_id': 'int',
            'visibility': 'str',
            'position': 'int'
        }

        self.attribute_map = {
            'id': 'id',
            'label': 'label',
            'type': 'type',
            'type_id': 'type_id',
            'visibility': 'visibility',
            'position': 'position'
        }

        self._id = id
        self._label = label
        self._type = type
        self._type_id = type_id
        self._visibility = visibility
        self._position = position

    @property
    def id(self):
        """
        Gets the id of this WidgetHomeRail.

        :return: The id of this WidgetHomeRail.
        :rtype: int
        """
        return self._id

    @id.setter
    def id(self, id):
        """
        Sets the id of this WidgetHomeRail.

        :param id: The id of this WidgetHomeRail.
        :type: int
        """
        self._id = id

    @property
    def label(self):
        """
        Gets the label of this WidgetHomeRail.

        :return: The label of this WidgetHomeRail.
        :rtype: list[I18nField]
        """
        return self._label

    @label.setter
    def label(self, label):
        """
        Sets the label of this WidgetHomeRail.

        :param label: The label of this WidgetHomeRail.
        :type: list[I18nField]
        """
        self._label = label

    @property
    def type(self):
        """
        Gets the type of this WidgetHomeRail.

        :return: The type of this WidgetHomeRail.
        :rtype: str
        """
        return self._type

    @type.setter
    def type(self, type):
        """
        Sets the type of this WidgetHomeRail.

        :param type: The type of this WidgetHomeRail.
        :type: str
        """
        self._type = type

    @property
    def type_id(self):
        """
        Gets the type_id of this WidgetHomeRail.

        :return: The type_id of this WidgetHomeRail.
        :rtype: int
        """
        return self._type_id

    @type_id.setter
    def type_id(self, type_id):
        """
        Sets the type_id of this WidgetHomeRail.

        :param type_id: The type_id of this WidgetHomeRail.
        :type: int
        """
        self._type_id = type_id

    @property
    def visibility(self):
        """
        Gets the visibility of this WidgetHomeRail.

        :return: The visibility of this WidgetHomeRail.
        :rtype: str
        """
        return self._visibility

    @visibility.setter
    def visibility(self, visibility):
        """
        Sets the visibility of this WidgetHomeRail.

        :param visibility: The visibility of this WidgetHomeRail.
        :type: str
        """
        self._visibility = visibility

    @property
    def position(self):
        """
        Gets the position of this WidgetHomeRail.

        :return: The position of this WidgetHomeRail.
        :rtype: int
        """
        return self._position

    @position.setter
    def position(self, position):
        """
        Sets the position of this WidgetHomeRail.

        :param position: The position of this WidgetHomeRail.
        :type: int
        """
        self._position = position

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}
        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        # Fix: the generated code compared __dict__ unconditionally, which
        # raises AttributeError for objects without a __dict__ (e.g. ints)
        # and treats unrelated look-alike objects as equal.
        if not isinstance(other, WidgetHomeRail):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
|
MDXDave/ModernWebif
|
plugin/controllers/models/owibranding.py
|
Python
|
gpl-2.0
| 15,558
| 0.03349
|
# -*- coding: utf-8 -*-
##############################################################################
# 2014 E2OpenPlugins #
# #
# This file is open source software; you can redistribute it and/or modify #
# it under the terms of the GNU General Public License version 2 as #
# published by the Free Software Foundation. #
# #
##############################################################################
# Simulate the oe-a boxbranding module (Only functions required by OWIF) #
##############################################################################
from Plugins.Extensions.ModernWebif.__init__ import _
from Components.About import about
from socket import has_ipv6
from Tools.Directories import fileExists, pathExists
import string
import os, hashlib
try:
from Components.About import about
except:
pass
tpmloaded = 1
try:
from enigma import eTPM
if not hasattr(eTPM, 'getData'):
tpmloaded = 0
except:
tpmloaded = 0
def validate_certificate(cert, key):
    """Decrypt *cert* (skipping its 8-byte header) with *key*; return the
    extracted key material on success, or None if decryption fails."""
    buf = decrypt_block(cert[8:], key)
    if buf is None:
        return None
    # assumes the decrypted block carries the embedded key at [36:107] and the
    # certificate a trailing section at [139:196] -- TODO confirm layout
    return buf[36:107] + cert[139:196]
def get_random():
    """Return 8 bytes of OS randomness XOR-mixed with the current time,
    or None if anything goes wrong."""
    try:
        # XOR two strings char-by-char; b is repeated so it is long enough
        xor = lambda a,b: ''.join(chr(ord(c)^ord(d)) for c,d in zip(a,b*100))
        random = urandom(8)
        x = str(time())[-8:]
        result = xor(random, x)
        return result
    except:
        # NOTE(review): urandom/time are not imported in the visible part of
        # this module; if undefined, this bare except masks the NameError and
        # the function always returns None -- verify the module's imports.
        return None
def bin2long(s):
    """Interpret the byte string *s* as a big-endian unsigned integer."""
    # '8' instead of the Python-2-only '8L' literal: ints auto-promote to
    # long on Python 2, so the result is identical, and the syntax is also
    # valid on Python 3.
    return reduce(lambda x, y: (x << 8) + y, map(ord, s))
def long2bin(l):
    """Render integer *l* as a 128-character big-endian string (1024 bits)."""
    # byte i carries bits (1024 - 8*(i+1)) .. (1024 - 8*i - 1) of l
    shifts = (1024 - 8 * (i + 1) for i in range(128))
    return "".join(chr((l >> shift) & 0xff) for shift in shifts)
def rsa_pub1024(src, mod):
    """RSA public-key operation: src**65537 mod *mod*, both 128-byte strings."""
    return long2bin(pow(bin2long(src), 65537, bin2long(mod)))
def decrypt_block(src, mod):
    """Decrypt a 128- or 202-byte block with 1024-bit modulus *mod* and verify
    its embedded SHA-1 digest; return the 128-byte plaintext or None."""
    if len(src) != 128 and len(src) != 202:
        return None
    dest = rsa_pub1024(src[:128], mod)
    # digest covers plaintext bytes [1:107]
    hash = hashlib.sha1(dest[1:107])
    if len(src) == 202:
        # longer blocks mix extra certificate payload into the digest
        hash.update(src[131:192])
    result = hash.digest()
    # the plaintext carries its expected digest at [107:127]
    if result == dest[107:127]:
        return dest
    return None
def tpm_check():
    """Validate the box's TPM certificate chain (level-2 and level-3 certs
    against the built-in root key) and a signed random challenge.

    Returns 1 when the chain and challenge verify, else 0 (any error -> 0).
    """
    try:
        tpm = eTPM()
        rootkey = ['\x9f', '|', '\xe4', 'G', '\xc9', '\xb4', '\xf4', '#', '&', '\xce', '\xb3', '\xfe', '\xda', '\xc9', 'U', '`', '\xd8', '\x8c', 's', 'o', '\x90', '\x9b', '\\', 'b', '\xc0', '\x89', '\xd1', '\x8c', '\x9e', 'J', 'T', '\xc5', 'X', '\xa1', '\xb8', '\x13', '5', 'E', '\x02', '\xc9', '\xb2', '\xe6', 't', '\x89', '\xde', '\xcd', '\x9d', '\x11', '\xdd', '\xc7', '\xf4', '\xe4', '\xe4', '\xbc', '\xdb', '\x9c', '\xea', '}', '\xad', '\xda', 't', 'r', '\x9b', '\xdc', '\xbc', '\x18', '3', '\xe7', '\xaf', '|', '\xae', '\x0c', '\xe3', '\xb5', '\x84', '\x8d', '\r', '\x8d', '\x9d', '2', '\xd0', '\xce', '\xd5', 'q', '\t', '\x84', 'c', '\xa8', ')', '\x99', '\xdc', '<', '"', 'x', '\xe8', '\x87', '\x8f', '\x02', ';', 'S', 'm', '\xd5', '\xf0', '\xa3', '_', '\xb7', 'T', '\t', '\xde', '\xa7', '\xf1', '\xc9', '\xae', '\x8a', '\xd7', '\xd2', '\xcf', '\xb2', '.', '\x13', '\xfb', '\xac', 'j', '\xdf', '\xb1', '\x1d', ':', '?']
        random = None
        result = None
        l2r = False
        l2k = None
        l3k = None
        # level-2 certificate must exist and validate against the root key
        l2c = tpm.getData(eTPM.DT_LEVEL2_CERT)
        if l2c is None:
            return 0
        l2k = validate_certificate(l2c, rootkey)
        if l2k is None:
            return 0
        # level-3 certificate must exist and validate against the level-2 key
        l3c = tpm.getData(eTPM.DT_LEVEL3_CERT)
        if l3c is None:
            return 0
        l3k = validate_certificate(l3c, l2k)
        if l3k is None:
            return 0
        # challenge-response: the TPM must sign our random value correctly
        random = get_random()
        if random is None:
            return 0
        value = tpm.computeSignature(random)
        result = decrypt_block(value, l3k)
        if result is None:
            return 0
        # the decrypted signature carries the challenge at bytes [80:88]
        if result[80:88] != random:
            return 0
        return 1
    except:
        return 0
def getAllInfo():
info = {}
brand = "unknown"
model = "unknown"
procmodel = "unknown"
orgdream = 0
if tpmloaded:
orgdream = tpm_check()
if fileExists("/proc/stb/info/hwmodel"):
brand = "DAGS"
f = open("/proc/stb/info/hwmodel",'r')
procmodel = f.readline().strip()
f.close()
if (procmodel.startswith("optimuss") or procmodel.startswith("pingulux")):
brand = "Edision"
model = procmodel.replace("optimmuss", "Optimuss ").replace("plus", " Plus").replace(" os", " OS")
elif (procmodel.startswith("fusion")):
brand = "Xsarius"
if procmodel == "fusionhd":
model = procmodel.replace("fusionhd", "Fusion HD")
elif procmodel == "fusionhdse":
model = procmodel.replace("fusionhdse", "Fusion HD SE")
elif fileExists("/proc/stb/info/azmodel"):
brand = "AZBox"
f = open("/proc/stb/info/model",'r') # To-Do: Check if "model" is really correct ...
procmodel = f.readline().strip()
f.close()
model = procmodel.lower()
elif fileExists("/proc/stb/info/gbmodel"):
brand = "GigaBlue"
f = open("/proc/stb/info/gbmodel",'r')
procmodel = f.readline().strip()
f.close()
model = procmodel.upper().replace("GBQUAD", "Quad").replace("PLUS", " Plus")
elif fileExists("/proc/stb/info/vumodel"):
brand = "Vu+"
f = open("/proc/stb/info/vumodel",'r')
procmodel = f.readline().strip()
f.close()
model = procmodel.title().replace("olose", "olo SE").replace("olo2se", "olo2 SE").replace("2", "²")
elif fileExists("/proc/boxtype"):
f = open("/proc/boxtype",'r')
procmodel = f.readline().strip().lower()
f.close()
if procmodel in ("adb2850", "adb2849", "bska", "bsla", "bxzb", "bzzb"):
brand = "Advanced Digital Broadcast"
if procmodel in ("bska", "bxzb"):
model = "ADB 5800S"
elif procmodel in ("bsla", "bzzb"):
model = "ADB 5800SX"
elif procmodel == "adb2849":
model = "ADB 2849ST"
else:
model = "ADB 2850ST"
elif procmodel in ("esi88", "uhd88"):
brand = "Sagemcom"
if procmodel == "uhd88":
model = "UHD 88"
else:
model = "ESI 88"
elif fileExists("/proc/stb/info/boxtype"):
f = open("/proc/stb/info/boxtype",'r')
procmodel = f.readline().strip().lower()
f.close()
if procmodel.startswith("et"):
brand = "Xtrend"
model = procmodel.upper()
elif procmodel.startswith("xp"):
brand = "MaxDigital"
model = procmodel
elif procmodel.startswith("ixuss"):
brand = "Medialink"
model = procmodel.replace(" ", "")
elif procmodel.startswith("formuler"):
brand = "Formuler"
model = procmodel.replace("formuler","")
elif procmodel.startswith("ini"):
if procmodel.endswith("9000ru"):
brand = "Sezam"
model = "Marvel"
elif procmodel.endswith("5000ru"):
brand = "Sezam"
model = "hdx"
elif procmodel.endswith("1000ru"):
brand = "Sezam"
model = "hde"
elif procmodel.endswith("5000sv"):
brand = "Miraclebox"
model = "mbtwin"
elif procmodel.endswith("1000sv"):
brand = "Miraclebox"
model = "mbmini"
elif procmodel.endswith("1000de"):
brand = "Golden Interstar"
model = "Xpeed LX"
elif procmodel.endswith("9000de"):
brand = "Golden Interstar"
model = "Xpeed LX3"
elif procmodel.endswith("1000lx"):
brand = "Golden Interstar"
model = "Xpeed LX"
elif procmodel.endswith("de"):
brand = "Golden Interstar"
elif procmodel.endswith("1000am"):
brand = "Atemio"
model = "5x00"
else:
brand = "Venton"
model = "HDx"
elif procmodel.startswith("unibox-"):
brand = "Venton"
model = "HDe"
elif procmodel == "hd1100":
brand = "Mut@nt"
model = "hd1100"
elif procmodel == "hd1200":
brand = "Mut@nt"
model = "hd1200"
elif procmodel == "hd2400":
brand = "Mut@nt"
model = "hd2400"
elif procmodel == "arivalink200":
brand = "Ferguson"
model = "Ariva @Link 200"
elif procmodel.startswith("spark"):
brand = "Fulan"
if procmodel == "spark7162":
model = "Spark 7162"
else:
model = "Spark"
elif procmodel == "wetekplay":
brand = "WeTeK"
model = procmodel
elif procmodel == "osmini":
brand = "Edision"
model = procmodel
elif fileExists("/proc/stb/info/model"):
f = open("/proc/stb/info/model",'r')
procmodel = f.readline().strip().lower()
f.close()
if procmodel == "tf7700hdpvr":
brand = "Topfield"
model = "TF7700 HDPVR"
elif procmodel == "dsi87":
brand = "Sagemcom"
model = "DSI
|
mtholder/pyraphyletic
|
phylesystem_api/tests.py
|
Python
|
bsd-2-clause
| 7,707
| 0.001946
|
"""Unittests that do not require the server to be running an common tests of responses.
The TestCase here just calls the functions that provide the logic to the ws views with DummyRequest
objects to mock a real request.
The functions starting with `check_...` are called with UnitTest.TestCase instance as the first
arg and the response. These functions are used within the unit tests in this file, but also
in the `ws-tests` calls that perform the tests through http.
"""
import os
import unittest
from pyramid import testing
from phylesystem_api.utility import fill_app_settings, umbrella_from_request
from phylesystem_api.views import import_nexson_from_crossref_metadata
def get_app_settings_for_testing(settings):
    """Fills the settings of a DummyRequest, with info from the development.ini

    This allows the dummy requests to mock a real request wrt
    configuration-dependent settings."""
    from peyotl.utility.imports import SafeConfigParser
    cfg = SafeConfigParser()
    # the INI is expected one directory above the test working directory
    devini_path = os.path.abspath(os.path.join('..', 'development.ini'))
    if not os.path.isfile(devini_path):
        raise RuntimeError('Expecting a INI file at "{}" to run tests'.format(devini_path))
    cfg.read(devini_path)
    settings['repo_parent'] = cfg.get('app:main', 'repo_parent')
    fill_app_settings(settings=settings)
def gen_versioned_dummy_request():
    """Adds a version number (3) to the request to mimic the matching based on URL in the real app.
    """
    req = testing.DummyRequest()
    get_app_settings_for_testing(req.registry.settings)
    # the real app extracts this from the URL route; fake it here
    req.matchdict['api_version'] = 'v3'
    return req
def check_index_response(test_case, response):
    """Verifies the existence of expected keys in the response to an index call.

    'documentation_url', 'description', and 'source_url' keys must be in the response.
    """
    required_keys = ('documentation_url', 'description', 'source_url')
    for key in required_keys:
        test_case.assertIn(key, response)
def check_render_markdown_response(test_case, response):
    """Check of `response` to a `render_markdown` call: the body must be the
    anchor-ified rendering of the canonical test input."""
    expected = ('<p>hi from <a href="http://phylo.bio.ku.edu" target="_blank">'
                'http://phylo.bio.ku.edu</a> and '
                '<a href="https://github.com/orgs/OpenTreeOfLife/dashboard" target="_blank">'
                'https://github.com/orgs/OpenTreeOfLife/dashboard</a></p>')
    test_case.assertEquals(response.body, expected)
def check_study_list_and_config_response(test_case,
                                         sl_response,
                                         config_response,
                                         from_generic_config):
    """Verify the study_list, config, and generic-config responses agree:
    the shard document counts must total the study-list length, and the two
    config payloads must be identical."""
    total_docs = sum(shard['number of documents']
                     for shard in config_response['shards'])
    test_case.assertEquals(total_docs, len(sl_response))
    test_case.assertEquals(from_generic_config, config_response)
def check_unmerged_response(test_case, ub):
    """Check of `ub` response from an `unmerged_branches` call"""
    # 'master' must be filtered out of the unmerged-branch listing
    test_case.assertTrue('master' not in ub)
def check_config_response(test_case, cfg):
    """Check of `cfg` response from a `config` call"""
    test_case.assertSetEqual(set(cfg.keys()), {"initialization", "shards", "number_of_shards"})
def check_external_url_response(test_case, doc_id, resp):
    """Simple check of an `external_url` `resp` response for `doc_id`.
    `doc_id` and `url` fields of the response are checked."""
    test_case.assertEquals(resp.get('doc_id'), doc_id)
    # the URL is expected to point at the raw JSON document for doc_id
    test_case.assertTrue(resp.get('url', '').endswith('{}.json'.format(doc_id)))
def check_push_failure_response(test_case, resp):
    """Check of the `resp` response of a `push_failure` method call to verify it has the right keys.
    """
    test_case.assertSetEqual(set(resp.keys()), {"doc_type", "errors", "pushes_succeeding"})
    test_case.assertTrue(resp["pushes_succeeding"])
render_test_input = 'hi from <a href="http://phylo.bio.ku.edu" target="new">' \
'http://phylo.bio.ku.edu</a> and ' \
'https://github.com/orgs/OpenTreeOfLife/dashboard'
class ViewTests(unittest.TestCase):
    """UnitTest of the functions that underlie the ws views."""
    def setUp(self):
        """Calls pyramid testing.setUp"""
        self.config = testing.setUp()
    def tearDown(self):
        """Calls pyramid testing.tearDown"""
        testing.tearDown()
    def test_index(self):
        """Test of index view"""
        request = gen_versioned_dummy_request()
        from phylesystem_api.views import index
        check_index_response(self, index(request))
    def test_render_markdown(self):
        """Test of render_markdown view"""
        request = testing.DummyRequest(post={'src': render_test_input})
        from phylesystem_api.views import render_markdown
        check_render_markdown_response(self, render_markdown(request))
    def test_study_list_and_config(self):
        """Test of study_list and phylesystem_config views"""
        request = gen_versioned_dummy_request()
        from phylesystem_api.views import study_list
        sl = study_list(request)
        request = gen_versioned_dummy_request()
        from phylesystem_api.views import phylesystem_config
        x = phylesystem_config(request)
        request = gen_versioned_dummy_request()
        request.matchdict['resource_type'] = 'study'
        from phylesystem_api.views import generic_config
        y = generic_config(request)
        check_study_list_and_config_response(self, sl, x, y)
        # with an empty study list there is no doc to probe external_url with
        if not sl:
            return
        from phylesystem_api.views import external_url
        doc_id = sl[0]
        request.matchdict['doc_id'] = doc_id
        e = external_url(request)
        check_external_url_response(self, doc_id, e)
    def test_unmerged(self):
        """Test of unmerged_branches view"""
        request = gen_versioned_dummy_request()
        request.matchdict['resource_type'] = 'study'
        from phylesystem_api.views import unmerged_branches
        check_unmerged_response(self, unmerged_branches(request))
    def test_config(self):
        """Test of generic_config view"""
        request = gen_versioned_dummy_request()
        from phylesystem_api.views import phylesystem_config, generic_config
        r2 = phylesystem_config(request)
        check_config_response(self, r2)
        request.matchdict['resource_type'] = 'study'
        r = generic_config(request)
        check_config_response(self, r)
        # for 'study' the generic config must match the phylesystem config ...
        self.assertDictEqual(r, r2)
        request.matchdict['resource_type'] = 'amendment'
        ra = generic_config(request)
        check_config_response(self, ra)
        # ... but 'amendment' must yield a different configuration
        self.assertNotEqual(ra, r)
def test_push_failure_state(self):
"""Test of push_failure view"""
request = gen_versioned_dummy_request()
request.matchdict['resource_type'] = 'collection'
from p
|
hylesystem_api.views import push_failure
pf = push_failure(request)
check_pu
|
sh_failure_response(self, pf)
def test_doi_import(self):
"""Make sure that fetching from DOI generates a valid study shell."""
doi = "10.3732/ajb.0800060"
document = import_nexson_from_crossref_metadata(doi=doi,
ref_string=None,
include_cc0=None)
request = gen_versioned_dummy_request()
request.matchdict['resource_type'] = 'study'
umbrella = umbrella_from_request(request)
errors = umbrella.validate_and_convert_doc(document, {})[1]
self.assertEquals(len(errors), 0)
if __name__ == '__main__':
    # Allow running this test module directly with `python <file>`.
    unittest.main()
|
liorvh/grab
|
setup.py
|
Python
|
mit
| 1,293
| 0
|
"""Packaging script for the grab web-scraping framework."""
from setuptools import setup, find_packages
import os

ROOT = os.path.dirname(os.path.realpath(__file__))

# Read the long description with a context manager so the file handle is
# closed deterministically (the original left it to the GC).
with open(os.path.join(ROOT, 'README.rst')) as readme_file:
    LONG_DESCRIPTION = readme_file.read()

setup(
    name='grab',
    version='0.6.22',
    description='Web Scraping Framework',
    long_description=LONG_DESCRIPTION,
    url='http://grablib.org',
    author='Gregory Petukhov',
    author_email='lorien@lorien.name',
    packages=find_packages(exclude=['test', 'test.files']),
    install_requires=['lxml', 'pycurl', 'selection', 'weblib>=0.1.10', 'six'],
    license="MIT",
    # Rejoined: this keyword argument was split mid-token in the corrupted
    # source ("k | eywords=...").
    keywords="pycurl multicurl curl network parsing grabbing scraping"
             " lxml xpath data mining",
    classifiers=(
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: Implementation :: CPython',
        'License :: OSI Approved :: MIT License',
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Operating System :: OS Independent',
        'Topic :: Software Development :: Libraries :: Application Frameworks',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Topic :: Internet :: WWW/HTTP',
    ),
)
|
cloudify-cosmo/softlayer-python
|
SoftLayer/CLI/snapshot/list.py
|
Python
|
mit
| 1,122
| 0
|
"""List iSCSI Snapshots."""
# :license: MIT, see LICENSE for more details.
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import formatting
from SoftLayer.CLI import helpers
from SoftLayer import utils
import click
@click.command()
@click.argument('iscsi-identifier')
@environment.pass_env
def cli(env, iscsi_identifier):
    """List iSCSI Snapshots.

    Resolves the iSCSI volume from its identifier, fetches its snapshot
    partnerships and renders them as a table.
    """
    iscsi_mgr = SoftLayer.ISCSIManager(env.client)
    iscsi_id = helpers.resolve_id(iscsi_mgr.resolve_ids,
                                  iscsi_identifier,
                                  'iSCSI')
    iscsi = env.client['Network_Storage_Iscsi']
    # Rejoined: the mask string was split mid-token in the corrupted source.
    snapshots = iscsi.getPartnerships(
        mask='volumeId,partnerVolumeId,createDate,type', id=iscsi_id)
    snapshots = [utils.NestedDict(n) for n in snapshots]
    table = formatting.Table(['id', 'createDate', 'name', 'description'])
    for snapshot in snapshots:
        table.add_row([
            # NOTE(review): header says 'id' but the value is the partner
            # volume id -- confirm this is the intended column content.
            snapshot['partnerVolumeId'],
            snapshot['createDate'],
            snapshot['type']['name'],
            snapshot['type']['description'],
        ])
    return table
|
ajventer/ezdm
|
ezdm_libs/all_characters.py
|
Python
|
gpl-3.0
| 1,127
| 0.000887
|
from .frontend import JSON_Editor, mode, Page
from . import frontend
from .character import Character
from .util import load_json, debug
class CHARACTERS(JSON_Editor):
    """JSON editor specialised for character sheets (avatar icons,
    Character objects)."""

    def __init__(self):
        self._name = 'character'
        JSON_Editor.__init__(self)
        self._icons = 'avatars'
        self._obj = Character({})

    def render(self, requestdata):
        """DMs get the full editor; players get a read-only view of the
        campaign's current character."""
        if mode() == 'dm':
            return JSON_Editor.render(self, requestdata)
        else:
            char = frontend.campaign.current_char()
            return self.view(char.name())

    def view(self, item):
        """Render a read-only page for the character named `item`."""
        page = Page()
        if not item:
            page.error('No item specified')
            return page.render()
        try:
            debug('try %s/%s' % (self._name, item))
            json = load_json('%ss' % self._name, item)
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are not swallowed; load_json failing means a missing/bad file.
            debug('except')
            page.error('No files matching %s found in %s' %
                       (item, self._name))
            return page.render()
        c = Character(json)
        rendered = {}
        rendered['json'] = c.render()
        return page.tplrender('json_viewer.tpl', rendered)
|
marbindrakon/eve-wspace
|
evewspace/search/registry.py
|
Python
|
apache-2.0
| 3,019
| 0.002981
|
# Eve W-Space
# Copyright 2014 Andrew Austin and contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
A registry module for registration of searches.
This is based on the registry modules from django_autocomplete_light
"""
from django.db import models
from search_base import SearchBase
class SearchRegistry(dict):
    """
    Dict with methods for handling search registration.

    Maps generated search-class names to SearchBase subclasses, and keeps a
    parallel model -> search mapping in ``_models``.
    """

    def __init__(self):
        self._models = {}

    def search_for_model(self, model):
        """Return the search registered for ``model``, or None if absent."""
        try:
            return self._models[model]
        except KeyError:
            return None

    def unregister(self, name):
        """Remove the search registered under ``name`` (KeyError if absent)."""
        del self[name]

    def register(self, model, name, search_field, queryset):
        """
        Registers a search on a model.
        This is a simple form of the registry from django_autocomplete_light
        that must be provided with a model, name, and the field on the model
        to search.
        """
        if not issubclass(model, models.Model):
            raise AttributeError
        if not search_field:
            raise AttributeError
        if not name:
            name = '%sSearch' % model.__name__
        try:
            search_model_field = model._meta.get_field(search_field)
        except Exception:
            raise Exception('The provided search field is not defined in the model.')
        if not queryset:
            queryset = model.objects.all()
        base_context = {'choices': queryset,
                        'search_field': search_model_field}
        # Dynamically build a SearchBase subclass named `name`.
        search = type(name, (SearchBase,), base_context)
        self[search.__name__] = search
        self._models[model] = search
def _autodiscover(registry):
    """Import each installed app's `searches` module so the app can register
    its searches against `registry`.

    Mirrors the autodiscover pattern of django.contrib.admin: a failed import
    of an app without a `searches` module is ignored, while a genuine error
    inside an existing `searches` module is re-raised.
    """
    import copy
    from django.conf import settings
    from importlib import import_module
    from django.utils.module_loading import module_has_submodule

    for app in settings.INSTALLED_APPS:
        mod = import_module(app)
        # Attempt to import app's searches module
        try:
            before_import_registry = copy.copy(registry)
            import_module('%s.searches' % app)
        except Exception:
            # NOTE(review): rebinding the local name does not actually roll
            # back mutations on the shared registry; kept from the upstream
            # pattern.  Narrowed from a bare `except:`.
            registry = before_import_registry
            # A real error inside an existing searches module must surface.
            if module_has_submodule(mod, 'searches'):
                raise
# Module-level singleton registry, populated by autodiscover() at startup.
registry = SearchRegistry()
def autodiscover():
    """Populate the module-level registry from every installed app."""
    _autodiscover(registry)
def register(model, name, search_field, queryset=None):
    """Proxy for registry register method."""
    return registry.register(model, name, search_field, queryset)
|
ypersyntelykos/GitSavvy
|
core/git_mixins/checkout_discard.py
|
Python
|
mit
| 589
| 0
|
class CheckoutDiscardMixin():
    """Git operations that discard uncommitted work (clean / checkout)."""

    def discard_all_unstaged(self):
        """
        Any changes that are not staged or committed will be reverted
        to their state in HEAD. Any new files will be deleted.
        """
        self.git("clean", "-df")
        # Rejoined: this call was split mid-token in the corrupted source
        # ("se | lf.git(...)").
        self.git("checkout", "--", ".")

    def checkout_file(self, fpath):
        """
        Given an absolute path or path relative to the repo's root, discard
        any changes made to the file and revert it in the working directory
        to the state it is in HEAD.
        """
        self.git("checkout", "--", fpath)
|
INTechSenpai/moon-rover
|
debug_tools/python_debug_console/AsciiSerial.py
|
Python
|
gpl-3.0
| 11,638
| 0.003268
|
import serial
import serial.tools.list_ports
import copy
import numpy as np
import math
import random
class AsciiSerial:
    def __init__(self):
        """Set up channel metadata, serial I/O buffers and the per-graph
        data store (no hardware is touched until open())."""
        # Channel currently assigned to each of the four graphs (None = unassigned).
        self._graphsChannels = {'graph1': None, 'graph2': None, 'graph3': None, 'graph4': None}
        # Whether each graph is actively collecting data.
        self._enChannels = {'graph1': False, 'graph2': False, 'graph3': False, 'graph4': False}
        # Structure definition:
        # {'ChannelName': channelData('display', {'lineName': [lowLevelID, xFieldID, yFieldID(optional)], ... }), ... }
        self._channelsDataStructure = {
            'POSITION': channelData('line-scatter', {'p': [0, 0, 1]}),
            'TRAJECTORY': channelData('line-scatter', {'t': [1, 0, 1]}),
            'PID_V_G': channelData('line', {'setPoint': [2, 0], 'value': [2, 1], 'output': [2, 2]}),
            'PID_V_D': channelData('line', {'setPoint': [3, 0], 'value': [3, 1], 'output': [3, 2]}),
            'PID_TRANS': channelData('line', {'setPoint': [4, 0], 'value': [4, 1], 'output': [4, 2]}),
            'BLOCKING_M_G': channelData('line', {'aimSpeed': [5, 0], 'realSpeed': [5, 1], 'isBlocked': [5, 2]}),
            'BLOCKING_M_D': channelData('line', {'aimSpeed': [6, 0], 'realSpeed': [6, 1], 'isBlocked': [6, 2]}),
            'STOPPING_MGR': channelData('line', {'speed': [7, 0], 'isStopped': [7, 1]}),
            'DIRECTION': channelData('line', {'aimDirection': [8, 0], 'realDirection': [8, 1]}),
            'SENSORS': channelData('scatter', {'sensorTest': [9, 0, 1]}),
            'PID_TRAJ': channelData('scatter', {'p': [0, 0, 1], 't': [1, 0, 1]}),
            'TRAJ_ERRORS': channelData('line', {'t': [10, 0], 'c': [10, 1], 'ac': [10, 2], 'ang': [10, 3], 'pos': [10, 4]}),
            'AX12_G': channelData('line', {'aim': [8, 4], 'real': [8, 2]}),
            'AX12_D': channelData('line', {'aim': [8, 5], 'real': [8, 3]})
        }
        # Empty data container template for each display shape.
        self._shapeInitData = {
            'line': [],
            'line-scatter': [[], []],
            'scatter': [[], []]
        }
        self.linesToSend = []
        self.receivedLines_main = []
        self.receivedLines_warning = []
        self.receivedLines_error = []
        self.serial = serial.Serial()
        self.incomingLine = ""
        # Data format (translated from the original French comments):
        # {'graphN': {'data': {'lineName': lineData, ...}, 'shape': String}
        #
        # 'shape' may be:
        # "line": continuous line joining every point
        # "scatter": cloud of independent (x, y) points
        # "line-scatter": cloud of (x, y) points joined together
        #
        # For each 'shape', 'lineData' takes a different form:
        # "line": 1-D array holding the y coordinate of each point
        # "scatter": 2-D array t; t[0] holds x for every point, t[1] holds y
        # "line-scatter": same as 'scatter'
        self.graphData = {'graph1': {'data': None, 'shape': None},
                          'graph2': {'data': None, 'shape': None},
                          'graph3': {'data': None, 'shape': None},
                          'graph4': {'data': None, 'shape': None}}
        # NOTE(review): phase/index look like synthetic-data helpers; they are
        # not used by the methods visible here -- confirm before removing.
        self.phase = np.linspace(0, 10 * np.pi, 100)
        self.index = 0
@staticmethod
def scanPorts():
return list(serial.tools.list_ports.comports())
def open(self, port):
self.serial.port = port.split(" ")[0]
self.serial.open()
def close(self):
self.serial.close()
def getChannelsList(self):
channelsList = []
for key in self._channelsDataStructure:
channelsList.append(key)
channelsList.sort()
return channelsList
def getChannelsFromID(self, identifier):
channels = set()
for channel, cData in self._channelsDataStructure.items():
lines = cData.lineNames
for lineName, lineIds in lines.items():
if lineIds[0] == identifier:
channels.add(channel)
return channels
def getIDsFromChannel(self, channel):
ids = set()
lines = self._channelsDataStructure[channel].lineNames
for lineName, lineIds in lines.items():
ids.add(lineIds[0])
return ids
    def communicate(self):
        """Pump the serial link once: flush queued outgoing lines, append any
        pending received bytes to the input buffer, then dispatch every
        complete newline-terminated line to the proper received-lines list."""
        if self.serial.is_open:
            for line in self.linesToSend:
                self.serial.write(line.encode('ascii'))
            self.linesToSend.clear()
            nbB = self.serial.in_waiting
            if nbB > 0:
                # Undecodable bytes are silently dropped (errors='ignore').
                self.incomingLine += self.serial.read(nbB).decode(encoding='utf-8', errors='ignore')
            newLineIndex = self.incomingLine.find('\n')
            while newLineIndex != -1:
                # Consume one complete line (newline included) per iteration;
                # a trailing partial line stays buffered for the next call.
                self.addLineToProperList(self.incomingLine[0:newLineIndex+1])
                self.incomingLine = self.incomingLine[newLineIndex+1:]
                newLineIndex = self.incomingLine.find('\n')
def addLineToProperList(self, line):
if len(line) > 5 and line[0:6] == "_data_":
try:
self.addGraphData(line[6:])
except ValueError:
self.receivedLines_main.append(line)
elif len(line) > 8 and line[0:9] == "_warning_":
self.receivedLines_warning.append(line[9:])
elif len(line) > 7 and line[0:7] == "_error_":
splittedLine = line.split("_")
errorLine = "#" + splittedLine[2] + "# "
for s in splittedLine[3:]:
errorLine += s
self.receivedLines_error.append(errorLine)
else:
self.receivedLines_main.append(line)
    def addGraphData(self, strData):
        """Parse one "_data_" payload ("<id>_<v1>_<v2>_...") and append its
        values to every enabled graph whose channel uses that low-level id.

        Raises ValueError for malformed payloads (directly, or via int()/
        float() on non-numeric fields).
        """
        data = strData.split("_")
        idChannel = int(data[0])
        channels = self.getChannelsFromID(idChannel)
        values = []
        for strValue in data[1:]:
            values.append(float(strValue))
        for graph in ['graph1', 'graph2', 'graph3', 'graph4']:
            gChannel = self._graphsChannels[graph]
            if gChannel in channels and self._enChannels[graph]:
                lines = self._channelsDataStructure[gChannel].lineNames
                for lineName, ids in lines.items():
                    if ids[0] == idChannel:
                        if len(ids) == 2:  # One dimension data
                            # NOTE(review): this only guarantees 2 values, yet
                            # ids[1] may index further (TRAJ_ERRORS uses index
                            # 4) -- an IndexError is still possible; confirm.
                            if len(values) <= 1:
                                raise ValueError
                            self.graphData[graph]['data'][lineName].append(values[ids[1]])
                        elif len(ids) == 3:  # Two dimensions data
                            if len(values) <= 2:
                                raise ValueError
                            self.graphData[graph]['data'][lineName][0].append(values[ids[1]])
                            self.graphData[graph]['data'][lineName][1].append(values[ids[2]])
def setEnabledChannels(self, competeConfig):
newGraphsChannels = {'graph1': competeConfig['graph1']['channel'],
|
'graph2': competeConfig['graph2']['channel'],
'graph3': competeConfig['graph3']['channel'],
'
|
graph4': competeConfig['graph4']['channel']}
newEnabledList = {'graph1': competeConfig['graph1']['enable'],
'graph2': competeConfig['graph2']['enable'],
'graph3': competeConfig['graph3']['enable'],
'graph4': competeConfig['graph4']['enable']}
commandLines = []
graphs = ['graph1', 'graph2', 'graph3', 'graph4']
for graph in graphs:
if newGraphsChannels[graph] != self._graphsChannels[graph]:
if self._enChannels[graph]:
commandLines += self.enableChannel(self._graphsChannels[graph], False)
else:
if newEnabledList[graph] != self._enChannels[graph]:
if not newEnabledList[graph]:
commandLines += self.enableChannel(self._graphsChannels[graph], False)
for graph in graphs:
if newGraphsChannels[graph] != self._graphsChan
|
vpadillar/pventa
|
reporte/migrations/0002_auto_20161110_0819.py
|
Python
|
mit
| 504
| 0.001984
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-11-10 08:19
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Constrain `reporteproducto.tipo` to the four reporting-period choices."""

    dependencies = [
        ('reporte', '0001_initial'),
    ]

    # Rejoined: "operatio | ns = [" was split mid-token in the corrupted source.
    operations = [
        migrations.AlterField(
            model_name='reporteproducto',
            name='tipo',
            field=models.IntegerField(choices=[(1, 'Diaria'), (2, 'Semana'), (3, 'Mensual'), (4, 'Anual')]),
        ),
    ]
|
betterclever/susi_linux
|
main/renderer/configuration_window.py
|
Python
|
apache-2.0
| 8,711
| 0
|
import os
from pathlib import Path
import gi
import logging
from gi.repository import Gtk
import json_config
from .login_window import LoginWindow
TOP_DIR = os.path.dirname(os.path.abspath(__file__))
config = json_config.connect('config.json')
gi.require_version('Gtk', '3.0')
class WatsonCredentialsDialog(Gtk.Dialog):
    """Modal dialog collecting an IBM Watson username and password."""

    def __init__(self, parent):
        Gtk.Dialog.__init__(self, "Enter Credentials", parent, 0,
                            (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
                             Gtk.STOCK_OK, Gtk.ResponseType.OK))
        self.set_default_size(150, 100)

        self.username_field = Gtk.Entry()
        self.username_field.set_placeholder_text("Username")

        self.password_field = Gtk.Entry()
        self.password_field.set_placeholder_text("Password")
        # Mask the password while it is typed.
        self.password_field.set_visibility(False)
        self.password_field.set_invisible_char('*')

        content = self.get_content_area()
        content.set_margin_top(10)
        content.set_margin_bottom(10)
        content.set_margin_left(10)
        content.set_margin_right(10)
        content.set_spacing(10)
        content.add(self.username_field)
        content.add(self.password_field)
        self.show_all()
class BingCredentialDialog(Gtk.Dialog):
    """Modal dialog collecting a Bing Speech API key."""

    def __init__(self, parent):
        Gtk.Dialog.__init__(self, "Enter API Key", parent, 0,
                            (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
                             Gtk.STOCK_OK, Gtk.ResponseType.OK))
        self.set_default_size(150, 100)

        self.api_key_field = Gtk.Entry()
        self.api_key_field.set_placeholder_text("API Key")

        content = self.get_content_area()
        content.set_margin_top(10)
        content.set_margin_bottom(10)
        content.set_margin_left(10)
        content.set_margin_right(10)
        content.set_spacing(10)
        content.add(self.api_key_field)
        self.show_all()
class ConfigurationWindow:
    def __init__(self) -> None:
        """Load the Glade layout, grab widget handles, and initialise every
        control from the persisted configuration."""
        super().__init__()
        builder = Gtk.Builder()
        builder.add_from_file(os.path.join(
            TOP_DIR, "glade_files/configure.glade"))
        self.window = builder.get_object("configuration_window")
        self.stt_combobox = builder.get_object("stt_combobox")
        self.tts_combobox = builder.get_object("tts_combobox")
        self.auth_switch = builder.get_object("auth_switch")
        self.snowboy_switch = builder.get_object("snowboy_switch")
        self.wake_button_switch = builder.get_object("wake_button_switch")
        # Sync every control with the stored config before wiring signals, so
        # the handlers are not triggered by the initial state.
        self.init_auth_switch()
        self.init_tts_combobox()
        self.init_stt_combobox()
        self.init_hotword_switch()
        self.init_wake_button_switch()
        builder.connect_signals(ConfigurationWindow.Handler(self))
        self.window.set_resizable(False)
    def show_window(self):
        """Show the window and enter the Gtk main loop (blocks until quit)."""
        self.window.show_all()
        Gtk.main()
    def exit_window(self):
        """Destroy the window and quit the Gtk main loop."""
        self.window.destroy()
        Gtk.main_quit()
def init_tts_combobox(self):
default_tts = config['default_tts']
if default_tts == 'google':
self.tts_combobox.set_active(0)
elif default_tts == 'flite':
self.tts_combobox.set_active(1)
elif default_tts == 'watson':
self.tts_combobox.s
|
et_active(2)
else:
self.tts_combobox.set_active(0)
config['default_tts'] = 'google'
def init_stt_combobox(self):
default_stt = config['default_stt']
if default_stt == 'google':
self.stt_co
|
mbobox.set_active(0)
elif default_stt == 'watson':
self.stt_combobox.set_active(1)
elif default_stt == 'bing':
self.stt_combobox.set_active(2)
else:
self.tts_combobox.set_active(0)
config['default_tts'] = 'google'
def init_auth_switch(self):
usage_mode = config['usage_mode']
if usage_mode == 'authenticated':
self.auth_switch.set_active(True)
else:
self.auth_switch.set_active(False)
def init_hotword_switch(self):
try:
parent_dir = os.path.dirname(TOP_DIR)
snowboyDetectFile = Path(os.path.join(
parent_dir, "hotword_engine/snowboy/_snowboydetect.so"))
print(snowboyDetectFile)
if not snowboyDetectFile.exists():
self.snowboy_switch.set_sensitive(False)
config['hotword_engine'] = 'PocketSphinx'
except Exception as e:
logging.error(e)
config['hotword_engine'] = 'PocketSphinx'
if config['hotword_engine'] == 'Snowboy':
self.snowboy_switch.set_active(True)
else:
self.snowboy_switch.set_active(False)
def init_wake_button_switch(self):
try:
import RPi.GPIO
if config['WakeButton'] == 'enabled':
self.wake_button_switch.set_active(True)
else:
self.wake_button_switch.set_active(False)
except ImportError:
self.wake_button_switch.set_sensitive(False)
    class Handler:
        """Signal handlers wired to the configuration window's widgets."""

        def __init__(self, config_window):
            # Back-reference so handlers can reach the window and widgets.
            self.config_window = config_window

        def on_delete_window(self, *args):
            # Closing the window tears down the Gtk main loop.
            print('Exiting')
            self.config_window.exit_window()

        def on_stt_combobox_changed(self, combo: Gtk.ComboBox):
            """Persist the chosen speech-to-text engine, prompting for
            credentials where the engine needs them; cancelling a dialog
            restores the combobox to the stored setting."""
            selection = combo.get_active()
            if selection == 0:
                # Google STT needs no credentials.
                config['default_stt'] = 'google'
            elif selection == 1:
                # IBM Watson: requires username/password.
                credential_dialog = WatsonCredentialsDialog(
                    self.config_window.window)
                response = credential_dialog.run()
                if response == Gtk.ResponseType.OK:
                    username = credential_dialog.username_field.get_text()
                    password = credential_dialog.password_field.get_text()
                    config['default_stt'] = 'watson'
                    config['watson_stt_config']['username'] = username
                    config['watson_stt_config']['password'] = password
                else:
                    self.config_window.init_stt_combobox()
                credential_dialog.destroy()
            elif selection == 2:
                # Microsoft Bing: requires an API key.
                credential_dialog = BingCredentialDialog(
                    self.config_window.window)
                response = credential_dialog.run()
                if response == Gtk.ResponseType.OK:
                    api_key = credential_dialog.api_key_field.get_text()
                    config['default_stt'] = 'bing'
                    # NOTE(review): the key is stored under ['username'] --
                    # looks like it should be config['bing_speech_api_key']
                    # itself; confirm against the consumer of this setting.
                    config['bing_speech_api_key']['username'] = api_key
                else:
                    self.config_window.init_stt_combobox()
                credential_dialog.destroy()

        def on_tts_combobox_changed(self, combo):
            """Persist the chosen text-to-speech engine, prompting for IBM
            Watson credentials when that engine is selected."""
            selection = combo.get_active()
            if selection == 0:
                config['default_tts'] = 'google'
            elif selection == 1:
                config['default_tts'] = 'flite'
            elif selection == 2:
                credential_dialog = WatsonCredentialsDialog(
                    self.config_window.window)
                response = credential_dialog.run()
                if response == Gtk.ResponseType.OK:
                    username = credential_dialog.username_field.get_text()
                    password = credential_dialog.password_field.get_text()
                    config['default_tts'] = 'watson'
                    config['watson_tts_config']['username'] = username
                    config['watson_tts_config']['password'] = password
                    config['watson_tts_config']['voice'] = 'en-US_AllisonVoice'
                else:
                    self.config_window.init_tts_combobox()
                credential_dialog.destroy()
def on_auth_switch_active_notify(self, switch, gparam):
if switch.get_active():
login_window = LoginWindow()
login_window.show_window()
if config['usage_mode'] == 'authenticated':
switch.set_active(Tr
|
mkoistinen/django-filer
|
filer/admin/imageadmin.py
|
Python
|
bsd-3-clause
| 1,629
| 0
|
# -*- coding: utf-8 -*-
from django import forms
from django.contrib.staticfiles.templatetags.staticfiles import static
from django.utils.translation import ugettext as _
from filer.admin.fileadmin import FileAdmin
from filer.models import Image
class ImageAdminForm(forms.ModelForm):
    """Admin form for Image files, adding a subject-location field used by
    the focal-point picker."""

    subject_location = forms.CharField(
        max_length=64, required=False,
        label=_('Subject location'),
        help_text=_('Location of the main subject of the scene.'))

    def sidebar_image_ratio(self):
        """Return the instance's sidebar image ratio as a fixed-format string
        ('' when there is no instance)."""
        if self.instance:
            # this is very important. It forces the value to be returned as a
            # string and always with a "." as separator. If the conversion
            # from float to string is done in the template, the locale will
            # be used and in some cases there would be a "," instead of ".".
            # javascript would parse that to an integer.
            return '%.6F' % self.instance.sidebar_image_ratio()
        else:
            return ''

    class Meta:
        model = Image
        exclude = ()

    class Media:
        css = {
            # 'all': (settings.MEDIA_URL + 'filer/css/focal_point.css',)
        }
        js = (
            static('filer/js/raphael.js'),
            static('filer/js/focal_point.js'),
        )
class ImageAdmin(FileAdmin):
    # Use the image-specific form so the subject-location field is exposed.
    form = ImageAdminForm
# Extend the base FileAdmin fieldsets with image metadata and a collapsible
# "Subject Location" section for the focal-point field defined on the form.
ImageAdmin.fieldsets = ImageAdmin.build_fieldsets(
    extra_main_fields=('author', 'default_alt_text', 'default_caption',),
    extra_fieldsets=(
        ('Subject Location', {
            'fields': ('subject_location',),
            'classes': ('collapse',),
        }),
    )
)
|
tejo-esperanto/pasportaservo
|
core/middleware.py
|
Python
|
agpl-3.0
| 6,047
| 0.002481
|
from django.conf import settings
from django.contrib.auth.views import (
LoginView, LogoutView, redirect_to_login as redirect_to_intercept,
)
from django.core.exceptions import PermissionDenied, ValidationError
from django.template.response import TemplateResponse
from django.urls import Resolver404, resolve, reverse
from django.utils import timezone
from django.utils.deprecation import MiddlewareMixin
from django.utils.text import format_lazy
from django.utils.translation import ugettext_lazy as _
from core.models import Agreement, Policy, SiteConfiguration
from core.views import AgreementRejectView, AgreementView, HomeView
from hosting.models import Preferences, Profile
from hosting.validators import TooNearPastValidator
from pasportaservo.urls import (
url_index_debug, url_index_maps, url_index_postman,
)
class AccountFlagsMiddleware(MiddlewareMixin):
    """
    Updates any flags and settings related to the user's account, whose value
    cannot be pre-determined.
    Checks that pre-conditions for site usage are satisfied.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # URL prefixes for which all account checks are skipped.
        exclude_urls = [
            reverse('admin:index'),
            url_index_debug,
            settings.STATIC_URL,
            settings.MEDIA_URL,
            '/favicon.ico',
            url_index_maps,
        ]
        self.exclude_urls = tuple(str(url) for url in exclude_urls)

    def process_request(self, request):
        if not request.user.is_authenticated:
            # Only relevant to logged in users.
            return
        if request.path.startswith(self.exclude_urls):
            # Only relevant when using the website itself (not Django-Admin or debug tools),
            # when the file requested is not a static one,
            # and when the request is not for resources or configurations related to maps.
            request.skip_hosting_checks = True
            return
        profile = Profile.all_objects.filter(user=request.user)[0:1]
        if 'flag_analytics_setup' not in request.session:
            # Update user's analytics consent according to the DNT setting in the browser, first time
            # when the user logs in (DNT==True => opt out). Prior to that the consent is undefined.
            pref = Preferences.objects.filter(profile=profile, site_analytics_consent__isnull=True)
            pref.update(site_analytics_consent=not request.DNT)
            request.session['flag_analytics_setup'] = str(timezone.now())
        # Is user's age above the legally required minimum?
        birth_date = profile.values_list('birth_date', flat=True)
        trouble_view = None
        try:
            trouble_view = resolve(request.path)
            if (hasattr(trouble_view.func, 'view_class') and trouble_view.func.view_class not in
                    [LoginView, LogoutView, HomeView, AgreementRejectView]):
                try:
                    resolve(request.path, 'pages.urls')
                except Resolver404:
                    # The URL accessed is not one of the general pages.
                    pass
                else:
                    # A general page is ok.
                    trouble_view = None
            else:
                trouble_view = None
        except Resolver404:
            # A non-existent page is ok.
            pass
        if trouble_view is not None and len(birth_date) != 0 and birth_date[0]:
            birth_date_value = birth_date[0]  # Returned as a list from the query.
            try:
                TooNearPastValidator(SiteConfiguration.USER_MIN_AGE)(birth_date_value)
            except ValidationError:
                raise PermissionDenied(format_lazy(
                    _("Unfortunately, you are still too young to use Pasporta Servo. "
                      "Wait until you are {min_age} years of age!"),
                    min_age=SiteConfiguration.USER_MIN_AGE
                ))
        # Has the user consented to the most up-to-date usage policy?
        policy = (Policy.objects.order_by('-id').values('version', 'content'))[0:1]
        if trouble_view is not None:
            agreement = Agreement.objects.filter(
                user=request.user, policy_version__in=policy.values_list('version'), withdrawn__isnull=True)
            if not agreement.exists():
                # Policy will be needed to display the following page anyway,
                # so it is immediately fetched from the database.
                # (Rejoined: this assignment was split mid-token in the
                # corrupted source.)
                request.user.consent_required = [policy.first()]
                if request.user.consent_required[0] is None:
                    raise RuntimeError("Service misconfigured: No user agreement was defined.")
                if trouble_view.func.view_class != AgreementView:
                    return redirect_to_intercept(
                        request.get_full_path(),
                        reverse('agreement'),
                        redirect_field_name=settings.REDIRECT_FIELD_NAME
                    )
            else:
                # Policy most probably will not be needed, so it is lazily
                # evaluated to spare a superfluous query on the database.
                request.user.consent_obtained = policy
        if request.path.startswith(url_index_postman) and len(birth_date) == 0 and not request.user.is_superuser:
            # We can reuse the birth date query result to avoid an additional
            # query in the DB. For users with a profile, the result will not
            # be empty and hold some value (either datetime or None).
            t = TemplateResponse(
                request, 'registration/profile_create.html', status=403,
                context={
                    'function_title': _("Inbox"),
                    'function_description': _("To be able to communicate with other members of the PS community, "
                                              "you need to create a profile."),
                })
            t.render()
            return t
|
amol-/tgext.matplotrender
|
setup.py
|
Python
|
mit
| 1,201
| 0.003331
|
"""Packaging script for tgext.matplotrender."""
from setuptools import setup, find_packages
import sys, os

here = os.path.abspath(os.path.dirname(__file__))
# Read the long description with a context manager so the handle is closed
# deterministically; a missing README falls back to an empty string.
try:
    with open(os.path.join(here, 'README.rst')) as readme_file:
        README = readme_file.read()
except IOError:
    README = ''

version = "0.0.1"

TEST_REQUIREMENTS = [
    'numpy',
    'pillow',
    'webtest'
]

setup(
    name='tgext.matplotrender',
    version=version,
    description="Renderer to expose matplotlib figures",
    long_description=README,
    # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[],
    keywords='turbogears2.extension',
    author='Alessandro Molina',
    author_email='amol@turbogears.org',
    url='https://github.com/amol-/tgext.matplotrender',
    license='MIT',
    packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
    namespace_packages=['tgext'],
    include_package_data=True,
    zip_safe=False,
    install_requires=[
        # Rejoined: this requirement string was split mid-token in the
        # corrupted source ("TurboGears2 | >= 2.3.8").
        "TurboGears2 >= 2.3.8",
        'matplotlib'
    ],
    extras_require={
        # Used by Travis and Coverage due to setup.py nosetests
        # causing a coredump when used with coverage
        'testing': TEST_REQUIREMENTS,
    },
    entry_points="""
    # -*- Entry points: -*-
    """
)
|
cloudify-cosmo/cloudify-manager-blueprints
|
components/python/scripts/bootstrap_validate.py
|
Python
|
apache-2.0
| 1,334
| 0
|
#!/usr/bin/env python
"""Validate that prerequisite RPM packages for the manager are installed."""
from os.path import join, dirname

from cloudify import ctx

# Fetch the shared utils module next to this script before importing it.
ctx.download_resource(
    join('components', 'utils.py'),
    join(dirname(__file__), 'utils.py'))
import utils  # NOQA

# Most images already ship with the following packages:
#
# python-setuptools
# python-backports
# python-backports-ssl_match_hostname
#
# - as they are dependencies of cloud-init, which is extremely popular.
#
# However, cloud-init is irrelevant for certain IaaS (such as vSphere) so
# images used there may not have these packages preinstalled.
#
# We're currently considering whether to include these libraries in the
# manager resources package. Until then, we only validate that they're
# preinstalled, and if not - instruct the user to install them.
missing_packages = set()
for pkg in ['python-setuptools',
            'python-backports',
            'python-backports-ssl_match_hostname']:
    ctx.logger.info('Ensuring {0} is installed'.format(pkg))
    is_installed = utils.RpmPackageHandler.is_package_installed(pkg)
    if not is_installed:
        missing_packages.add(pkg)

if missing_packages:
    ctx.abort_operation('Prerequisite packages missing: {0}. '
                        'Please ensure these packages are installed and '
                        'try again'.format(', '.join(missing_packages)))
|
kirillzhuravlev/atrex
|
Software/gaussfitter.py
|
Python
|
lgpl-3.0
| 23,723
| 0.024702
|
"""
===========
gaussfitter
===========
.. codeauthor:: Adam Ginsburg <adam.g.ginsburg@gmail.com> 3/17/08
Latest version available at <http://code.google.com/p/agpy/source/browse/trunk/agpy/gaussfitter.py>
"""
import numpy
from numpy.ma import median
from numpy import pi
#from scipy import optimize,stats,pi
from mpfit import mpfit
"""
Note about mpfit/leastsq:
I switched everything over to the Markwardt mpfit routine for a few reasons,
but foremost being the ability to set limits on parameters, not just force them
to be fixed. As far as I can tell, leastsq does not have that capability.
The version of mpfit I use can be found here:
http://code.google.com/p/agpy/source/browse/trunk/mpfit
.. todo::
-turn into a class instead of a collection of objects
-implement WCS-based gaussian fitting with correct coordinates
"""
def moments(data,circle,rotate,vheight,estimator=median,**kwargs):
    """Returns (height, amplitude, x, y, width_x, width_y, rotation angle)
    the gaussian parameters of a 2D distribution by calculating its
    moments.  Depending on the input parameters, will only output
    a subset of the above.

    If using masked arrays, pass estimator=numpy.ma.median

    Raises ValueError when any of the estimated moments is NaN.
    """
    total = numpy.abs(data).sum()
    Y, X = numpy.indices(data.shape) # python convention: reverse x,y numpy.indices
    y = numpy.argmax((X*numpy.abs(data)).sum(axis=1)/total)
    x = numpy.argmax((Y*numpy.abs(data)).sum(axis=0)/total)
    col = data[int(y),:]
    # FIRST moment, not second!
    # NOTE(review): width_x is computed along the row at y using offset y
    # (not x) -- looks suspicious but preserved from upstream; confirm.
    width_x = numpy.sqrt(numpy.abs((numpy.arange(col.size)-y)*col).sum()/numpy.abs(col).sum())
    row = data[:, int(x)]
    width_y = numpy.sqrt(numpy.abs((numpy.arange(row.size)-x)*row).sum()/numpy.abs(row).sum())
    width = ( width_x + width_y ) / 2.
    height = estimator(data.ravel())
    amplitude = data.max()-height
    mylist = [amplitude,x,y]
    if numpy.isnan(width_y) or numpy.isnan(width_x) or numpy.isnan(height) or numpy.isnan(amplitude):
        raise ValueError("something is nan")
    if vheight==1:
        mylist = [height] + mylist
    if circle==0:
        mylist = mylist + [width_x,width_y]
        if rotate==1:
            # rotation "moment" is just zero... also, circles don't rotate.
            # (Rejoined: this comment was split mid-token in the corrupted source.)
            mylist = mylist + [0.]
    else:
        mylist = mylist + [width]
    return mylist
def twodgaussian(inpars, circle=False, rotate=True, vheight=True, shape=None):
    """Build a 2d gaussian of the form:
        x' = numpy.cos(rota) * x - numpy.sin(rota) * y
        y' = numpy.sin(rota) * x + numpy.cos(rota) * y
        (rota should be in degrees)
        g = b + a * numpy.exp ( - ( ((x-center_x)/width_x)**2 +
        ((y-center_y)/width_y)**2 ) / 2 )

    ``inpars`` is a flat sequence whose full form is
        (height, amplitude, center_x, center_y, width_x, width_y, rota)
    where entries may be omitted from the front/back according to:
        vheight=1 - variable height-above-zero (additive constant).
                    Set to 0 to drop the leading ``height`` entry.
        circle=0  - elliptical gaussian with separate x/y widths; set to 1
                    for a circular gaussian with a single width entry
                    (rotation is then meaningless and disabled).
        rotate=1  - allow rotation of the ellipse; set to 0 to drop the
                    trailing ``rota`` entry.
        shape     - if given as a 2-element shape, return an image of that
                    shape instead of a callable.

    Returns either ``f(x, y)`` evaluating the gaussian, or an array when
    ``shape`` is supplied.
    """
    original_params = inpars
    remaining = list(inpars)
    if vheight == 1:
        height = float(remaining.pop(0))
    else:
        height = float(0)
    amplitude = float(remaining.pop(0))
    # Note the y-before-x consumption order (numpy indexing convention).
    center_y = float(remaining.pop(0))
    center_x = float(remaining.pop(0))
    if circle == 1:
        # One shared width; a circle has no meaningful rotation.
        width_x = width_y = float(remaining.pop(0))
        rotate = 0
    else:
        width_x = float(remaining.pop(0))
        width_y = float(remaining.pop(0))
    if rotate == 1:
        rota = pi/180. * float(remaining.pop(0))
        rcen_x = center_x * numpy.cos(rota) - center_y * numpy.sin(rota)
        rcen_y = center_x * numpy.sin(rota) + center_y * numpy.cos(rota)
    else:
        rcen_x = center_x
        rcen_y = center_y
    if len(remaining) > 0:
        # Leftover entries mean the flags disagree with the parameter list.
        raise ValueError("There are still input parameters:" + str(remaining) + \
                " and you've input: " + str(original_params) + \
                " circle=%d, rotate=%d, vheight=%d" % (circle,rotate,vheight) )

    def rotgauss(x, y):
        # Rotate the evaluation coordinates into the ellipse frame, then
        # evaluate the (optionally offset) gaussian there.
        if rotate == 1:
            xp = x * numpy.cos(rota) - y * numpy.sin(rota)
            yp = x * numpy.sin(rota) + y * numpy.cos(rota)
        else:
            xp = x
            yp = y
        return height + amplitude * numpy.exp(
            -(((rcen_x - xp) / width_x) ** 2 +
              ((rcen_y - yp) / width_y) ** 2) / 2.)

    if shape is not None:
        return rotgauss(*numpy.indices(shape))
    return rotgauss
def gaussfit(data,err=None,params=(),autoderiv=True,return_all=False,circle=False,
fixed=numpy.repeat(False,7),limitedmin=[False,False,False,False,True,True,True],
limitedmax=[False,False,False,False,False,False,True],
usemoment=numpy.array([],dtype='bool'),
minpars=numpy.repeat(0,7),maxpars=[0,0,0,0,0,0,360],
rotate=1,vheight=1,quiet=True,returnmp=False,
returnfitimage=False,**kwargs):
"""
Gaussian fitter with the ability to fit a variety of different forms of
2-dimensional gaussian.
Input Parameters:
data - 2-dimensional data array
err=None - error array with same size as data array
params=[] - initial input parameters for Gaussian function.
(height, amplitude, x, y, width_x, width_y, rota)
if not input, these will be determined from the moments of the system,
assuming no rotation
autoderiv=1 - use the autoderiv provided in the lmder.f function (the
alternative is to us an analytic derivative with lmdif.f: this method
is less robust)
return_all=0 - Default is to return only the Gaussian parameters.
1 - fit params, fit error
returnfitimage - returns (best fit params,best fit image)
returnmp - returns the full mpfit struct
circle=0 - default is an elliptical gaussian (different x, y widths),
but can reduce the input by one parameter if it's a circular gaussian
rotate=1 - default allows rotation of the gaussian ellipse. Can remove
last parameter by setting rotate=0. numpy.expects angle in DEGREES
vheight=1 - default allows a variable height-above-zero, i.e. an
additive constant for the Gaussian function. Can remove first
parameter by setting this to 0
usemoment - can choose which parameters to use a moment estimation for.
Other parameters will be taken from params. Needs to be a boolean
array.
Output:
Default output is a set of Gaussian parameters with the same shape as
the input parameters
Can also output the covariance matrix, 'infodict' that contains a lot
more detail about the fit (see scipy.optimize.leastsq), and a message
from leastsq telling what the exit status of the fitting routine was
Warning: Does NOT necessarily output a rotation angle between 0 and 360 degrees.
"""
usemoment=numpy.array(usemoment,dtype='bool')
params=numpy.array(params,dtype='float')
if usemoment.any() and len(pa
|
EduPepperPDTesting/pepper2013-testing
|
lms/djangoapps/reportlab/graphics/widgets/grids.py
|
Python
|
agpl-3.0
| 18,133
| 0.013511
|
#Copyright ReportLab Europe Ltd. 2000-2016
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/graphics/widgets/grids.py
__version__='3.3.0'
from reportlab.lib import colors
from reportlab.lib.validators import isNumber, isColorOrNone, isBoolean, isListOfNumbers, OneOf, isListOfColors, isNumberOrNone
from reportlab.lib.attrmap import AttrMap, AttrMapValue
from reportlab.graphics.shapes import Drawing, Group, Line, Rect, LineShape, definePath, EmptyClipPath
from reportlab.graphics.widgetbase import Widget
def frange(start, end=None, inc=None):
    """A range function that accepts float increments.

    ``frange(stop)`` counts from 0.0 by 1.0; ``frange(start, stop[, inc])``
    counts from ``start`` towards (excluding) ``stop``.  Negative ``inc``
    counts downward.  Returns a list of floats.
    """
    if end is None:
        # Single-argument form: frange(stop).
        end = start + 0.0
        start = 0.0
    if inc is None:
        inc = 1.0
    L = []
    # Nudge the endpoint by a tiny fraction of the step to avoid float
    # round-off producing a spurious final value at the boundary.
    end = end - inc*0.0001
    while 1:
        # Recompute from start each time instead of accumulating, so
        # rounding errors do not build up over many steps.
        value = start + len(L) * inc
        if inc > 0 and value >= end:
            break
        elif inc < 0 and value <= end:
            break
        L.append(value)
    return L
def makeDistancesList(list):
    """Returns a list of distances between adjacent numbers in some input list.

    E.g. [1, 1, 2, 3, 5, 7] -> [0, 1, 1, 2, 2]
    """
    # Pair each element with its successor; empty/1-element input yields [].
    return [later - earlier for earlier, later in zip(list, list[1:])]
class Grid(Widget):
    """This makes a rectangular grid of equidistant stripes.

    The grid contains an outer border rectangle, and stripes
    inside which can be drawn with lines and/or as solid tiles.
    The drawing order is: outer rectangle, then lines and tiles.

    The stripes' width is indicated as 'delta'. The sequence of
    stripes can have an offset named 'delta0'. Both values need
    to be positive!
    """

    _attrMap = AttrMap(
        x = AttrMapValue(isNumber, desc="The grid's lower-left x position."),
        y = AttrMapValue(isNumber, desc="The grid's lower-left y position."),
        width = AttrMapValue(isNumber, desc="The grid's width."),
        height = AttrMapValue(isNumber, desc="The grid's height."),
        orientation = AttrMapValue(OneOf(('vertical', 'horizontal')),
            desc='Determines if stripes are vertical or horizontal.'),
        useLines = AttrMapValue(OneOf((0, 1)),
            desc='Determines if stripes are drawn with lines.'),
        useRects = AttrMapValue(OneOf((0, 1)),
            desc='Determines if stripes are drawn with solid rectangles.'),
        delta = AttrMapValue(isNumber,
            desc='Determines the width/height of the stripes.'),
        delta0 = AttrMapValue(isNumber,
            desc='Determines the stripes initial width/height offset.'),
        deltaSteps = AttrMapValue(isListOfNumbers,
            desc='List of deltas to be used cyclically.'),
        stripeColors = AttrMapValue(isListOfColors,
            desc='Colors applied cyclically in the right or upper direction.'),
        fillColor = AttrMapValue(isColorOrNone,
            desc='Background color for entire rectangle.'),
        strokeColor = AttrMapValue(isColorOrNone,
            desc='Color used for lines.'),
        strokeWidth = AttrMapValue(isNumber,
            desc='Width used for lines.'),
        rectStrokeColor = AttrMapValue(isColorOrNone, desc='Color for outer rect stroke.'),
        rectStrokeWidth = AttrMapValue(isNumberOrNone, desc='Width for outer rect stroke.'),
        )

    def __init__(self):
        # Geometry and default appearance; see _attrMap for each attribute's
        # meaning.  rectStrokeColor/rectStrokeWidth are intentionally unset
        # here -- makeOuterRect falls back to the stripe stroke settings.
        self.x = 0
        self.y = 0
        self.width = 100
        self.height = 100
        self.orientation = 'vertical'
        self.useLines = 0
        self.useRects = 1
        self.delta = 20
        self.delta0 = 0
        self.deltaSteps = []
        self.fillColor = colors.white
        self.stripeColors = [colors.red, colors.green, colors.blue]
        self.strokeColor = colors.black
        self.strokeWidth = 2

    def demo(self):
        """Return a small sample drawing containing one default grid."""
        D = Drawing(100, 100)
        g = Grid()
        D.add(g)
        return D

    def makeOuterRect(self):
        """Return the border rectangle, or None when it would be invisible."""
        strokeColor = getattr(self,'rectStrokeColor',self.strokeColor)
        strokeWidth = getattr(self,'rectStrokeWidth',self.strokeWidth)
        if self.fillColor or (strokeColor and strokeWidth):
            rect = Rect(self.x, self.y, self.width, self.height)
            rect.fillColor = self.fillColor
            rect.strokeColor = strokeColor
            rect.strokeWidth = strokeWidth
            return rect
        else:
            return None

    def makeLinePosList(self, start, isX=0):
        "Returns a list of positions where to place lines."
        w, h = self.width, self.height
        if isX:
            length = w
        else:
            length = h
        if self.deltaSteps:
            # Cycle through the user-supplied step widths until the span is
            # covered, then drop the position that overshot the far edge.
            r = [start + self.delta0]
            i = 0
            while 1:
                if r[-1] > start + length:
                    del r[-1]
                    break
                r.append(r[-1] + self.deltaSteps[i % len(self.deltaSteps)])
                i = i + 1
        else:
            r = frange(start + self.delta0, start + length, self.delta)
        # Always close the span at the far edge; an initial offset also
        # re-introduces the near edge as the first position.
        r.append(start + length)
        if self.delta0 != 0:
            r.insert(0, start)
        return r

    def makeInnerLines(self):
        """Return a Group of stripe boundary lines (empty unless useLines)."""
        group = Group()
        w, h = self.width, self.height
        if self.useLines == 1:
            if self.orientation == 'vertical':
                r = self.makeLinePosList(self.x, isX=1)
                for x in r:
                    line = Line(x, self.y, x, self.y + h)
                    line.strokeColor = self.strokeColor
                    line.strokeWidth = self.strokeWidth
                    group.add(line)
            elif self.orientation == 'horizontal':
                r = self.makeLinePosList(self.y, isX=0)
                for y in r:
                    line = Line(self.x, y, self.x + w, y)
                    line.strokeColor = self.strokeColor
                    line.strokeWidth = self.strokeWidth
                    group.add(line)
        return group

    def makeInnerTiles(self):
        """Return a Group of solid stripe rectangles (empty unless useRects)."""
        group = Group()
        w, h = self.width, self.height
        # inner grid stripes (solid rectangles), colored cyclically
        if self.useRects == 1:
            cols = self.stripeColors
            if self.orientation == 'vertical':
                r = self.makeLinePosList(self.x, isX=1)
            elif self.orientation == 'horizontal':
                r = self.makeLinePosList(self.y, isX=0)
            dist = makeDistancesList(r)
            i = 0
            for j in range(len(dist)):
                if self.orientation == 'vertical':
                    x = r[j]
                    stripe = Rect(x, self.y, dist[j], h)
                elif self.orientation == 'horizontal':
                    y = r[j]
                    stripe = Rect(self.x, y, w, dist[j])
                stripe.fillColor = cols[i % len(cols)]
                stripe.strokeColor = None
                group.add(stripe)
                i = i + 1
        return group

    def draw(self):
        """Assemble the widget: border rect, then tiles, then lines on top."""
        group = Group()
        group.add(self.makeOuterRect())
        group.add(self.makeInnerTiles())
        group.add(self.makeInnerLines(),name='_gridLines')
        return group
class DoubleGrid(Widget):
"""This combines two ordinary Grid objects orthogonal to each other.
"""
_attrMap = AttrMap(
x = AttrMapValue(isNumber, desc="The grid's lower-left x position."),
y = AttrMapValue(isNumber, desc="The grid's lower-left y position."),
width = AttrMapValue(isNumber, desc="The grid's width."),
height = AttrMapValue(isNumber, desc="The grid's height."),
grid0 = AttrMapValue(None, desc="The first grid component."),
grid1 = AttrMapValue(None, desc="The second grid component."),
)
def __init__(self):
self.x = 0
s
|
LTD-Beget/sprutio-rpc
|
lib/FileManager/workers/ftp/createCopy.py
|
Python
|
gpl-3.0
| 6,690
| 0.003101
|
import os
import time
import traceback
from lib.FileManager.FM import REQUEST_DELAY
from lib.FileManager.workers.baseWorkerCustomer import BaseWorkerCustomer
class CreateCopy(BaseWorkerCustomer):
    """Worker that duplicates FTP files/directories in place, giving each
    duplicate a " copy" / " copy(N)" suffix that does not clash with names
    already present in the target directory.
    """

    def __init__(self, paths, session, *args, **kwargs):
        super(CreateCopy, self).__init__(*args, **kwargs)
        self.paths = paths
        self.session = session

    def run(self):
        try:
            self.preload()
            self.logger.info("CreateCopy process run")
            ftp = self.get_ftp_connection(self.session)

            # Temporary hash table of directories for which we will fetch a
            # listing (one listdir per directory, not per path).
            directories = {}
            for path in self.paths:
                dirname = ftp.path.dirname(path)
                if dirname not in directories.keys():
                    directories[dirname] = []
                directories[dirname].append(path)

            # source -> target pairs, one per requested path
            copy_paths = []

            # Build the source -> target mapping so every copy gets a pretty
            # name, taking possible collisions with existing entries (and
            # with copies created in this very run) into account.
            for dirname, dir_paths in directories.items():
                dir_listing = ftp.listdir(dirname)
                for dir_path in dir_paths:
                    i = 0
                    exist = False
                    if ftp.isdir(dir_path):
                        filename = os.path.basename(dir_path)
                        ext = ''
                    else:
                        filename, file_extension = ftp.path.splitext(os.path.basename(dir_path))
                        ext = file_extension
                    copy_name = filename + ' copy' + ext if i == 0 else filename + ' copy(' + str(i) + ')' + ext
                    for dir_current_path in dir_listing:
                        if copy_name == dir_current_path:
                            exist = True
                            i += 1
                            break
                    if not exist:
                        copy_paths.append({
                            'source': dir_path,
                            'target': ftp.path.join(dirname, copy_name)
                        })
                    while exist:
                        exist = False
                        if ftp.isdir(dir_path):
                            filename = ftp.path.basename(dir_path)
                            ext = ''
                        else:
                            # NOTE(review): unlike the first attempt above,
                            # this splits the *full* path rather than its
                            # basename, so the fallback name keeps the
                            # directory part -- looks unintended; confirm
                            # against real FTP layouts before changing.
                            filename, file_extension = ftp.path.splitext(dir_path)
                            ext = file_extension
                        copy_name = filename + ' copy' + ext if i == 0 else filename + ' copy(' + str(i) + ')' + ext
                        for dir_current_path in dir_listing:
                            if copy_name == dir_current_path:
                                exist = True
                                i += 1
                                break
                        if not exist:
                            # Remember the chosen name so later paths in this
                            # run cannot pick the same target.
                            dir_listing.append(copy_name)
                            copy_paths.append({
                                'source': dir_path,
                                'target': os.path.join(dirname, copy_name)
                            })

            success_paths = []
            error_paths = []
            created_paths = []

            # Throttle progress callbacks to one per REQUEST_DELAY seconds.
            next_tick = time.time() + REQUEST_DELAY
            for copy_path in copy_paths:
                try:
                    source_path = copy_path.get('source')
                    target_path = copy_path.get('target')
                    if ftp.isfile(source_path):
                        copy_result = ftp.copy_file(source_path, ftp.path.dirname(target_path), overwrite=True,
                                                    rename=target_path)
                        if not copy_result['success'] or len(copy_result['file_list']['failed']) > 0:
                            raise copy_result['error'] if copy_result['error'] is not None else Exception(
                                "Upload error")
                    elif ftp.isdir(source_path):
                        copy_result = ftp.copy_dir(source_path, ftp.path.dirname(target_path), overwrite=True,
                                                   rename=target_path)
                        if not copy_result['success'] or len(copy_result['file_list']['failed']) > 0:
                            raise copy_result['error'] if copy_result['error'] is not None else Exception(
                                "Upload error")
                    else:
                        # Neither file nor directory: record the failure and
                        # stop processing further paths (original behaviour).
                        error_paths.append(source_path)
                        break

                    success_paths.append(source_path)
                    created_paths.append(ftp.file_info(target_path))

                    if time.time() > next_tick:
                        progress = {
                            'percent': round(float(len(success_paths)) / float(len(copy_paths)), 2),
                            'text': str(
                                int(round(float(len(success_paths)) / float(len(copy_paths)), 2) * 100)) + '%'
                        }
                        self.on_running(self.status_id, progress=progress, pid=self.pid, pname=self.name)
                        next_tick = time.time() + REQUEST_DELAY
                except Exception as e:
                    self.logger.error("Error copy file %s , error %s" % (str(source_path), str(e)))
                    error_paths.append(source_path)

            result = {
                "success": success_paths,
                "errors": error_paths,
                "items": created_paths
            }

            # Send a final progress update -- otherwise users think the copy
            # stopped short of completion.
            progress = {
                'percent': round(float(len(success_paths)) / float(len(copy_paths)), 2),
                'text': str(int(round(float(len(success_paths)) / float(len(copy_paths)), 2) * 100)) + '%'
            }
            time.sleep(REQUEST_DELAY)
            self.on_success(self.status_id, data=result, progress=progress, pid=self.pid, pname=self.name)
        except Exception as e:
            result = {
                "error": True,
                "message": str(e),
                "traceback": traceback.format_exc()
            }
            self.on_error(self.status_id, result, pid=self.pid, pname=self.name)
|
charettes/django-mutant
|
mutant/contrib/boolean/models.py
|
Python
|
mit
| 610
| 0
|
from __future__ import unicode_literals

from django.db.models import fields
from django.utils.translation import ugettext_lazy as _

from ...models.field import FieldDefinition
class _BooleanMeta:
    # Shared Meta mixin: groups both boolean field definitions under the
    # same translated category label.
    defined_field_category = _('Boolean')
class BooleanFieldDefinition(FieldDefinition):
    """Proxy field definition exposing Django's ``BooleanField``."""
    class Meta(_BooleanMeta):
        app_label = 'boolean'
        proxy = True
    defined_field_class = fields.BooleanField
class NullBooleanFieldDefinition(FieldDefinition):
    """Proxy field definition exposing Django's ``NullBooleanField``."""
    class Meta(_BooleanMeta):
        app_label = 'boolean'
        proxy = True
    defined_field_class = fields.NullBooleanField
|
hzlf/openbroadcast
|
website/apps/ashop/migrations/0015_auto__add_field_hardwarerelease_medium.py
|
Python
|
gpl-3.0
| 23,666
| 0.007817
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        # Adding field 'Hardwarerelease.medium'
        # South schema migration: default=0 backfills existing rows, and
        # keep_default=False drops that default from the schema afterwards.
        db.add_column('ashop_hardwarerelease', 'medium', self.gf('django.db.models.fields.CharField')(default=0, max_length=10), keep_default=False)
    def backwards(self, orm):
        # Deleting field 'Hardwarerelease.medium'
        # Reverse migration: drops the column (data in it is lost).
        db.delete_column('ashop_hardwarerelease', 'medium')
models = {
'alibrary.artist': {
'Meta': {'ordering': "('name',)", 'object_name': 'Artist'},
'biography': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'excerpt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'artist_folder'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'artist_main_image'", 'null': 'True', 'to': "orm['filer.Image']"}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['alibrary.Artist']", 'through': "orm['alibrary.ArtistMembership']", 'symmetrical': 'False'}),
'multiple': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'professions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['alibrary.Profession']", 'through': "orm['alibrary.ArtistProfessions']", 'symmetrical': 'False'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '100', 'db_index': 'True'}),
'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'})
},
'alibrary.artistmembership': {
'Meta': {'object_name': 'ArtistMembership'},
'child': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'artist_child'", 'to': "orm['alibrary.Artist']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'artist_parent'", 'to': "orm['alibrary.Artist']"}),
'profession': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'artist_membership_profession'", 'null': 'True', 'to': "orm['alibrary.Profession']"})
},
'alibrary.artistprofessions': {
'Meta': {'object_name': 'ArtistProfessions'},
'artist': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Artist']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'profession': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['alibrary.Profession']"})
},
'alibrary.label': {
'Meta': {'ordering': "('name',)", 'object_name': 'Label'},
'address': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'email_main': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'first_placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'label_folder'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'labelcode': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'label_children'", 'null': 'True', 'to': "orm['alibrary.Label']"}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '100', 'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "'7437b6be-ab03-4a9d-af4e-dbdd430c819e'", 'max_length': '36'})
},
'alibrary.profession': {
'Meta': {'ordering': "('name',)", 'object_name': 'Profession'},
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'excerpt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_listing': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'})
},
'alibrary.release': {
'Meta': {'ordering': "('releasedate',)", 'object
|
_name': 'Release'},
'catalognumber': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
|
'excerpt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'extra_artists': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['alibrary.Profession']", 'null': 'True', 'through': "orm['alibrary.ReleaseExtraartists']", 'blank': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'release_folder'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'release_label'", 'to': "orm['alibrary.Label']"}),
'main_image': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'release_main_image'", 'null': 'True', 'to': "orm['filer.Image']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'placeholder_1': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'pressings': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'max_length': '12'}),
'releasedate': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'releasetype': ('django.db.models.fields.CharField', [], {'default': "'other'", 'max_length': '12'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '100', 'db_index': 'True'}),
'updated': ('django.d
|
SunDwarf/Jokusoramame
|
jokusoramame/plugins/misc.py
|
Python
|
gpl-3.0
| 5,938
| 0.000168
|
import random
import re
from io import BytesIO
from typing import Awaitable, List
import matplotlib.pyplot as plt
import seaborn as sns
from curio.thread import async_thread
from curious.commands import Context, Plugin
from curious.commands.decorators import autoplugin, ratelimit
from yapf.yapflib.style import CreatePEP8Style
from yapf.yapflib.yapf_api import FormatCode
from jokusoramame.utils import rgbize
code_regexp = re.compile(r"```([^\n]+)\n?(.+)\n?```", re.DOTALL)
ADJECTIVES = {
"Trans-Exclusionary ": 1,
"Smithian ": 2,
"Ricardian ": 2,
"Randian ": 3,
"Hegelian ": 3,
"Synthesist ": 3,
"Woke ": 4,
"Vegan ": 4,
"Green ": 6,
"Insurrectionary ": 6,
"Anti-Imperialist ": 6,
"Jewish ": 8,
"Bolshevik ": 8,
"Post-left ": 8,
"Inclusive ": 9,
"Individualist ": 9,
"Queer ": 10,
"Atheist ": 10,
"Liberal ": 10,
"Libertarian ": 10,
"Conservative ": 10,
"Social ": 12,
"Islamic ": 12,
"Radical ": 12,
"Catholic ": 12,
"Esoteric ": 12,
"Christian ": 12,
"Progressive ": 12,
"Post-Colonial ": 12,
"Democratic ": 1
|
3,
"": 30
}
# Weighted prefix vocabulary for the ideology generator; values are relative
# weights for random.choices (the empty string makes no-prefix the most
# common outcome).
PREFIXES = {
    "Alt-": 1,
    "Bio-": 1,
    "Taoist ": 2,
    "Left ": 3,
    "Post-": 3,
    "Anarcha-": 3,
    "Avant Garde ": 3,
    "Eco-": 4,
    "Communal ": 6,
    "Afro-": 8,
    "Ethno-": 8,
    "Ultra-": 8,
    "Neo-": 10,
    "Pan-": 10,
    "Anti-": 10,
    "Paleo-": 10,
    "Techno-": 10,
    "Market ": 10,
    "Revolutionary ": 10,
    "Crypto-": 12,
    "Anarcho-": 12,
    "National ": 12,
    "Orthodox ": 12,
    "": 40
}
# Core ideology names with selection weights for random.choices; rarer
# (sillier) entries carry lower weights.
IDEOLOGIES = {
    "Posadism": 1,
    "Sexualism": 1,
    "Kemalism": 2,
    "Unruheism": 2,
    "Distributism": 2,
    "Titoism": 3,
    "Putinism": 3,
    "Makhnovism": 3,
    "Georgism": 4,
    "Keynesian": 4,
    "Platformism": 4,
    "Municipalism": 5,
    "Confederalism": 5,
    "Egoism": 6,
    "Luddite": 6,
    "Agorism": 6,
    "Unionism": 6,
    "Thatcherite": 6,
    "Minarchism": 7,
    "Ba'athism": 8,
    "Trotskyism": 8,
    "Syndicalism": 8,
    "Luxemburgism": 8,
    "Strasserism": 10,
    "Maoism": 12,
    "Fascism": 12,
    "Marxism": 12,
    "Zionism": 12,
    "Centrism": 12,
    "Pacifism": 12,
    "Leninism": 12,
    "Populism": 12,
    "Futurism": 12,
    "Feminism": 12,
    "Humanism": 12,
    "Mutualism": 12,
    "Communism": 12,
    "Stalinism": 12,
    "Globalism": 12,
    "Socialism": 12,
    "Capitalism": 12,
    "Monarchism": 12,
    "Primitivism": 12,
    "Nationalism": 12,
    "Transhumanism": 12,
    "Traditionalism": 12,
    "Environmentalism": 12,
    "Accelerationism": 12
}
# Optional qualifier tails; the empty string's large weight makes a
# suffix-less result the common case.
SUFFIXES = {
    " in One Country": 1,
    " with Masonic elements": 1,
    ' with "rational debate"': 1,
    " with Phlegmsky's vanguardism": 1,
    " with Chinese characteristics": 1,
    " with a new mode of production": 1,
    "": 100
}
@autoplugin
class Misc(Plugin):
"""
Miscellaneous commands.
"""
async def command_ideology(self, ctx: Context):
"""
Creates an ideology just for you!
"""
message = ''
for d in (ADJECTIVES, PREFIXES, IDEOLOGIES, SUFFIXES):
message += random.choices(list(d.keys()), list(d.values()))[0]
await ctx.channel.messages.send(message)
@ratelimit(limit=1, time=30)
async def command_palette(self, ctx: Context, *, colours: List[int]):
"""
Shows a palette plot.
"""
pal_colours = rgbize(colours[:12])
@async_thread
def plot_palette() -> Awaitable[BytesIO]:
with ctx.bot._plot_lock:
sns.palplot(pal_colours, size=1)
plt.tight_layout() # remove useless padding
buf = BytesIO()
plt.savefig(buf, format="png")
buf.seek(0)
plt.clf()
plt.cla()
return buf
@async_thread()
def plot_dark_palette() -> Awaitable[BytesIO]:
with ctx.bot._plot_lock:
with plt.style.context("dark_background"):
sns.palplot(pal_colours, size=1)
plt.tight_layout() # remove useless padding
buf = BytesIO()
plt.savefig(buf, format="png")
buf.seek(0)
plt.clf()
plt.cla()
return buf
if ctx.bot._plot_lock.locked():
await ctx.channel.messages.send("Waiting for plot lock...")
async with ctx.channel.typing:
buf = await plot_palette()
buf2 = await plot_dark_palette()
await ctx.channel.messages.upload(fp=buf.read(), filename="plot.png")
await ctx.channel.messages.upload(fp=buf2, filename="plot_dark.png")
def _normalize_language(self, lang: str) -> str:
"""
Normalizes a language name into consistency.
"""
lang = lang.lower().rstrip("\n")
print(repr(lang))
if lang in ["py", "python", "py3k"]:
return "python"
return lang
async def command_reformat(self, ctx: Context, *, message: str):
"""
Reformats some code.
"""
code_match = code_regexp.match(message)
if code_match is None:
return await ctx.channel.messages.send(":x: Could not find a valid code block with "
"language.")
language, code = code_match.groups()
code = code.replace("\t", " ")
language = self._normalize_language(language)
if language == "python":
# yapfify
style = CreatePEP8Style()
style['COLUMN_LIMIT'] = 100
reformatted, changes = FormatCode(code, style_config=style)
return await ctx.channel.messages.send(f"```py\n{reformatted}```")
return await ctx.channel.messages.send(":x: Unknown language.")
|
brandonPurvis/osf.io
|
tests/test_notifications.py
|
Python
|
apache-2.0
| 59,970
| 0.003035
|
import collections
import datetime
import mock
import pytz
from babel import dates, Locale
from schema import Schema, And, Use, Or
from modularodm import Q
from modularodm.exceptions import NoResultsFound
from nose.tools import * # noqa PEP8 asserts
from framework.auth import Auth
from framework.auth.core import User
from framework.auth.signals import contributor_removed
from framework.auth.signals import node_deleted
from framework.guid.model import Guid
from website.notifications.tasks import get_users_emails, send_users_email, group_by_node, remove_notifications
from website.notifications import constants
from website.notifications.model import NotificationDigest
from website.notifications.model import NotificationSubscription
from website.notifications import emails
from website.notifications import utils
from website.project.model import Node, Comment
from website import mails
from website.util import api_url_for
from website.util import web_url_for
from tests import factories
from tests.base import capture_signals
from tests.base import OsfTestCase
class TestNotificationsModels(OsfTestCase):
    """Exercises ``Node.has_permission_on_children`` across component trees."""
    def setUp(self):
        super(TestNotificationsModels, self).setUp()
        # Create project with component
        self.user = factories.UserFactory()
        self.consolidate_auth = Auth(user=self.user)
        self.parent = factories.ProjectFactory(creator=self.user)
        self.node = factories.NodeFactory(creator=self.user, parent=self.parent)
    def test_has_permission_on_children(self):
        # Contributor access on one grandchild is enough for True, even though
        # the sibling component (sub_component2) does not include the user.
        non_admin_user = factories.UserFactory()
        parent = factories.ProjectFactory()
        parent.add_contributor(contributor=non_admin_user, permissions=['read'])
        parent.save()
        node = factories.NodeFactory(parent=parent, category='project')
        sub_component = factories.NodeFactory(parent=node)
        sub_component.add_contributor(contributor=non_admin_user)
        sub_component.save()
        sub_component2 = factories.NodeFactory(parent=node)
        assert_true(
            node.has_permission_on_children(non_admin_user, 'read')
        )
    def test_check_user_has_permission_excludes_deleted_components(self):
        # A deleted grandchild must not grant permission.
        non_admin_user = factories.UserFactory()
        parent = factories.ProjectFactory()
        parent.add_contributor(contributor=non_admin_user, permissions=['read'])
        parent.save()
        node = factories.NodeFactory(parent=parent, category='project')
        sub_component = factories.NodeFactory(parent=node)
        sub_component.add_contributor(contributor=non_admin_user)
        sub_component.is_deleted = True
        sub_component.save()
        sub_component2 = factories.NodeFactory(parent=node)
        assert_false(
            node.has_permission_on_children(non_admin_user,'read')
        )
    def test_check_user_does_not_have_permission_on_private_node_child(self):
        # No contributorship anywhere below ``node`` -> False.
        non_admin_user = factories.UserFactory()
        parent = factories.ProjectFactory()
        parent.add_contributor(contributor=non_admin_user, permissions=['read'])
        parent.save()
        node = factories.NodeFactory(parent=parent, category='project')
        sub_component = factories.NodeFactory(parent=node)
        assert_false(
            node.has_permission_on_children(non_admin_user,'read')
        )
    def test_check_user_child_node_permissions_false_if_no_children(self):
        # Leaf node: nothing to inherit from, so the check fails.
        non_admin_user = factories.UserFactory()
        parent = factories.ProjectFactory()
        parent.add_contributor(contributor=non_admin_user, permissions=['read'])
        parent.save()
        node = factories.NodeFactory(parent=parent, category='project')
        assert_false(
            node.has_permission_on_children(non_admin_user,'read')
        )
    def test_check_admin_has_permissions_on_private_component(self):
        # The project creator has access on descendant components.
        parent = factories.ProjectFactory()
        node = factories.NodeFactory(parent=parent, category='project')
        sub_component = factories.NodeFactory(parent=node)
        assert_true(
            node.has_permission_on_children(parent.creator,'read')
        )
    def test_check_user_private_node_child_permissions_excludes_pointers(self):
        # Pointers (links) to nodes the user can read must not count as children.
        user = factories.UserFactory()
        parent = factories.ProjectFactory()
        pointed = factories.ProjectFactory(contributor=user)
        parent.add_pointer(pointed, Auth(parent.creator))
        parent.save()
        assert_false(
            parent.has_permission_on_children(user,'read')
        )
class TestSubscriptionView(OsfTestCase):
    """Tests for the `configure_subscription` view."""

    def setUp(self):
        super(TestSubscriptionView, self).setUp()
        self.node = factories.NodeFactory()
        self.user = self.node.creator

    def test_create_new_subscription(self):
        """POSTing a notification type creates (then updates) a subscription."""
        payload = {
            'id': self.node._id,
            'event': 'comments',
            'notification_type': 'email_transactional'
        }
        url = api_url_for('configure_subscription')
        self.app.post_json(url, payload, auth=self.node.creator.auth)

        # The subscription document exists and lists the poster under the
        # requested notification type.
        event_id = '{0}_comments'.format(self.node._id)
        subscription = NotificationSubscription.find_one(Q('_id', 'eq', event_id))
        assert_equal(payload['id'], subscription.owner._id)
        assert_equal(payload['event'], subscription.event_name)
        assert_in(self.node.creator, getattr(subscription, payload['notification_type']))

        # Re-posting with a different type moves the user between lists.
        new_payload = {
            'id': self.node._id,
            'event': 'comments',
            'notification_type': 'email_digest'
        }
        self.app.post_json(url, new_payload, auth=self.node.creator.auth)
        subscription.reload()
        assert_false(self.node.creator in getattr(subscription, payload['notification_type']))
        assert_in(self.node.creator, getattr(subscription, new_payload['notification_type']))

    def test_adopt_parent_subscription_default(self):
        """'adopt_parent' must not create a subscription document at all."""
        payload = {
            'id': self.node._id,
            'event': 'comments',
            'notification_type': 'adopt_parent'
        }
        url = api_url_for('configure_subscription')
        self.app.post_json(url, payload, auth=self.node.creator.auth)
        event_id = '{0}_comments'.format(self.node._id)
        with assert_raises(NoResultsFound):
            NotificationSubscription.find_one(Q('_id', 'eq', event_id))

    def test_change_subscription_to_adopt_parent_subscription_removes_user(self):
        """Switching to 'adopt_parent' strips the user from every list."""
        payload = {
            'id': self.node._id,
            'event': 'comments',
            'notification_type': 'email_transactional'
        }
        url = api_url_for('configure_subscription')
        self.app.post_json(url, payload, auth=self.node.creator.auth)
        event_id = '{0}_comments'.format(self.node._id)
        subscription = NotificationSubscription.find_one(Q('_id', 'eq', event_id))

        new_payload = {
            'id': self.node._id,
            'event': 'comments',
            'notification_type': 'adopt_parent'
        }
        self.app.post_json(url, new_payload, auth=self.node.creator.auth)
        subscription.reload()
        # The user must be gone from the subscription entirely.
        for notification_type in constants.NOTIFICATION_TYPES:
            assert_false(self.node.creator in getattr(subscription, notification_type))
class TestRemoveContributor(OsfTestCase):
def setUp(self):
super(OsfTestCase, self).setUp()
self.project = factories.ProjectFactory()
self.contributor =
|
factories.UserFactory()
self.project.add_contributor(contributor=self.contributor, permissions=['read'])
self.project.save()
self.subscription = factories.NotificationSubscriptionFactory(
_id=self.project._id + '_comments',
owner=self.project
)
self.subscription.save()
self.subscription.email_transactional.append(self.contributor)
self.subscription.email_transactional.append(self.project.creator)
self
|
ferriman/SSandSP
|
pyxel-test/venv/lib/python3.8/site-packages/pyxel/editor/octave_bar.py
|
Python
|
gpl-3.0
| 1,129
| 0.000886
|
import pyxel
from pyxel.ui import Widget
from .constants import OCTAVE_BAR_BACKGROUND_COLOR, OCTAVE_BAR_COLOR
class OctaveBar(Widget):
    """Thin vertical widget used to pick the current octave (0-3).

    Clicking or dragging selects an octave; the selected octave is drawn
    as a short bar inside the background strip.
    """

    def __init__(self, parent, x, y):
        super().__init__(parent, x, y, 4, 123)

        self.add_event_handler("mouse_down", self.__on_mouse_down)
        self.add_event_handler("mouse_drag", self.__on_mouse_drag)
        self.add_event_handler("mouse_hover", self.__on_mouse_hover)
        self.add_event_handler("draw", self.__on_draw)

    def __on_mouse_down(self, key, x, y):
        if key != pyxel.MOUSE_LEFT_BUTTON:
            return
        # Map the click's vertical position onto octave 3 (top) .. 0
        # (bottom); each octave band is 24 pixels tall.
        local_y = y - self.y
        self.parent.octave = min(max(3 - ((local_y - 12) // 24), 0), 3)

    def __on_mouse_drag(self, key, x, y, dx, dy):
        # Dragging behaves exactly like repeated left-clicks.
        self.__on_mouse_down(key, x, y)

    def __on_mouse_hover(self, x, y):
        self.parent.help_message = "OCTAVE:PAGEUP/PAGEDOWN"

    def __on_draw(self):
        pyxel.rect(self.x, self.y, self.width, self.height, OCTAVE_BAR_BACKGROUND_COLOR)
        bar_x = self.x + 1
        bar_y = self.y + 1 + (3 - self.parent.octave) * 24
        pyxel.rect(bar_x, bar_y, 2, 47, OCTAVE_BAR_COLOR)
|
bezhermoso/powerline
|
tests/test_segments.py
|
Python
|
mit
| 79,449
| 0.022501
|
# vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
import sys
import os
from functools import partial
from collections import namedtuple
from time import sleep
from platform import python_implementation
from powerline.segments import shell, tmux, pdb, i3wm
from powerline.lib.vcs import get_fallback_create_watcher
from powerline.lib.unicode import out_u
import tests.vim as vim_module
from tests.lib import Args, urllib_read, replace_attr, new_module, replace_module_module, replace_env, Pl
from tests import TestCase, SkipTest
def get_dummy_guess(**kwargs):
    """Return a fake VCS ``guess`` function for the segment tests.

    The returned callable reports the basename of the given path as the
    branch name; unless the caller supplied a ``directory`` keyword, the
    repository directory defaults to the path itself.
    """
    if 'directory' in kwargs:
        def guess(path, create_watcher):
            return Args(branch=lambda: out_u(os.path.basename(path)), **kwargs)
    else:
        def guess(path, create_watcher):
            return Args(branch=lambda: out_u(os.path.basename(path)), directory=path, **kwargs)
    return guess
class TestShell(TestCase):
    def test_last_status(self):
        """`shell.last_status` renders the exit code, or nothing on success."""
        pl = Pl()
        segment_info = {'args': Args(last_exit_code=10)}
        self.assertEqual(shell.last_status(pl=pl, segment_info=segment_info), [
            {'contents': '10', 'highlight_groups': ['exit_fail']}
        ])
        # 0 and None mean "success": the segment is suppressed entirely.
        segment_info['args'].last_exit_code = 0
        self.assertEqual(shell.last_status(pl=pl, segment_info=segment_info), None)
        segment_info['args'].last_exit_code = None
        self.assertEqual(shell.last_status(pl=pl, segment_info=segment_info), None)
        # Signal names (strings) are passed through and shown as failures.
        segment_info['args'].last_exit_code = 'sigsegv'
        self.assertEqual(shell.last_status(pl=pl, segment_info=segment_info), [
            {'contents': 'sigsegv', 'highlight_groups': ['exit_fail']}
        ])
        segment_info['args'].last_exit_code = 'sigsegv+core'
        self.assertEqual(shell.last_status(pl=pl, segment_info=segment_info), [
            {'contents': 'sigsegv+core', 'highlight_groups': ['exit_fail']}
        ])
def test_last_pipe_status(self):
pl = Pl()
segment_info = {'args': Args(last_pipe_status=[])}
self.assertEqual(shell.last_pipe_status(pl=pl, segment_info=segment_info), None)
segment_info['args'].last_pipe_status = [0, 0, 0]
self.assertEqual(shell.last_pipe_status(pl=pl, segment_info=segment_info), None)
segment_info['args'].last_pipe_status = [0, 2, 0]
self.assertEqual(shell.last_pipe_status(pl=pl, segment_info=segment_info), [
{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True},
{'contents': '2', 'highlight_groups': ['exit_fail'], 'draw_inner_divider': True},
{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True}
])
segment_info['args'].last_pipe_status = [0, 'sigsegv', 'sigsegv+core']
self.assertEqual(shell.last_pipe_status(pl=pl, segment_info=segment_info), [
{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True},
{'contents': 'sigsegv', 'highlight_groups': ['exit_fail'], 'draw_inner_divider': True},
{'contents': 'sigsegv+core', 'highlight_groups': ['exit_fail'], 'draw_inner_divider': True}
])
segment_info['args'].last_pipe_status = [0, 'sigsegv', 0]
self.assertEqual(shell.last_pipe_status(pl=pl, segment_info=segment_info), [
{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True},
{'contents': 'sigsegv', 'highlight_groups': ['exit_fail'], 'draw_inner_divider': True},
{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True}
])
segment_info['args'].last_pipe_status = [0, 'sigsegv+core', 0]
self.assertEqual(shell.last_pipe_status(pl=pl, segment_info=segment_info), [
{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True},
{'con
|
tents': 'sigsegv+core', 'hig
|
hlight_groups': ['exit_fail'], 'draw_inner_divider': True},
{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True}
])
    def test_jobnum(self):
        """`shell.jobnum` hides a zero job count unless `show_zero` is set."""
        pl = Pl()
        segment_info = {'args': Args(jobnum=0)}
        self.assertEqual(shell.jobnum(pl=pl, segment_info=segment_info), None)
        self.assertEqual(shell.jobnum(pl=pl, segment_info=segment_info, show_zero=False), None)
        self.assertEqual(shell.jobnum(pl=pl, segment_info=segment_info, show_zero=True), '0')
        # A non-zero job count is always shown, regardless of `show_zero`.
        segment_info = {'args': Args(jobnum=1)}
        self.assertEqual(shell.jobnum(pl=pl, segment_info=segment_info), '1')
        self.assertEqual(shell.jobnum(pl=pl, segment_info=segment_info, show_zero=False), '1')
        self.assertEqual(shell.jobnum(pl=pl, segment_info=segment_info, show_zero=True), '1')
    def test_continuation(self):
        """`shell.continuation` renders the shell's pending-construct stack.

        The innermost construct gets the ':current' highlight and absorbs
        the remaining width; 'cmdsubst' entries are omitted by default and
        `renames` can relabel or drop individual constructs.
        """
        pl = Pl()
        # No parser state: an empty auto-width filler segment.
        self.assertEqual(shell.continuation(pl=pl, segment_info={}), [{
            'contents': '',
            'width': 'auto',
            'highlight_groups': ['continuation:current', 'continuation'],
        }])
        segment_info = {'parser_state': 'if cmdsubst'}
        self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info), [
            {
                'contents': 'if',
                'draw_inner_divider': True,
                'highlight_groups': ['continuation:current', 'continuation'],
                'width': 'auto',
                'align': 'l',
            },
        ])
        self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info, right_align=True), [
            {
                'contents': 'if',
                'draw_inner_divider': True,
                'highlight_groups': ['continuation:current', 'continuation'],
                'width': 'auto',
                'align': 'r',
            },
        ])
        # With omit_cmdsubst=False the 'cmdsubst' entry becomes the
        # innermost (':current') construct.
        self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info, omit_cmdsubst=False), [
            {
                'contents': 'if',
                'draw_inner_divider': True,
                'highlight_groups': ['continuation'],
            },
            {
                'contents': 'cmdsubst',
                'draw_inner_divider': True,
                'highlight_groups': ['continuation:current', 'continuation'],
                'width': 'auto',
                'align': 'l',
            },
        ])
        # When right-aligned, the auto width moves to the first segment.
        self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info, omit_cmdsubst=False, right_align=True), [
            {
                'contents': 'if',
                'draw_inner_divider': True,
                'highlight_groups': ['continuation'],
                'width': 'auto',
                'align': 'r',
            },
            {
                'contents': 'cmdsubst',
                'draw_inner_divider': True,
                'highlight_groups': ['continuation:current', 'continuation'],
            },
        ])
        self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info, omit_cmdsubst=True, right_align=True), [
            {
                'contents': 'if',
                'draw_inner_divider': True,
                'highlight_groups': ['continuation:current', 'continuation'],
                'width': 'auto',
                'align': 'r',
            },
        ])
        # `renames` can relabel a construct ...
        self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info, omit_cmdsubst=True, right_align=True, renames={'if': 'IF'}), [
            {
                'contents': 'IF',
                'draw_inner_divider': True,
                'highlight_groups': ['continuation:current', 'continuation'],
                'width': 'auto',
                'align': 'r',
            },
        ])
        # ... or drop it entirely (rename to None).
        self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info, omit_cmdsubst=True, right_align=True, renames={'if': None}), [
            {
                'contents': '',
                'highlight_groups': ['continuation:current', 'continuation'],
                'width': 'auto',
                'align': 'r',
            },
        ])
        # Repeated constructs are all rendered; only the last is ':current'.
        segment_info = {'parser_state': 'then then then cmdsubst'}
        self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info), [
            {
                'contents': 'then',
                'draw_inner_divider': True,
                'highlight_groups': ['continuation'],
            },
            {
                'contents': 'then',
                'draw_inner_divider': True,
                'highlight_groups': ['continuation'],
            },
            {
                'contents': 'then',
                'draw_inner_divider': True,
                'highlight_groups': ['continuation:current', 'continuation'],
                'width': 'auto',
                'align': 'l',
            },
        ])
def test_cwd(self):
new_os = new_module('os', path=os.path, sep='/')
pl = Pl()
cwd = [None]
def getcwd():
wd = cwd[0]
if isinstance(wd, Exception):
raise wd
else:
return wd
segment_info = {'getcwd': getcwd, 'home': None}
with replace_attr(shell, 'os', new_os):
cwd[0] = '/abc/def/ghi/foo/bar'
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info), [
{'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'abc', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'def', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'ghi', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'foo', 'divider_h
|
gwpy/gwpy
|
gwpy/timeseries/io/wav.py
|
Python
|
gpl-3.0
| 3,595
| 0
|
# -*- coding: utf-8 -*-
# Copyright (C) Duncan Macleod (2017-2020)
#
# This file is part of GWpy.
#
# GWpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GWpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GWpy. If not, see <http://www.gnu.org/licenses/>.
"""Read/write WAV files using `scipy.signal.wavfile`
"""
import struct
import wave
import numpy
from scipy.io import wavfile
from .. import TimeSeries
from ...io import registry as io_registry
# (chunk id, format id) expected in the 12-byte RIFF header of a WAV file.
WAV_SIGNATURE = ('RIFF', 'WAVE')
def read(fobj, **kwargs):
    """Read a WAV file into a `TimeSeries`

    Parameters
    ----------
    fobj : `file`, `str`
        open file-like object or filename to read from

    **kwargs
        all keyword arguments are passed onto :func:`scipy.io.wavfile.read`

    See also
    --------
    scipy.io.wavfile.read
        for details on how the WAV file is actually read

    Examples
    --------
    >>> from gwpy.timeseries import TimeSeries
    >>> t = TimeSeries.read('test.wav')
    """
    rate, samples = wavfile.read(fobj, **kwargs)
    return TimeSeries(samples, sample_rate=rate)
def write(series, output, scale=None):
    """Write a `TimeSeries` to a WAV file

    Parameters
    ----------
    series : `TimeSeries`
        the series to write

    output : `file`, `str`
        the file object or filename to write to

    scale : `float`, optional
        the factor to apply to scale the data to (-1.0, 1.0),
        pass `scale=1` to not apply any scale, otherwise
        the data will be auto-scaled

    See also
    --------
    scipy.io.wavfile.write
        for details on how the WAV file is actually written

    Examples
    --------
    >>> from gwpy.timeseries import TimeSeries
    >>> t = TimeSeries([1, 2, 3, 4, 5])
    >>> t.write('test.wav')
    """
    fsamp = int(series.sample_rate.decompose().value)
    if scale is None:
        # auto-scale the peak to 1.0; a flat-zero series would otherwise
        # give a zero divisor (and all-NaN output), so skip scaling then
        peak = numpy.abs(series.value).max()
        scale = 1 / peak if peak else 1
    data = (series.value * scale).astype('float32')
    return wavfile.write(output, fsamp, data)
def is_wav(origin, filepath, fileobj, *args, **kwargs):
    """Identify a file as WAV

    See `astropy.io.registry` for details on how this function is used.

    Checks, in order: the RIFF/WAVE magic in an open file object, then the
    file extension, then (as a last resort) whether `wave.open` accepts the
    first positional argument.
    """
    # pylint: disable=unused-argument
    if origin == 'read' and fileobj is not None:
        # probe the 12-byte RIFF header, restoring the stream position after
        loc = fileobj.tell()
        fileobj.seek(0)
        try:
            riff, _, fmt = struct.unpack('<4sI4s', fileobj.read(12))
            if isinstance(riff, bytes):
                riff = riff.decode('utf-8')
                fmt = fmt.decode('utf-8')
            return riff == WAV_SIGNATURE[0] and fmt == WAV_SIGNATURE[1]
        except (UnicodeDecodeError, struct.error):
            # short or non-RIFF content
            return False
        finally:
            fileobj.seek(loc)
    elif filepath is not None:
        return filepath.endswith(('.wav', '.wave'))
    else:
        try:
            wav = wave.open(args[0])
        except (wave.Error, AttributeError, IndexError):
            # IndexError: registry called us with no extra args, so there
            # is nothing to probe; treat as "not WAV" instead of crashing
            return False
        else:
            wav.close()  # don't leak the opened Wave_read handle
            return True
# Hook the WAV reader/writer/identifier into the unified I/O registry so
# `TimeSeries.read`/`.write` with format='wav' (and auto-identification
# from filename/contents) work.
io_registry.register_reader('wav', TimeSeries, read)
io_registry.register_writer('wav', TimeSeries, write)
io_registry.register_identifier('wav', TimeSeries, is_wav)
|
Ichimonji10/robottelo
|
tests/foreman/cli/test_filter.py
|
Python
|
gpl-3.0
| 6,479
| 0
|
# -*- encoding: utf-8 -*-
"""Test for Roles CLI
@Requirement: Filter
@CaseAutomation: Automated
@CaseLevel: Acceptance
@CaseComponent: CLI
@TestType: Functional
@CaseImportance: High
@Upstream: No
"""
from robottelo.cli.base import CLIReturnCodeError
from robottelo.cli.factory import (
make_filter,
make_location,
make_org,
make_role,
)
from robottelo.cli.filter import Filter
from robottelo.cli.role import Role
from robottelo.decorators import tier1
from robottelo.test import APITestCase
class FilterTestCase(APITestCase):
    """CLI tests for role filters (create/update/delete)."""

    @classmethod
    def setUpClass(cls):
        """Search for Organization permissions. Set ``cls.perms``."""
        super(FilterTestCase, cls).setUpClass()
        # Names of all permissions on the 'User' resource; used by every
        # test below when building filters.
        cls.perms = [
            permission['name']
            for permission in Filter.available_permissions(
                {'resource-type': 'User'})
        ]

    def setUp(self):
        """Create a role that a filter would be assigned """
        super(FilterTestCase, self).setUp()
        self.role = make_role()

    @tier1
    def test_positive_create_with_permission(self):
        """Create a filter and assign it some permissions.

        @id: 6da6c5d3-2727-4eb7-aa15-9f7b6f91d3b2

        @Assert: The created filter has the assigned permissions.
        """
        role_filter = make_filter({
            'role-id': self.role['id'],
            'permissions': self.perms,
        })
        self.assertEqual(
            set(role_filter['permissions'].split(", ")),
            set(self.perms),
        )

    @tier1
    def test_positive_create_with_org(self):
        """Create a filter and assign it some permissions.

        @id: f6308192-0e1f-427b-a296-b285f6684691

        @Assert: The created filter has the assigned permissions.
        """
        org = make_org()
        role_filter = make_filter({
            'role-id': self.role['id'],
            'permissions': self.perms,
            'organization-ids': org['id'],
        })
        # Exactly one organization is expected, i.e. the first element.
        self.assertEqual(role_filter['organizations'][0], org['name'])

    @tier1
    def test_positive_create_with_loc(self):
        """Create a filter and assign it some permissions.

        @id: d7d1969a-cb30-4e97-a9a3-3a4aaf608795

        @Assert: The created filter has the assigned permissions.
        """
        loc = make_location()
        role_filter = make_filter({
            'role-id': self.role['id'],
            'permissions': self.perms,
            'location-ids': loc['id'],
        })
        # Exactly one location is expected, i.e. the first element.
        self.assertEqual(role_filter['locations'][0], loc['name'])

    @tier1
    def test_positive_delete(self):
        """Create a filter and delete it afterwards.

        @id: 97d1093c-0d49-454b-86f6-f5be87b32775

        @Assert: The deleted filter cannot be fetched.
        """
        role_filter = make_filter({
            'role-id': self.role['id'],
            'permissions': self.perms,
        })
        Filter.delete({'id': role_filter['id']})
        with self.assertRaises(CLIReturnCodeError):
            Filter.info({'id': role_filter['id']})

    @tier1
    def test_positive_delete_role(self):
        """Create a filter and delete the role it points at.

        @id: e2adb6a4-e408-4912-a32d-2bf2c43187d9

        @Assert: The filter cannot be fetched.
        """
        role_filter = make_filter({
            'role-id': self.role['id'],
            'permissions': self.perms,
        })
        # A filter depends on a role: deleting the role implicitly deletes
        # every filter pointing at it.
        Role.delete({'id': self.role['id']})
        with self.assertRaises(CLIReturnCodeError):
            Role.info({'id': self.role['id']})
        with self.assertRaises(CLIReturnCodeError):
            Filter.info({'id': role_filter['id']})

    @tier1
    def test_positive_update_permissions(self):
        """Create a filter and update its permissions.

        @id: 3d6a52d8-2f8f-4f97-a155-9b52888af16e

        @Assert: Permissions updated.
        """
        role_filter = make_filter({
            'role-id': self.role['id'],
            'permissions': self.perms,
        })
        new_perms = [
            permission['name']
            for permission in Filter.available_permissions(
                {'resource-type': 'User'})
        ]
        Filter.update({
            'id': role_filter['id'],
            'permissions': new_perms,
        })
        role_filter = Filter.info({'id': role_filter['id']})
        self.assertEqual(
            set(role_filter['permissions'].split(", ")),
            set(new_perms),
        )

    @tier1
    def test_positive_update_role(self):
        """Create a filter and assign it to another role.

        @id: 2950b3a1-2bce-447f-9df2-869b1d10eaf5

        @Assert: Filter is created and assigned to new role.
        """
        role_filter = make_filter({
            'role-id': self.role['id'],
            'permissions': self.perms,
        })
        new_role = make_role()
        Filter.update({
            'id': role_filter['id'],
            'role-id': new_role['id'],
        })
        role_filter = Filter.info({'id': role_filter['id']})
        self.assertEqual(role_filter['role'], new_role['name'])

    @tier1
    def test_positive_update_org_loc(self):
        """Create a filter and assign it to another organization and location.

        @id: 9bb59109-9701-4ef3-95c6-81f387d372da

        @Assert: Filter is created and assigned to new org and loc.
        """
        org = make_org()
        loc = make_location()
        role_filter = make_filter({
            'role-id': self.role['id'],
            'permissions': self.perms,
            'organization-ids': org['id'],
            'location-ids': loc['id'],
        })
        new_org = make_org()
        new_loc = make_location()
        Filter.update({
            'id': role_filter['id'],
            'permissions': self.perms,
            'organization-ids': new_org['id'],
            'location-ids': new_loc['id'],
        })
        role_filter = Filter.info({'id': role_filter['id']})
        # Exactly one organization and one location are expected.
        self.assertEqual(role_filter['organizations'][0], new_org['name'])
        self.assertEqual(role_filter['locations'][0], new_loc['name'])
|
nifannn/HackerRank
|
Practice/Python/Strings/string_validators.py
|
Python
|
mit
| 281
| 0.007117
|
if __name__ == '__main__':
    s = input()
    # For each predicate, report whether ANY character of `s` satisfies it
    # (HackerRank "String Validators": alnum, alpha, digit, lower, upper).
    checks = (str.isalnum, str.isalpha, str.isdigit, str.islower, str.isupper)
    if s:  # an empty input prints nothing (matches the zip-based original)
        for check in checks:
            print(any(check(ch) for ch in s))
|
crosswalk-project/crosswalk-test-suite
|
embeddingapi/embedding-api-android-tests/inst.apk.py
|
Python
|
bsd-3-clause
| 3,916
| 0.001277
|
#!/usr/bin/env python
import os
import shutil
import glob
import time
import sys
import subprocess
from optparse import OptionParser, make_option
# Directory containing this script (and the .apk / device_files payload).
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
# Parsed command-line options; populated in main().
PARAMETERS = None
# adb executable, assumed to be on PATH.
ADB_CMD = "adb"
def doCMD(cmd):
    """Run *cmd* in a shell, echoing its output line by line.

    Returns a tuple ``(return_code, output_lines)``.
    """
    # Do not need handle timeout in this short script, let tool do it
    print "-->> \"%s\"" % cmd
    output = []
    cmd_return_code = 1
    cmd_proc = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
    while True:
        output_line = cmd_proc.stdout.readline().strip("\r\n")
        cmd_return_code = cmd_proc.poll()
        # poll() stays None until the child exits; keep draining until the
        # stream is empty AND the process has finished.
        if output_line == '' and cmd_return_code is not None:
            break
        sys.stdout.write("%s\n" % output_line)
        sys.stdout.flush()
        output.append(output_line)
    return (cmd_return_code, output)
def uninstResources():
    """Remove the pushed test payload from the device's sdcard."""
    action_status = True
    cmd = "%s -s %s shell rm -r /sdcard/device_files" % (
        ADB_CMD, PARAMETERS.device)
    # NOTE(review): the command's return code is ignored, so this always
    # reports success -- presumably intentional best-effort cleanup.
    (return_code, output) = doCMD(cmd)
    return action_status
def instResources():
    """Push the local device_files payload (if present) to the device."""
    action_status = True
    source_path = os.path.join(SCRIPT_DIR, "device_files")
    if os.path.exists(source_path):
        cmd = "%s -s %s push %s /sdcard/device_files" % (
            ADB_CMD, PARAMETERS.device,source_path)
        # NOTE(review): push failures are not detected; status stays True.
        (return_code, output) = doCMD(cmd)
    return action_status
def uninstPKGs():
    """Uninstall every test package matching the bundled .apk files.

    The package suffix is taken from the apk file name: the part between
    the first '_' and the first '.'.
    """
    action_status = True
    for root, dirs, files in os.walk(SCRIPT_DIR):
        for file in files:
            if file.endswith(".apk"):
                index_start = str(file).index("_")
                index_end = str(file).index(".")
                cmd = "%s -s %s uninstall org.xwalk.embedding.test.%s" % (
                    ADB_CMD, PARAMETERS.device, str(file)[index_start + 1: index_end])
                (return_code, output) = doCMD(cmd)
                for line in output:
                    if "Failure" in line:
                        action_status = False
                        break
    # Only clean up the shared resources when every uninstall succeeded.
    if action_status:
        uninstResources()
    return action_status
def instPKGs():
    """Install every .apk found under SCRIPT_DIR onto the target device."""
    action_status = True
    for root, dirs, files in os.walk(SCRIPT_DIR):
        for file in files:
            if file.endswith(".apk"):
                cmd = "%s -s %s install %s" % (ADB_CMD,
                    PARAMETERS.device, os.path.join(root, file))
                (return_code, output) = doCMD(cmd)
                for line in output:
                    if "Failure" in line:
                        action_status = False
                        break
    # Only push the shared resource payload when every install succeeded.
    if action_status:
        instResources()
    return action_status
def main():
try:
usage = "usage: inst.py -i"
opts_parser = OptionParser(usage=usage)
opts_parser.add_option(
"-s", dest="device", action="store", help="Specify device")
opts_parser.add_option(
"-i", dest="binstpkg", action="store_true", help="Install package")
opts_parser.add_option(
"-u", dest="buninstpkg", action="store_true", help="Uninstall package")
global PARAMETERS
(PARAMETERS, args) = opts_parser.parse_args()
|
except Exception as e:
print "Got wrong option: %s, exit ..." % e
sys.exit(1)
if not PARAMETERS.device:
(return_code, output) = doCMD("adb devices")
for line in output:
if str.find(line, "\tdevice") != -1:
PARAMETERS.device = line.split("\t")[0]
break
if not PARAMETERS.
|
device:
print "No device found"
sys.exit(1)
if PARAMETERS.binstpkg and PARAMETERS.buninstpkg:
print "-i and -u are conflict"
sys.exit(1)
if PARAMETERS.buninstpkg:
if not uninstPKGs():
sys.exit(1)
else:
if not instPKGs():
sys.exit(1)
if __name__ == "__main__":
main()
sys.exit(0)
|
baverman/fmd
|
fmd/run.py
|
Python
|
mit
| 262
| 0.007634
|
def run():
    """Launch the fmd application on the URI from argv (or the CWD)."""
    import os
    import sys

    # First command-line argument wins; otherwise browse the current dir.
    if len(sys.argv) > 1:
        uri = sys.argv[1]
    else:
        uri = os.getcwd()

    import gtk

    from .app import App
    from uxie.utils import idle

    application = App()
    # Defer the initial open until the GTK main loop is running.
    idle(application.open, uri)
    gtk.main()
|
Diti24/python-ivi
|
ivi/agilent/agilentE4431B.py
|
Python
|
mit
| 1,495
| 0.001338
|
"""
Python Interchangeable Virtual Instrument Library
Copyright (c) 2014-2016 Alex Forencich
Permission is hereby grante
|
d, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or s
|
ell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .agilentBaseESGD import *
class agilentE4431B(agilentBaseESGD):
    "Agilent E4431B ESG-D IVI RF signal generator driver"
    def __init__(self, *args, **kwargs):
        # setdefault so a subclass that already chose its own instrument ID
        # is not overridden by this model's value.
        self.__dict__.setdefault('_instrument_id', 'ESG-D4000B')
        super(agilentE4431B, self).__init__(*args, **kwargs)
        # Carrier frequency range of the E4431B: 250 kHz - 2 GHz.
        self._frequency_low = 250e3
        self._frequency_high = 2e9
|
gitaarik/jazzchords
|
apps/users/admin.py
|
Python
|
gpl-3.0
| 213
| 0
|
from django.contrib import admi
|
n
from .models import User
class UserAdmin(admin.ModelAdmin):
    # Columns shown on the user changelist in the Django admin.
    list_display = ('username', 'email', 'is_active', 'is_staff', 'validated')

admin.site.register(User, UserAdmin)
| |
hiway/micropython
|
tests/basics/list_slice_assign.py
|
Python
|
mit
| 622
| 0.017685
|
# test slices; only 2 argument version supported by Micro Python at the moment
x = list(range(10))
# Assignment: replace a middle slice with more, fewer, or no elements,
# then delete the same slice.
l = list(x)
l[1:3] = [10, 20]
print(l)
l = list(x)
l[1:3] = [10]
print(l)
l = list(x)
l[1:3] = []
print(l)
l = list(x)
del l[1:3]
print(l)
# Open-start slices (l[:3]).
l = list(x)
l[:3] = [10, 20]
print(l)
l = list(x)
l[:3] = []
print(l)
l = list(x)
del l[:3]
print(l)
# Negative open-end slices (l[:-3]).
l = list(x)
l[:-3] = [10, 20]
print(l)
l = list(x)
l[:-3] = []
print(l)
l = list(x)
del l[:-3]
print(l)
# assign a tuple (any iterable may be assigned to a slice)
l = [1, 2, 3]
l[0:1] = (10, 11, 12)
print(l)
# RHS of slice must be an iterable
try:
    [][0:1] = 123
except TypeError:
    print('TypeError')
|
MalloyPower/parsing-python
|
front-end/testsuite-python-lib/Python-3.2/Lib/test/test_concurrent_futures.py
|
Python
|
mit
| 20,163
| 0.000893
|
import test.support
# Skip tests if _multiprocessing wasn't built.
test.support.import_module('_multiprocessing')
# Skip tests if sem_open implementation is broken.
test.support.import_module('multiprocessing.synchronize')
# import threading after _multiprocessing to raise a more revelant error
# message: "No module named _multiprocessing". _multiprocessing is not compiled
# without thread support.
test.support.import_module('threading')
import threading
import time
import unittest
from concurrent import futures
from concurrent.futures._base import (
PENDING, RUNNING, CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED, Future)
import concurrent.futures.process
def create_future(state=PENDING, exception=None, result=None):
    """Build a `Future` with its internal fields forced to given values.

    Lets the tests fabricate futures in any lifecycle state without going
    through an executor.
    """
    fut = Future()
    fut._state = state
    fut._exception = exception
    fut._result = result
    return fut
# Pre-built sentinel futures covering every lifecycle state; shared
# (read-only) by the wait/as_completed test cases below.
PENDING_FUTURE = create_future(state=PENDING)
RUNNING_FUTURE = create_future(state=RUNNING)
CANCELLED_FUTURE = create_future(state=CANCELLED)
CANCELLED_AND_NOTIFIED_FUTURE = create_future(state=CANCELLED_AND_NOTIFIED)
EXCEPTION_FUTURE = create_future(state=FINISHED, exception=IOError())
SUCCESSFUL_FUTURE = create_future(state=FINISHED, result=42)
def mul(x, y):
    """Return ``x * y``; a picklable callable submitted to executors."""
    product = x * y
    return product
def sleep_and_raise(t):
    """Sleep ``t`` seconds, then raise a generic ``Exception``."""
    time.sleep(t)
    raise Exception('this is an exception')
class ExecutorMixin:
    """Shared warm-up helper for the executor test cases."""

    worker_count = 5

    def _prime_executor(self):
        # Make sure that the executor is ready to do work before running
        # the tests; this should reduce the probability of timeouts.
        warmup = [self.executor.submit(time.sleep, 0.1)
                  for _ in range(self.worker_count)]
        for fut in warmup:
            fut.result()
class ThreadPoolMixin(ExecutorMixin):
    # Runs each test case against a 5-thread ThreadPoolExecutor.
    def setUp(self):
        self.executor = futures.ThreadPoolExecutor(max_workers=5)
        self._prime_executor()
    def tearDown(self):
        # Block until all workers have drained so tests cannot leak threads.
        self.executor.shutdown(wait=True)
class ProcessPoolMixin(ExecutorMixin):
    # Runs each test case against a 5-process ProcessPoolExecutor.
    def setUp(self):
        try:
            self.executor = futures.ProcessPoolExecutor(max_workers=5)
        except NotImplementedError as e:
            # Platforms without working multiprocessing primitives
            # (e.g. broken sem_open) cannot run these tests at all.
            self.skipTest(str(e))
        self._prime_executor()
    def tearDown(self):
        self.executor.shutdown(wait=True)
class ExecutorShutdownTest(unittest.TestCase):
def test_run_after_shutdown(self):
self.executor.shutdown()
self.assertRaises(RuntimeEr
|
ror,
self.executor.submit,
pow, 2, 5)
class ThreadPoolShutdownTest(ThreadPoolMixin, ExecutorShutdownTest):
    """ThreadPoolExecutor-specific shutdown behaviour."""

    def _prime_executor(self):
        # Shutdown tests manage the executor lifecycle themselves.
        pass

    def test_threads_terminate(self):
        # Each submission spawns (at most) one worker thread.
        for a, b in ((21, 2), (6, 7), (3, 14)):
            self.executor.submit(mul, a, b)
        self.assertEqual(len(self.executor._threads), 3)
        self.executor.shutdown()
        for worker in self.executor._threads:
            worker.join()

    def test_context_manager_shutdown(self):
        # Leaving the `with` block implies shutdown(wait=True).
        with futures.ThreadPoolExecutor(max_workers=5) as e:
            executor = e
            self.assertEqual(list(e.map(abs, range(-5, 5))),
                             [5, 4, 3, 2, 1, 0, 1, 2, 3, 4])
        for worker in executor._threads:
            worker.join()

    def test_del_shutdown(self):
        # Dropping the last reference must also wind the workers down.
        executor = futures.ThreadPoolExecutor(max_workers=5)
        executor.map(abs, range(-5, 5))
        workers = executor._threads
        del executor
        for worker in workers:
            worker.join()
class ProcessPoolShutdownTest(ProcessPoolMixin, ExecutorShutdownTest):
    """ProcessPoolExecutor-specific shutdown behaviour."""

    def _prime_executor(self):
        # Shutdown tests manage the executor lifecycle themselves.
        pass

    def test_processes_terminate(self):
        # The process pool is fully spawned up-front (all 5 workers).
        for a, b in ((21, 2), (6, 7), (3, 14)):
            self.executor.submit(mul, a, b)
        self.assertEqual(len(self.executor._processes), 5)
        procs = self.executor._processes
        self.executor.shutdown()
        for proc in procs:
            proc.join()

    def test_context_manager_shutdown(self):
        # Leaving the `with` block implies shutdown(wait=True).
        with futures.ProcessPoolExecutor(max_workers=5) as e:
            procs = e._processes
            self.assertEqual(list(e.map(abs, range(-5, 5))),
                             [5, 4, 3, 2, 1, 0, 1, 2, 3, 4])
        for proc in procs:
            proc.join()

    def test_del_shutdown(self):
        # Dropping the last reference must stop the manager thread and
        # all worker processes.
        executor = futures.ProcessPoolExecutor(max_workers=5)
        list(executor.map(abs, range(-5, 5)))
        manager = executor._queue_management_thread
        procs = executor._processes
        del executor
        manager.join()
        for proc in procs:
            proc.join()
class WaitTests(unittest.TestCase):
def test_first_completed(self):
future1 = self.executor.submit(mul, 21, 2)
future2 = self.executor.submit(time.sleep, 5)
done, not_done = futures.wait(
[CANCELLED_FUTURE, future1, future2],
return_when=futures.FIRST_COMPLETED)
self.assertEqual(set([future1]), done)
self.assertEqual(set([CANCELLED_FUTURE, future2]), not_done)
def test_first_completed_some_already_completed(self):
future1 = self.executor.submit(time.sleep, 2)
finished, pending = futures.wait(
[CANCELLED_AND_NOTIFIED_FUTURE, SUCCESSFUL_FUTURE, future1],
return_when=futures.FIRST_COMPLETED)
self.assertEqual(
set([CANCELLED_AND_NOTIFIED_FUTURE, SUCCESSFUL_FUTURE]),
finished)
self.assertEqual(set([future1]), pending)
def test_first_exception(self):
future1 = self.executor.submit(mul, 2, 21)
future2 = self.executor.submit(sleep_and_raise, 5)
future3 = self.executor.submit(time.sleep, 10)
finished, pending = futures.wait(
[future1, future2, future3],
return_when=futures.FIRST_EXCEPTION)
self.assertEqual(set([future1, future2]), finished)
self.assertEqual(set([future3]), pending)
def test_first_exception_some_already_complete(self):
future1 = self.executor.submit(divmod, 21, 0)
future2 = self.executor.submit(time.sleep, 5)
finished, pending = futures.wait(
[SUCCESSFUL_FUTURE,
CANCELLED_FUTURE,
CANCELLED_AND_NOTIFIED_FUTURE,
future1, future2],
return_when=futures.FIRST_EXCEPTION)
self.assertEqual(set([SUCCESSFUL_FUTURE,
CANCELLED_AND_NOTIFIED_FUTURE,
future1]), finished)
self.assertEqual(set([CANCELLED_FUTURE, future2]), pending)
def test_first_exception_one_already_failed(self):
future1 = self.executor.submit(time.sleep, 2)
finished, pending = futures.wait(
[EXCEPTION_FUTURE, future1],
return_when=futures.FIRST_EXCEPTION)
self.assertEqual(set([EXCEPTION_FUTURE]), finished)
self.assertEqual(set([future1]), pending)
def test_all_completed(self):
future1 = self.executor.submit(divmod, 2, 0)
future2 = self.executor.submit(mul, 2, 21)
finished, pending = futures.wait(
[SUCCESSFUL_FUTURE,
CANCELLED_AND_NOTIFIED_FUTURE,
EXCEPTION_FUTURE,
future1,
future2],
return_when=futures.ALL_COMPLETED)
self.assertEqual(set([SUCCESSFUL_FUTURE,
CANCELLED_AND_NOTIFIED_FUTURE,
EXCEPTION_FUTURE,
future1,
future2]), finished)
self.assertEqual(set(), pending)
def test_timeout(self):
future1 = self.executor.submit(mul, 6, 7)
future2 = self.executor.submit(time.sleep, 10)
finished, pending = futures.wait(
[CANCELLED_AND_NOTIFIED_FUTURE,
EXCEPTION_FUTURE,
SUCCESSFUL_FUTURE,
future1, future2],
timeout=5,
return_when=futures.ALL_COMPLETED)
self.assertEqual(set([CANCELLED_AND_NOTIFIED_FUTURE,
|
the-zebulan/CodeWars
|
katas/kyu_4/ip_validation.py
|
Python
|
mit
| 276
| 0
|
from re import compile, match
REGEX = compile(r'((25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)\.){3}'
r'(25[0-5]|2[0-4]\d|1\d\d|[
|
1-9]\d|\d)$')
def is_valid_IP(strng):
""" is_valid_ip == PEP8 (forced mixedCase by CodeWars
|
) """
return bool(match(REGEX, strng))
|
seblabbe/slabbe
|
slabbe/graph_directed_IFS.py
|
Python
|
gpl-2.0
| 26,146
| 0.004171
|
# -*- coding: utf-8 -*-
r"""
Graph-directed iterated function system (GIFS)
See [JK14]_ or [BV20]_ or
- http://larryriddle.agnesscott.org/ifs/ifs.htm
- https://encyclopediaofmath.org/wiki/Iterated_function_system
We allow the functions to be contracting or not. When the functions are
inflations, it allows to represent inflation rules and stone inflations as
in Definition 5.17 of [BG13]_.
EXAMPLES:
The Cantor set::
sage: from slabbe import GraphDirectedIteratedFunctionSystem as GIFS
sage: F = AffineGroup(1, QQ)
sage: f1 = F.linear(1/3); f1
x |-> [1/3] x + [0]
sage: f2 = F(1/3, vector([2/3])); f2
x |-> [1/3] x + [2/3]
sage: cantor_IFS = GIFS(QQ^1, [(0,0,f1),(0,0,f2)])
sage: cantor_IFS
GIFS defined by 2 maps on
Vector space of dimension 1 over Rational Field
Fibonacci substitution::
sage: m = WordMorphism('a->ab,b->a')
sage: fibo_ifs = GIFS.from_one_dimensional_substitution(m)
sage: fibo_ifs
GIFS defined by 3 maps on Vector space of dimension 1 over
Number Field in root with defining polynomial x^2 - x - 1 with
root = 1.618033988749895?
Its element-wise Galois conjugate is a contracting IFS::
sage: fibo_ifs.galois_conjugate().pp()
GIFS defined by 3 maps on Vector space of dimension 1 over Number Field in root with defining polynomial x^2 - x - 1 with root = 1.618033988749895?
edge (0,0):
x |-> [-root + 1] x + [0]
edge (1,0):
x |-> [-root + 1] x + [1]
edge (0,1):
x |-> [-root + 1] x + [0]
Direct Product of 2 Fibonacci::
sage: from slabbe import GraphDirectedIteratedFunctionSystem as GIFS
sage: from slabbe import Substitution2d
sage: d = {0:[[3]], 1:[[3],[2]], 2:[[3,1]], 3:[[3,1],[2,0]]}
sage: s = Substitution2d(d)
sage: fibo2_ifs = GIFS.from_two_dimensional_substitution(s)
sage: fibo2_ifs
GIFS defined by 9 maps on Vector space of dimension 2 over
Number Field in rootX with defining polynomial x^2 - x - 1 with
rootX = 1.618033988749895?
REFERENCES:
.. [JK14] Jolivet, Timo, et Jarkko Kari. « Undecidable Properties of Self-Affine
Sets and Multi-Tape Automata ». In Mathematical Foundations of Computer
Science 2014, édité par Erzsébet Csuhaj-Varjú, Martin Dietzfelbinger,
et Zoltán Ésik, 8634:352‑64. Berlin, Heidelberg: Springer Berlin
Heidelberg, 2014. https://doi.org/10.1007/978-3-662-44522-8_30.
.. [BV20] Michael Barnsley, Andrew Vince. Tilings from Graph Directed
Iterated Function Systems. Geometriae Dedicata, 9 août 2020.
https://doi.org/10.1007/s10711-020-00560-4
.. [BG13] Michael Baake, Uwe Grimm. Aperiodic order. Vol. 1. Vol. 149.
Encyclopedia of Mathematics and its Applications. Cambridge University
Press, Cambridge, 2013. http://www.ams.org/mathscinet-getitem?mr=3136260.
.. [BFG19] Michael Baake, Natalie Priebe Frank, Uwe Grimm. Three variations on a
theme by Fibonacci. http://arxiv.org/abs/1910.00988
"""
#*****************************************************************************
# Copyright (C) 2020 Sebastien Labbe <slabqc@gmail.com>
#
# Distributed under the terms of the GNU General Public License (GPL)
# as published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
# http://www.gnu.org/licenses/
#*****************************************************************************
from __future__ import absolute_import, print_function
import itertools
from sage.modules.free_module_element import vector
class GraphDirectedIteratedFunctionSystem(object):
r"""
INPUT:
- ``module`` -- the module on which the functions are defined
- ``edges`` -- list, list of triples (u,v,f) where f is a function
associated to the directed edge (u,v).
EXAMPLES:
The Cantor set::
sage: F = AffineGroup(1, QQ)
sage: f1 = F.linear(1/3)
sage: f2 = F(1/3, vector([2/3]))
sage: f1
x |-> [1/3] x + [0]
sage: f2
x |-> [1/3] x + [2/3]
sage: from slabbe import GraphDirectedIteratedFunctionSystem as GIFS
sage: GIFS(QQ^1, [(0,0,f1),(0,0,f2)])
GIFS defined by 2 maps on
Vector space of dimension 1 over Rational Field
"""
def __init__(self, module, edges):
r"""
See class documentation.
EXAMPLES::
sage: F = AffineGroup(1, QQ)
sage: f1 = F.linear(1/3)
sage: f2 = F(1/3, vector([2/3]))
sage: from slabbe import GraphDirectedIteratedFunctionSystem as GIFS
sage: ifs = GIFS(QQ^1, [(0,0,f1),(0,0,f2)])
"""
self._module = module
self._edges = edges
def __repr__(self):
r"""
EXAMPLES::
sage: from slabbe import GraphDirectedIteratedFunctionSystem as GIFS
sage: F = AffineGroup(1, QQ)
sage: f1 = F.linear(1/3)
sage: f2 = F(1/3, vector([2/3]))
sage: GIFS(QQ^1, [(0,0,f1),(0,0,f2)])
GIFS defined by 2 maps on
Vector space of dimension 1 over Rational Field
"""
return ("GIFS defined by {} maps on {}".format(len(self._edges),
self._module))
def pp(self):
r"""
Prints a nicer and complete string representation.
EXAMPLES::
sage: from slabbe import GraphDirectedIteratedFunctionSystem as GIFS
sage: F = AffineGroup(1, QQ)
sage: ifs = f1 = F.linear(1/3)
sage: f2 = F(1/3, vector([2/3]))
sage: ifs = GIFS(QQ^1, [(0,0,f1),(0,0,f2)])
sage: ifs.pp()
GIFS defined by 2 maps on Vector space of dimension 1 over Rational Field
edge (0,0):
x |-> [1/3] x + [0]
edge (0,0):
x |-> [1/3] x + [2/3]
"""
print("GIFS defined by {} maps on {}".format(len(self._edges),
self._module))
for (a,b,f) in self._edges:
print("edg
|
e ({},{}):".format(a,b))
print(f)
@classmethod
def from_one_dimensional_substitution(cls, m):
r"""
Return the GIFS defined by a unidimensional primitive
substitution
INPUT:
- ``m`` -- WordMorphism, primitive substitution
EXAMPLES::
sage: from slabbe import GraphD
|
irectedIteratedFunctionSystem as GIFS
sage: m = WordMorphism('a->ab,b->a')
sage: g = GIFS.from_one_dimensional_substitution(m)
sage: g
GIFS defined by 3 maps on
Vector space of dimension 1 over
Number Field in root with defining polynomial x^2 - x - 1 with
root = 1.618033988749895?
"""
from slabbe.matrices import perron_left_eigenvector_in_number_field
M = m.incidence_matrix()
root, perron_left = perron_left_eigenvector_in_number_field(M, 'root')
K = root.parent()
alphabet = m.domain().alphabet()
size = alphabet.cardinality()
module = K**1
d = {(i,j):[] for i,j in itertools.product(range(size),repeat=2)}
for i,a in enumerate(alphabet):
m_a = m(a)
pos = module.zero()
for b in m_a:
j = alphabet.index(b)
d[(i,j)].append(pos)
pos += module([perron_left[j]])
return cls.from_inflation_rule(module, root, d)
@classmethod
def from_two_dimensional_substitution(cls, s):
r"""
Return the GIFS defined by a 2-dimensional primitive
substitution
The marker point associated to each rectangular tile is assumed to
be in the lower left corner.
INPUT:
- ``s`` -- Substitution2d, primitive substitution
EXAMPLES::
sage: from slabbe import GraphDirectedIteratedFunctionSystem as GIFS
sage: from slabbe import Substitution2d
sage: d = {0:[[3]], 1:[[3],[2]], 2:[[3,1]], 3:[[3,1],[2,0]]}
sage: s = Substitution2d(d)
sage: ifs = GIFS.from_two_dimensional_substitution(s)
sage: ifs.pp()
GIFS defined by
|
Stanford-Online/edx-analytics-data-api-client
|
analyticsclient/tests/test_course_summaries.py
|
Python
|
apache-2.0
| 1,471
| 0.00136
|
import ddt
from analyticsclient.tests import (
APIListTestCase,
APIWithPostableIDsTestCase,
ClientTestCase
)
@ddt.ddt
class CourseSummariesTests(APIListTestCase, APIWithPostableIDsTestCase, ClientTestCase):
endpoint = 'course_summaries'
id_field = 'course_ids'
_LIST_PARAMS = frozenset([
'course_ids',
'availability',
'pacing_type',
'program_ids',
'fields',
'exclude',
])
_STRING_PARAMS = frozenset([
'text_search',
'order_by',
'sort_
|
order',
])
_INT_PARAMS = frozenset([
'page',
'page_size',
])
_ALL_PARAMS = _LIST_PARAMS | _STRING_PARAMS | _INT_PARAMS
other_params = _ALL_PARAMS
# Test URL encoding (note: '+' is not handled right by httpretty, but it works in practice)
_TEST_STRING = 'Aa1_-:/* '
@ddt.data(
(_LIST_PARAMS, ['a', 'b', 'c']),
(_LIST_PARAMS, [_TEST_STRING]),
(_LIST_PARAMS, []),
(_STRING_PARAMS, _TEST_STRING),
(_STRING_PARAM
|
S, ''),
(_INT_PARAMS, 1),
(_INT_PARAMS, 0),
(frozenset(), None),
)
@ddt.unpack
def test_all_parameters(self, param_names, param_value):
"""Course summaries can be called with all parameters."""
params = {param_name: None for param_name in self._ALL_PARAMS}
params.update({param_name: param_value for param_name in param_names})
self.verify_query_params(**params)
|
protwis/protwis
|
contactnetwork/management/commands/build_distance_representative.py
|
Python
|
apache-2.0
| 2,958
| 0.006423
|
from django.core.management.base import BaseCommand, CommandError
from django.core.management import call_command
from django.conf import settings
from django.db import connection
from django.db.models import Q, F
from contactnetwork.distances import *
from protein.models import ProteinFamily
import time
import scipy
class Command(BaseCommand):
help = "Build distance representatives"
|
def handle(self, *args, **options):
self.receptor_representatives()
def receptor_representatives(self):
print('Script to decide dis
|
tance representative for a state/receptor combination. Lowest average distance to all other structures for the same receptor/state')
structures = Structure.objects.all().prefetch_related(
"pdb_code",
"state",
"protein_conformation__protein__parent__family")
distinct_proteins = {}
resolution_lookup = {}
for s in structures:
pdb = s.pdb_code.index
resolution_lookup[pdb] = s.resolution
state = s.state.slug
slug = s.protein_conformation.protein.parent.family.slug
name = s.protein_conformation.protein.parent.family.name
key = '{}_{}'.format(name,state)
if key not in distinct_proteins:
distinct_proteins[key] = []
distinct_proteins[key].append(pdb)
for conformation, pdbs in distinct_proteins.items():
print(conformation, "PDBS:",pdbs)
number_of_pdbs = len(pdbs)
if (number_of_pdbs==1):
# Do not care when only one PDB for a conformation rep
print("REPRESENTATIVE:", pdbs[0])
s = Structure.objects.get(pdb_code__index=pdbs[0])
s.distance_representative = True
s.save()
else:
# Distances
dis = Distances()
dis.load_pdbs(pdbs)
distance_matrix = dis.get_distance_matrix()
# Calculate structures with lowest average distance (rank-by-vote fusion)
ranking = np.zeros(len(distance_matrix))
average = np.zeros(len(distance_matrix))
for i in range(0,len(distance_matrix)):
ranking = ranking + scipy.stats.rankdata(distance_matrix[i,:], method='min')
average = average + distance_matrix[i,:]
# check if single minimum
lowest = np.where(ranking==min(ranking))[0]
if len(lowest)>1:
lowest = lowest[np.where(average[lowest]==min(average))[0][0]]
for i in range(0,len(distance_matrix)):
if i==lowest:
print("REPRESENTATIVE:",pdbs[i])
s = Structure.objects.get(pdb_code__index=pdbs[i])
s.distance_representative = (i==lowest)
s.save()
|
mag6367/Cracking_the_Coding_Interview_Python_Solutions
|
chapter3/stack.py
|
Python
|
mit
| 418
| 0.057416
|
# Stack implementation
class S
|
tack (object):
def __init__ (self):
self.stack = []
def push (self, data):
se
|
lf.stack.append(data)
def peek (self):
if self.isEmpty():
return None
return self.stack[-1]
def pop (self):
if self.isEmpty():
return None
return self.stack.pop()
def isEmpty (self):
return len(self.stack) == 0
def __str__ (self):
return ' '.join(str(x) for x in self.stack)
|
shawnhermans/cyborgcrm
|
cyactivities/apps.py
|
Python
|
bsd-2-clause
| 192
| 0.010417
|
from django.apps import AppConfig
class ActivityConfig(AppConfig):
name = 'cyactivities'
verbose_name = 'Cyborg Activities'
def ready(self):
|
import cyacti
|
vities.signals
|
richardcornish/django-paywall
|
regwall/tests/articles/migrations/0001_initial.py
|
Python
|
bsd-3-clause
| 1,053
| 0.003799
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-04-14 17:20
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Article',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('headline', models.CharField(max
|
_length=255, verbose_name='Headline')),
|
('slug', models.SlugField(max_length=255, unique=True, verbose_name='Slug')),
('body', models.TextField(verbose_name='Body')),
('pub_date', models.DateTimeField(default=django.utils.timezone.now, verbose_name='Pub date')),
],
options={
'verbose_name': 'article',
'ordering': ['-pub_date'],
'verbose_name_plural': 'articles',
},
),
]
|
NeuroDataDesign/seelviz
|
seelviz/brainalign.py
|
Python
|
apache-2.0
| 3,476
| 0.006617
|
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from ndreg import *
import ndio.remote.neurodata as neurodata
import nibabel as nb
refToken = "ara_ccf2"
refImg = imgDownload(refToken)
imgShow(refImg)
plt.savefig("refImg_initial.png", bbox_inches='tight')
imgShow(refImg, vmax=500)
plt.savefig("refImg_initial_vmax500.png", bbox_inches='tight')
refAnnoImg = imgDownload(refToken, channel="annotation")
imgShow(refAnnoImg, vmax=1000)
plt.savefig("refAnnoImg_initial_vmax1000.png", bbox_inches='tight')
randValues = np.random.rand(1000,3)
randValues = np.concatenate(([[0,0,0]],randValues))
randCmap = matplotlib.colors.ListedColormap (randValues)
imgShow(refAnnoImg, vmax=1000, cmap=randCmap)
plt.savefig("ColorefAnnoImg_initial_vmax1000.png", bbox_inches='tight')
imgShow(refImg, vmax=500, newFig=False)
imgShow(refAnnoImg, vmax=1000, cmap=randCmap, alpha=0.2, newFig=False)
plt.show()
plt.savefig("OverlaidImg.png", bbox_inches='tight')
inToken = "Control258"
nd = neurodata()
inImg = imgDownload(inToken, resolution=5)
imgShow(inImg, vmax=500)
plt.savefig("rawImgvmax500.png", bbox_inches='tight')
inImg.SetSpacing([0.01872, 0.01872, 0.005])
inImg_download = inImg
inImg = imgResample(inImg, spacing=refImg.GetSpacing())
imgShow(inImg, vmax=500)
plt.savefig("resample_inImg.png", bbox_inches='tight')
inImg = imgReorient(inImg, "LAI", "RSA")
imgShow(inImg, vmax=500)
plt.savefig("resample_inImg_rotated.png", bbox_inches='tight')
inImg_reorient = inImg
spacing=[0.25,0.25,0.25]
refImg_ds = imgResample(refImg, spacing=spacing)
imgShow(refImg_ds, vmax=500)
plt.savefig("resample_refImg.png", bbox_inches='tight')
inImg_ds = imgResample(inImg, spacing=spacing)
imgShow(inImg_ds, vmax=500)
plt.savefig("inImg_ds.png", bbox_inches='tight')
affine = imgAffineComposite(inImg_ds, refImg_ds, iterati
|
ons=100, useMI=True, verbose=True)
inImg_affine = imgApplyAffine(inImg, affine, size=refImg.GetSize())
imgShow(inImg_affine, vmax=500)
plt.savefig("inImg_affine.png", bbox_inches='tight')
inImg_ds = img
|
Resample(inImg_affine, spacing=spacing)
(field, invField) = imgMetamorphosisComposite(inImg_ds, refImg_ds, alphaList=[0.05, 0.02, 0.01], useMI=True, iterations=100, verbose=True)
inImg_lddmm = imgApplyField(inImg_affine, field, size=refImg.GetSize())
imgShow(inImg_lddmm, vmax = 500)
imgShow(inImg_lddmm, vmax=500, newFig=False, numSlices=1)
imgShow(refAnnoImg, vmax=1000, cmap=randCmap, alpha=0.2, newFig=False, numSlices=1)
plt.savefig("overlay.png", bbox_inches='tight')
##################
# Reverse orientation
########
invAffine = affineInverse(affine)
invAffineField = affineToField(invAffine, refImg.GetSize(), refImg.GetSpacing())
invField = fieldApplyField(invAffineField, invField)
inAnnoImg = imgApplyField(refAnnoImg, invField,useNearest=True, size=inImg_reorient.GetSize())
imgShow(inAnnoImg, vmax=1000, cmap=randCmap)
plt.savefig("reverse_affine_annotations.png", bbox_inches='tight')
inAnnoImg = imgReorient(inAnnoImg, "RSA", "LAI")
imgShow(inAnnoImg, vmax=1000, cmap=randCmap)
plt.savefig("reoriented_reverse_affine_annotation.png", bbox_inches='tight')
inAnnoImg = imgResample(inAnnoImg, spacing=inImg_download.GetSpacing(), size=inImg_download.GetSize(), useNearest=True)
imgShow(inImg_download, vmax=500, numSlices=1, newFig=False)
imgShow(inAnnoImg, vmax=1000, cmap=randCmap, alpha=0.2, numSlices=1, newFig=False)
plt.savefig("final_atlas.png", bbox_inches='tight')
imgWrite(inAnnoImg, "final_resized_atlas.nii")
|
lektor/lektor
|
tests/test_i18n.py
|
Python
|
bsd-3-clause
| 252
| 0
|
import impor
|
tlib
import lektor.i18n
def test_loading_i18n_triggers_no_warnings(recwarn):
importlib.reload(lektor.i18n)
for warning in recwarn.list:
print(warning) # debugging: display warnings on stdou
|
t
assert len(recwarn) == 0
|
RockinRobin/seednetwork
|
seedlibrary/forms.py
|
Python
|
mit
| 6,875
| 0.0224
|
from django import forms
from seednetwork.forms import SeedNetworkBaseForm
from seedlibrary.models import Event
GRAIN_CHOICES = (
('-','-'),
('amaranth','Amaranth'),
('barley', 'Barley'),
('buckwheat', 'Buckwheat'),
('corn', 'Corn'),
# ('kaniwa', 'Kaniwa'),
('millet', 'Millet'),
('oats', 'Oats'),
('quinoa', 'Quinoa'),
('rice', 'Rice'),
('rye', 'Rye'),
('sorghum', 'Sorghum'),
('teff', 'Teff'),
#
|
('triticale', 'Triticale'),
('wheat', 'Wheat'),
)
GRAIN_SUBCATEGORIES = (
('-','-'),
('grain', 'Amaranth: Grain'),
('leaf', 'Amaranth: Leaf'),
('grain and leaf', 'Amaranth: Grain and Leaf'),
('common', 'Barley: Common'),
('hulless','Barley: Hulless'),
('common', 'Buckwheat: Common'),
('tartar
|
y', 'Buckwheat: Tartary'),
('dent', 'Corn: Dent'),
('flint', 'Corn: Flint'),
('flour', 'Corn: Flour'),
('popcorn', 'Corn: Popcorn'),
('sweet', 'Corn: Sweet'),
('finger', 'Millet: Finger'),
('foxtail', 'Millet: Foxtail'),
('pearl', 'Millet: Pearl'),
('proso', 'Millet: Proso'),
('common', 'Oats: Common'),
('hulless', 'Oats: Hulless'),
('dryland', 'Rice: Dryland'),
('paddy', 'Rice: Paddy'),
('broom', 'Sorghum: Broom'),
('grain', 'Sorghum: Grain'),
('sweet', 'Sorghum: Sweet'),
('multiuse', 'Sorghum: Multi-use'),
('unknown', 'Wheat: Not Sure'),
('club', 'Wheat: Club (Hexaploid)'),
('common', 'Wheat: Common (Hexaploid)'),
('durum', 'Wheat: Durum (Tetraploid)'),
('einkorn', 'Wheat: Einkorn (Diploid)'),
('emmer', 'Wheat: Emmer (Tetraploid)'),
('khorasan', 'Wheat: Khorasan (Tetraploid)'),
('macha', 'Wheat: Macha (Hexaploid)'),
('persian', 'Wheat: Persian (Tetraploid)'),
('polish', 'Wheat: Polish (Tetraploid)'),
('rivetpoulardcone', 'Wheat: Rivet/Poulard/Cone (Tetraploid)'),
('shot', 'Wheat: Shot (Hexaploid)'),
('spelt', 'Wheat: Spelt (Hexaploid)'),
('zanduri', 'Wheat: Zanduri (Tetraploid)'),
)
class GrainForm(SeedNetworkBaseForm):
required_css_class = 'required'
# seed_type = forms.CharField(label="Seed Type", max_length=150, required=False, help_text="i.e. grain, vegetable, herb, perennial, fruit bush, fruit tree, etc.")
crop_type = forms.ChoiceField(label="Grain", choices=GRAIN_CHOICES, required=True)
grain_subcategory=forms.ChoiceField(label="Grain Subcategory", choices=GRAIN_SUBCATEGORIES,required=True)
seed_variety = forms.CharField(label="Variety Name", max_length=150, required=True, help_text="e.g. Ukrainka, PI 356457 etc.")
seed_description = forms.CharField(label="Short Description", widget=forms.Textarea(attrs={'rows':'2', 'cols':'60'}), required=False, help_text="Briefly highlight defining characteristics. This text will appear in the Short Description column on the Browse Seeds page. Longer descriptions available in \'More Information\'. ")
enough_to_share = forms.BooleanField(label="Availability", required=False, help_text="Is your seed available for sharing or purchase? Please indicate terms on member profile page.")
year = forms.CharField(label="Year", max_length=150, required=False, help_text="What year was your seed grown?")
origin = forms.CharField(label="Source", max_length=150, required=False, help_text="The year and from whom you first obtained the seed.")
# events = forms.ModelMultipleChoiceField(Event.objects.filter(show_on_seed_edit=True), required=False, widget=forms.CheckboxSelectMultiple, help_text="What events will you bring the seed to?")
more_info = forms.BooleanField(label="More Information", required=False, help_text="Check the box to provide more detailed information that will be available on the seed's profile page.")
class ExtendedGrainForm(SeedNetworkBaseForm):
latin_name = forms.CharField(label="Latin Name", max_length=100, required=False, help_text="e.g. Triticum monococcum")
improvement_status =forms.ChoiceField(label="Improvement Status", choices=(('-','-'),('landrace','Landrace'),('cultivar','Cultivar'),('unknown','Unknown')),required=False)
growth_habit=forms.ChoiceField(label="Growth Habit", choices=(('-','-'),('spring','Spring'),('winter','Winter'),('facultative','Facultative'), ('perennial','Perennial')),required=False)
days_to_maturity=forms.IntegerField(label="Days to Maturity", required=False, help_text="Enter only a single number, even if that is an estimate.")
lodging=forms.ChoiceField(choices=(('-','-'),(1,'1'),(2,'2'),(3,'3'),(4,'4'),(5,'5'),(6,'6'),(7,'7'),(8,'8'),(9,'9')), required=False, help_text="1 = no lodging, 9 = all plants flat.")
cultivation=forms.CharField(label="Cultivation",widget=forms.Textarea( attrs={'rows':'5', 'cols':'60'}), required=False, help_text="Bed preparation, spacing, interplanting, fertility needs, pest protection, grown organically?")
cold_hardiness=forms.CharField(label="Cold Hardiness", widget=forms.Textarea(attrs={'rows':'5', 'cols':'60'}), required=False, help_text="Susceptibility to frost/freeze damage in spring/fall/winter? For example, \'A freak mid-June frost did not seem to slow down growth at all in USDA zone 5a.\'")
disease=forms.CharField(label="Disease",widget=forms.Textarea(attrs={'rows':'3', 'cols':'60'}), required=False, help_text="Describe disease resistance or susceptibility of variety.")
threshing=forms.CharField(label="Threshing",widget=forms.Textarea( attrs={'rows':'5', 'cols':'60'}), required=False, help_text="Describe ease or difficulty of threshing, shelling, dehulling.")
cold_hardiness=forms.CharField(label="Cold Hardiness", widget=forms.Textarea(attrs={'rows':'5', 'cols':'60'}), required=False, help_text="Susceptibility to frost/freeze damage in spring/fall/winter? For example, \'A freak mid-June frost did not seem to slow down growth at all in USDA zone 5a.\'")
culinary_qualities=forms.CharField(label="Culinary Qualities", widget=forms.Textarea(attrs={'rows':'5', 'cols':'60'}), required=False, help_text="Baking, cooking, or brewing qualities and uses.")
other_uses=forms.CharField(label="Other Uses", widget=forms.Textarea(attrs={'rows':'5', 'cols':'60'}), required=False, help_text="Livestock feed, bedding, broom-making, straw weaving, thatching, etc.")
additional_info=forms.CharField(label="Additional Information", widget=forms.Textarea(attrs={'rows':'5', 'cols':'60'}), required=False, help_text="Interesting history, cultural information, etc.")
external_url=forms.URLField(label="External URL", required=False, help_text="Include a link to a website with related information, or to your own website.")
class SeedExportForm(SeedNetworkBaseForm):
archive = forms.BooleanField(required=False, help_text="Do you want to export your archived seed listings?")
|
AloneRoad/waskr
|
waskr/database.py
|
Python
|
mit
| 2,723
| 0.011017
|
import os
import sqlite3
from time import time, strftime, gmtime
from waskr.config import options
import log
# Fixes Database Absolute Location
FILE_CWD = os.path.abspath(__file__)
FILE_DIR = os.path.dirname(FILE_CWD)
DB_FILE = FILE_DIR+'/waskr.db'
# Engines Supported
engines_supported = ['sqlite', 'mongodb']
class conf_db(object):
def __init__(self,
db = DB_FILE):
self.db = db
if os.path.isfile(self.db):
self.conn = sqlite3.connect(self.db)
self.c = self.conn.cursor()
else:
self.conn = sqlite3.connect(self.db)
table = """CREATE TABLE config(path TEXT)"""
self.c = self.conn.cursor()
self.c.execute(table)
self.conn.commit()
def closedb(self):
"""Make sure the db is closed"""
self.conn.close()
def add_config(self, path):
"""Adds a MASTER config for waskr"""
values = (path,path)
delete = 'DELETE FROM config'
command = 'INSERT INTO config(path) select ? WHERE NOT EXISTS(SELECT 1 FROM config WHERE path=?)'
self.c.execute(delete)
self.c.execute(command, values)
self.conn.commit()
def get_config_path(self):
"""Returns the first entry for the config path"""
command = "SELECT * FROM config limit 1"
return self.c.execute(command)
class Stats(object):
def __init__(self,config=None, test=False):
self.config = options(config)
self.engine = self._load_engine()
self.stats = self.engine.Stats(config, test)
def _load_engine(self):
if self._check_module(self.config['db_engine']):
engine = __import__('waskr.engines.%s' % self.config['db_engine'],
|
fromlist=['None'])
else:
engine = __import__('waskr.engines.sqlite',
fromlist=['None']) # fall backs to sqlite3
return engine
def _check_module(self, module):
if module not in engines_supported:
return False
|
return True
def insert(self, stats):
self.stats.insert(stats)
def last_insert(self):
return self.stats.last_insert()
def apps_nodes(self):
return self.stats.apps_nodes()
def response_time(self, minutes):
return self.stats.response_time(minutes)
def response_bundle(self, minutes):
return self.stats.request_bundle(minutes)
def request_bundle(self, minutes):
return self.stats.request_bundle(minutes)
def request_time(self, minutes):
return self.stats.request_time(minutes)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.