repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
Jarob22/selenium | py/selenium/webdriver/__init__.py | Python | apache-2.0 | 1,735 | 0 | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Un | less required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from .firefox.webdriver import WebDriver as Firefo | x # noqa
from .firefox.firefox_profile import FirefoxProfile # noqa
from .chrome.webdriver import WebDriver as Chrome # noqa
from .chrome.options import Options as ChromeOptions # noqa
from .ie.webdriver import WebDriver as Ie # noqa
from .edge.webdriver import WebDriver as Edge # noqa
from .opera.webdriver import WebDriver as Opera # noqa
from .safari.webdriver import WebDriver as Safari # noqa
from .blackberry.webdriver import WebDriver as BlackBerry # noqa
from .phantomjs.webdriver import WebDriver as PhantomJS # noqa
from .android.webdriver import WebDriver as Android # noqa
from .remote.webdriver import WebDriver as Remote # noqa
from .common.desired_capabilities import DesiredCapabilities # noqa
from .common.action_chains import ActionChains # noqa
from .common.touch_actions import TouchActions # noqa
from .common.proxy import Proxy # noqa
__version__ = '3.3.1'
|
EventGhost/EventGhost | eg/Classes/AddActionGroupDialog.py | Python | gpl-2.0 | 1,500 | 0.000667 | # -*- coding: utf-8 -*-
#
# This file is part of EventGhost.
# Copyright © 2005-2020 EventGhost Project <http://www.eventghost.net/>
#
# EventGhost is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 2 of the License, or (at your option)
# any later version.
#
# EventGhost is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General P | ublic License for
# more details.
#
# You should have received a copy of the GNU General Public License along
# with EventGhost. If not, see <http://www.gnu.org/licenses/>.
# Local imports
import eg
class Text(eg.TranslatableStrings):
caption = "Add Actions?"
message = (
"EventGhost can add a folder with all actions of this plugin to your | "
"configuration tree. If you want to do so, select the location where "
"it should be added and press OK.\n\n"
"Otherwise press the cancel button."
)
class AddActionGroupDialog(eg.TreeItemBrowseDialog):
def Configure(self, parent=None):
eg.TreeItemBrowseDialog.Configure(
self,
Text.caption,
Text.message,
searchItem=None,
resultClasses=(eg.FolderItem, eg.RootItem),
filterClasses=(eg.FolderItem, ),
parent=parent,
)
|
weblabdeusto/weblablib | examples/simple/example.py | Python | agpl-3.0 | 3,126 | 0.006398 | from __future__ import print_function
import time
from flask import Flask, session, url_for
from flask_debugtoolbar import DebugToolbarExtension
from weblablib import WebLab, requires_active, weblab_user, poll
app = Flask(__name__)
# XXX: IMPORTANT SETTINGS TO CHANGE
app.config['SECRET_KEY'] = 'something random' # e.g., run: os.urandom(32) and put the output here
app.config['WEBLAB_USERNAME'] = 'weblabdeusto' # This is the http_username you put in WebLab-Deusto
app.config['WEBLAB_PASSWORD'] = 'password' # This is the http_password you put in WebLab-Deusto
# XXX You should change...
# Use different cookie names for different labs
app.config['SESSION_COOKIE_NAME'] = 'lab'
# app.config['WEBLAB_UNAUTHORIZED_LINK'] = 'https://weblab.deusto.es/weblab/' # Your own WebLab-Deusto URL
# The URL for this lab (e.g., you might have two labs, /lab1 and /lab2 in the same server)
app.config['SESSION_COOKIE_PATH'] = '/lab'
# The session_id is stored in the Flask session. You might also use a different name
app.config['WEBLAB_SESSION_ID_NAME'] = 'lab_session_id'
# These are optional parameters
# Flask-Debug: don't intercept redirects (go directly)
app.config['DEBUG_TB_INTERCEPT_REDIRECTS'] = False
# app.config['WEBLAB_BASE_URL'] = '' # If you want the weblab path to start by /foo/weblab, you can put '/foo'
# app.config['WEBLAB_REDIS_URL'] = 'redis://localhost:6379/0' # default value
# app.config['WEBLAB_REDIS_BASE'] = 'lab1' # If you have more tha | n one lab in the same redis database
# app.config['WEBLAB_CALLBACK_URL'] = '/lab/public' # If you don't pass it in the creator
# app.config['WEBLAB_TIMEOUT'] = 15 # in seconds, default value
# app.config['WEBLAB_SCHEME'] = 'https'
weblab = Web | Lab(app, callback_url='/lab/public')
toolbar = DebugToolbarExtension(app)
@weblab.initial_url
def initial_url():
"""
This returns the landing URL (e.g., where the user will be forwarded).
"""
return url_for('.lab')
@weblab.on_start
def on_start(client_data, server_data):
"""
In this code, you can do something to setup the experiment. It is
called for every user, before they start using it.
"""
print("New user!")
print(weblab_user)
@weblab.on_dispose
def on_stop():
"""
In this code, you can do something to clean up the experiment. It is
guaranteed to be run.
"""
print("User expired. Here you should clean resources")
print(weblab_user)
@app.route('/lab/')
@requires_active
def lab():
"""
This is your code. If you provide @requires_active to any other URL, it is secured.
"""
user = weblab_user
return "Hello %s. You didn't poll in %.2f seconds (timeout configured to %s). Total time left: %s" % (user.username, user.time_without_polling, weblab.timeout, user.time_left)
@app.route("/")
def index():
return "<html><head></head><body><a href='{}'>Access to the lab</a></body></html>".format(url_for('.lab'))
if __name__ == '__main__':
print("Run the following:")
print()
print(" (optionally) $ export FLASK_DEBUG=1")
print(" $ export FLASK_APP={}".format(__file__))
print(" $ flask run")
print()
|
samskeller/zeroclickinfo-fathead | lib/fathead/py_pi/parse.py | Python | apache-2.0 | 1,981 | 0.003534 | #!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
import codecs
import json
import re
import urllib
with codecs.open('download/package-jsons', encoding='utf-8') as in_file, \
codecs.open('output.txt', mode='wb', encoding='utf-8') as out_file:
for package_json in in_file:
package_dict = json.loads(package_json)
package_info = package_dict['info']
# Build abstract
abstract_lines = []
summary = package_info['summary']
if not summary or summary == 'UNKNOWN':
continue
abstract_lines.append(re.sub(r'\s', ' ', summary, flags=re.MULTILINE | re.UNICODE))
#abstract_lines.append('Downloads in the last month: %s' % package_info['d | ownloads']['last_month'])
for classifier in package_info['classifiers']:
if classifier.startswith('Development Status'):
abstract_lines.append('Development status: %s' % classifier.split(' - ')[-1])
break
ab | stract_lines.append("<pre><code>pip install " + package_info['name'] + "</code></pre>")
official_site = ''
# check for real links. We can get stuff like 'unknown', '404' in here
if package_info['home_page'] and re.search(r'www.', package_info['home_page']):
official_site = '[' + package_info['home_page'] + ' Official site]\\\\n'
out_file.write('\t'.join([
package_info['name'], # Title
'A', # Article type
'', # No redirect
'', # Other uses (ignored)
'', # No categories
'', # References (ignored)
'', # No related topics
'', # Further reading (ignored)
official_site, # External links (ignored)
'', # Disambiguation (ignored)
'', # No images
'<br>'.join(abstract_lines),
urllib.quote(package_info['package_url'], safe='/:'), # Source url
]))
out_file.write('\n')
|
espressopp/espressopp | src/analysis/NeighborFluctuation.py | Python | gpl-3.0 | 1,852 | 0.00378 | # Copyright (C) 2012,2013
# Max Planck Institute for Polymer Research
# Copyright (C) 2008,2009,2010,2011
# Max-Planck-Institute for Polymer Research & Fraunhofer SCAI
#
# This file is part of ESPResSo++.
#
# ESPResSo++ is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo++ is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
r"""
***************************************
espressopp.analysis.NeighborFluctuation
***************************************
.. function:: espressopp.analysis.NeighborFluctuation(system, radius)
:param system:
:param radius:
:type system:
:type radius:
"""
from espressopp.esutil import cxxinit
from espressopp import pmi
from espressopp.analysis.Observable import *
from _espressopp import analysis_NeighborFluctuation
class NeighborFluctuationLocal(ObservableLocal, analysis_NeighborFluctuation):
def __init__(self, system, radius):
if not (pmi._PMIComm and pmi._PMIComm.isActive()) or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup( | ):
cxxinit(self, analysis_NeighborFluctuation, system, radius)
if pmi.isController :
class NeighborFluctuation(Observable, metaclass=pmi.Proxy):
pmiproxydefs = dict(
cls = 'espressopp.analysis.NeighborFl | uctuationLocal'
)
|
ronaldahmed/robot-navigation | neural-navigation-with-lstm/MARCO/plastk/rand.py | Python | mit | 1,050 | 0.02381 | """
Random numbers for PLASTK.
$Id: rand.py,v 1.4 2005/08/20 16:46:14 jp Exp $
"""
#from RandomArray import *
import sys
def shuffle(L):
"""
Return randomly permuted version of L. (non-destructive)
"""
return [L[i] for i in permutation(len(L))]
def strseed(s):
s1 = s[::2]
s2 = s[1::2]
seed(int(strhash(s1)),int(strhash(s2)))
def randrange(i,j=None,step=1):
if j==None:
r = range(0,i,step)
else:
r = range(i,j,step)
return r[int(uniform(0,len(r)))]
def sample(seq,weights=[]):
if not weights:
return seq[randrange(len(seq))]
else:
assert len(weights) == len(seq)
| retur | n seq[sample_index(weights)]
def sample_index(weights):
total = sum(weights)
if total == 0:
return randrange(len(weights))
index = random() * total
accum = 0
for i,x in enumerate(weights):
accum += x
if index < accum:
return i
def strhash(s,base=31,mod=2147483647):
return reduce(lambda x,y: (x*base+y)%mod,map(ord,s))
|
devonjones/PSRD-Parser | src/psrd/sql/animal_companions.py | Python | gpl-3.0 | 1,808 | 0.03042 | from psrd.sql.utils import test_args
def create_animal_companion_details_table(curs):
sql = '\n'.join([
"CREATE TABLE animal_companion_details (",
" animal_companion_details_id INTEGER PRIMARY KEY,",
" section_id INTEGER NOT NULL,",
" ac TEXT,",
" attack TEXT,",
" cmd TEXT,",
" ability_scores TEXT,",
" special_abilities TEXT,",
" special_qualities TEXT,",
" special_attacks TEXT,",
" size TEXT,",
" speed TEXT,",
" bonus_feat TEXT,",
" level TEXT",
")"])
curs.execute(sql)
def create_animal_companion_details_index(curs):
sql = '\n'.join([
"CREATE INDEX animal_companion_details_section_id",
" ON animal_companion_details (section_id)"])
curs.execute(sql)
def insert_animal_companion_detail(curs, section_id, ac=None, at | tack=None,
cmd=None, ability_scores=None, special_abilities= | None,
special_qualities=None, special_attacks=None, size=None, speed=None,
bonus_feat=None, level=None, **kwargs):
values = [section_id, ac, attack, cmd, ability_scores, special_abilities,
special_qualities, special_attacks, size, speed, bonus_feat, level]
test_args(kwargs)
sql = '\n'.join([
"INSERT INTO animal_companion_details",
" (section_id, ac, attack, cmd, ability_scores, special_abilities, "
" special_qualities, special_attacks, size, speed, bonus_feat, level)",
" VALUES",
" (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"])
curs.execute(sql, values)
def delete_animal_companion_detail(curs, section_id):
values = [section_id]
sql = '\n'.join([
"DELETE FROM animal_companion_details",
" WHERE section_id = ?"])
curs.execute(sql, values)
def fetch_animal_companion_detail(curs, section_id):
values = [section_id]
sql = '\n'.join([
"SELECT *",
" FROM animal_companion_details",
" WHERE section_id = ?"])
curs.execute(sql, values)
|
efornal/platy | manage.py | Python | gpl-3.0 | 248 | 0 | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
| os.environ.setdefault("DJANGO_SETTINGS_MODULE", "platy.settings")
from django.core.management import execute_from_command_line
| execute_from_command_line(sys.argv)
|
alexras/pelican | pelican/generators.py | Python | agpl-3.0 | 31,054 | 0.000773 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function
import os
import six
import logging
import shutil
import fnmatch
import calendar
from codecs import open
from collections import defaultdict
from functools import partial
from itertools import chain, groupby
from operator import attrgetter
from jinja2 import (Environment, FileSystemLoader, PrefixLoader, ChoiceLoader,
BaseLoader, TemplateNotFound)
from pelican.cache import FileStampDataCacher
from pelican.contents import Article, Draft, Page, Static, is_valid_content
from pelican.readers import Readers
from pelican.utils import (copy, process_translations, mkdir_p, DateFormatter,
python_2_unicode_compatible, posixize_path)
from pelican import signals
logger = logging.getLogger(__name__)
class PelicanTemplateNotFound(Exception):
pass
@python_2_unicode_compatible
class Generator(object):
"""Baseclass generator"""
def __init__(self, context, settings, path, theme, output_path,
readers_cache_name='', **kwargs):
self.context = context
self.settings = settings
self.path = path
self.theme = theme
self.output_path = output_path
for arg, value in kwargs.items():
setattr(self, arg, value)
self.readers = Readers(self.settings, readers_cache_name)
# templates cache
self._templates = {}
self._templates_path = []
self._templates_path.append(os.path.expanduser(
os.path.join(self.theme, 'templates')))
self._templates_path += self.settings['EXTRA_TEMPLATES_PATHS']
theme_path = os.path.dirname(os.path.abspath(__file__))
simple_loader = FileSystemLoader(os.path.join(t | heme_path,
"themes", "simple", | "templates"))
self.env = Environment(
trim_blocks=True,
lstrip_blocks=True,
loader=ChoiceLoader([
FileSystemLoader(self._templates_path),
simple_loader, # implicit inheritance
PrefixLoader({'!simple': simple_loader}) # explicit one
]),
extensions=self.settings['JINJA_EXTENSIONS'],
)
logger.debug('Template list: %s', self.env.list_templates())
# provide utils.strftime as a jinja filter
self.env.filters.update({'strftime': DateFormatter()})
# get custom Jinja filters from user settings
custom_filters = self.settings['JINJA_FILTERS']
self.env.filters.update(custom_filters)
signals.generator_init.send(self)
def get_template(self, name):
"""Return the template by name.
Use self.theme to get the templates to use, and return a list of
templates ready to use with Jinja2.
"""
if name not in self._templates:
try:
self._templates[name] = self.env.get_template(name + '.html')
except TemplateNotFound:
raise PelicanTemplateNotFound('[templates] unable to load %s.html from %s'
% (name, self._templates_path))
return self._templates[name]
def _include_path(self, path, extensions=None):
"""Inclusion logic for .get_files(), returns True/False
:param path: the path which might be including
:param extensions: the list of allowed extensions (if False, all
extensions are allowed)
"""
if extensions is None:
extensions = tuple(self.readers.extensions)
basename = os.path.basename(path)
#check IGNORE_FILES
ignores = self.settings['IGNORE_FILES']
if any(fnmatch.fnmatch(basename, ignore) for ignore in ignores):
return False
if extensions is False or basename.endswith(extensions):
return True
return False
def get_files(self, paths, exclude=[], extensions=None):
"""Return a list of files to use, based on rules
:param paths: the list pf paths to search (relative to self.path)
:param exclude: the list of path to exclude
:param extensions: the list of allowed extensions (if False, all
extensions are allowed)
"""
if isinstance(paths, six.string_types):
paths = [paths] # backward compatibility for older generators
# group the exclude dir names by parent path, for use with os.walk()
exclusions_by_dirpath = {}
for e in exclude:
parent_path, subdir = os.path.split(os.path.join(self.path, e))
exclusions_by_dirpath.setdefault(parent_path, set()).add(subdir)
files = []
ignores = self.settings['IGNORE_FILES']
for path in paths:
# careful: os.path.join() will add a slash when path == ''.
root = os.path.join(self.path, path) if path else self.path
if os.path.isdir(root):
for dirpath, dirs, temp_files in os.walk(root, followlinks=True):
drop = []
excl = exclusions_by_dirpath.get(dirpath, ())
for d in dirs:
if (d in excl or
any(fnmatch.fnmatch(d, ignore)
for ignore in ignores)):
drop.append(d)
for d in drop:
dirs.remove(d)
reldir = os.path.relpath(dirpath, self.path)
for f in temp_files:
fp = os.path.join(reldir, f)
if self._include_path(fp, extensions):
files.append(fp)
elif os.path.exists(root) and self._include_path(path, extensions):
files.append(path) # can't walk non-directories
return files
def add_source_path(self, content):
"""Record a source file path that a Generator found and processed.
Store a reference to its Content object, for url lookups later.
"""
location = content.get_relative_source_path()
self.context['filenames'][location] = content
def _add_failed_source_path(self, path):
"""Record a source file path that a Generator failed to process.
(For example, one that was missing mandatory metadata.)
The path argument is expected to be relative to self.path.
"""
self.context['filenames'][posixize_path(os.path.normpath(path))] = None
def _is_potential_source_path(self, path):
"""Return True if path was supposed to be used as a source file.
(This includes all source files that have been found by generators
before this method is called, even if they failed to process.)
The path argument is expected to be relative to self.path.
"""
return posixize_path(os.path.normpath(path)) in self.context['filenames']
def _update_context(self, items):
"""Update the context with the given items from the currrent
processor.
"""
for item in items:
value = getattr(self, item)
if hasattr(value, 'items'):
value = list(value.items()) # py3k safeguard for iterators
self.context[item] = value
def __str__(self):
# return the name of the class for logging purposes
return self.__class__.__name__
class CachingGenerator(Generator, FileStampDataCacher):
'''Subclass of Generator and FileStampDataCacher classes
enables content caching, either at the generator or reader level
'''
def __init__(self, *args, **kwargs):
'''Initialize the generator, then set up caching
note the multiple inheritance structure
'''
cls_name = self.__class__.__name__
Generator.__init__(self, *args,
readers_cache_name=(cls_name + '-Readers'),
**kwargs)
cache_this_level = self.settings['CONTENT_CACHING_LAYER'] == 'generator'
caching_policy = cache_this_level and self.settings['CACHE_CONTENT']
|
tectronics/l5rcm | dal/skill.py | Python | gpl-3.0 | 2,619 | 0.008782 | # -*- coding: iso-8859-1 -*-
# Copyright (C) 2011 Daniele Simonetti
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of |
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Publi | c License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
class MasteryAbility(object):
@staticmethod
def build_from_xml(elem):
f = MasteryAbility()
f.rank = int(elem.attrib['rank'])
f.rule = elem.attrib['rule'] if ('rule' in elem.attrib) else None
f.desc = elem.text
return f
class SkillCateg(object):
@staticmethod
def build_from_xml(elem):
f = SkillCateg()
f.id = elem.attrib['id']
f.name = elem.text
return f
def __str__(self):
return self.name
def __unicode__(self):
return self.name
def __eq__(self, obj):
return obj and obj.id == self.id
def __ne__(self, obj):
return not self.__eq__(obj)
def __hash__(self):
return self.id.__hash__()
class Skill(object):
@staticmethod
def build_from_xml(elem):
f = Skill()
f.name = elem.attrib['name']
f.id = elem.attrib['id']
f.trait = elem.attrib['trait']
f.type = elem.attrib['type']
f.tags = [f.type]
if elem.find('Tags'):
for se in elem.find('Tags').iter():
if se.tag == 'Tag':
f.tags.append(se.text)
f.mastery_abilities = []
if elem.find('MasteryAbilities'):
for se in elem.find('MasteryAbilities').iter():
if se.tag == 'MasteryAbility':
f.mastery_abilities.append(MasteryAbility.build_from_xml(se))
return f
def __str__(self):
return self.name or self.id
def __unicode__(self):
return self.name
def __eq__(self, obj):
return obj and obj.id == self.id
def __ne__(self, obj):
return not self.__eq__(obj)
def __hash__(self):
return self.id.__hash__()
|
brotherlogic/gobuildmaster | BuildAndRun.py | Python | apache-2.0 | 1,102 | 0.00363 | macimport os
import subprocess
name = "gobuildmaster"
current_hash = ""
for line in os.popen("md5sum " + name).readlines():
current_hash = line.split(' ')[0]
# Move the old version over
for line in os.popen('cp ' + name + ' old' + name).readlines():
print line.strip()
# Rebuild
for line | in os.popen('go build').readlines():
print line.strip()
size_1 = os.path.getsize('./old' + name)
size_2 = os.path.getsize('./' + name)
lines = os.popen('ps -ef | grep ' + name).readlines()
running = False
for line in lines:
if "./" + name in line:
running = True
new_hash = ""
for line in os.popen("md5sum " + name).readlines():
new_hash = line.split(' ')[0] |
if size_1 != size_2 or new_hash != current_hash or not running:
if not running:
for line in os.popen('cat out.txt | mail -E -s "Crash Report ' + name + '" brotherlogic@gmail.com').readlines():
pass
for line in os.popen('echo "" > out.txt').readlines():
pass
for line in os.popen('killall ' + name).readlines():
pass
subprocess.Popen(['./' + name])
|
popazerty/dvbapp2-gui | lib/python/Screens/Ci.py | Python | gpl-2.0 | 11,228 | 0.036961 | from Screen import Screen
from Components.ActionMap import ActionMap
from Components.ActionMap import NumberActionMap
from Components.Label import Label
from Components.config import config, ConfigSubsection, ConfigSelection, ConfigSubList, getConfigListEntry, KEY_LEFT, KEY_RIGHT, KEY_0, ConfigNothing, ConfigPIN
from Components.ConfigList import ConfigList
from Components.SystemInfo import SystemInfo
from enigma import eTimer, eDVBCI_UI, eDVBCIInterfaces
MAX_NUM_CI | = 4
def setCIBitrate(configElement):
if configElement.value == "no":
eDVBCI_UI.getInstance().setClockRate(configElement.slotid, eDVBCI_UI.rateNormal)
else:
eDVBCI_UI.getInstance().setClockRate(configElement.slotid, eDVBCI_UI.rateHigh)
def InitCiConfig():
config.ci = ConfigSubList()
for slot in range(MAX_NUM_CI):
config.ci.append(ConfigSubsection())
config.ci[slot].canDescrambleMultipleServices = ConfigSelection(choices = [("auto", _("Auto")), ("no", _("No")), ("yes" | , _("Yes"))], default = "auto")
if SystemInfo["CommonInterfaceSupportsHighBitrates"]:
config.ci[slot].canHandleHighBitrates = ConfigSelection(choices = [("no", _("No")), ("yes", _("Yes"))], default = "yes")
config.ci[slot].canHandleHighBitrates.slotid = slot
config.ci[slot].canHandleHighBitrates.addNotifier(setCIBitrate)
class MMIDialog(Screen):
def __init__(self, session, slotid, action, handler = eDVBCI_UI.getInstance(), wait_text = _("wait for ci...") ):
Screen.__init__(self, session)
print "MMIDialog with action" + str(action)
self.mmiclosed = False
self.tag = None
self.slotid = slotid
self.timer = eTimer()
self.timer.callback.append(self.keyCancel)
#else the skins fails
self["title"] = Label("")
self["subtitle"] = Label("")
self["bottom"] = Label("")
self["entries"] = ConfigList([ ])
self["actions"] = NumberActionMap(["SetupActions"],
{
"ok": self.okbuttonClick,
"cancel": self.keyCancel,
#for PIN
"left": self.keyLeft,
"right": self.keyRight,
"1": self.keyNumberGlobal,
"2": self.keyNumberGlobal,
"3": self.keyNumberGlobal,
"4": self.keyNumberGlobal,
"5": self.keyNumberGlobal,
"6": self.keyNumberGlobal,
"7": self.keyNumberGlobal,
"8": self.keyNumberGlobal,
"9": self.keyNumberGlobal,
"0": self.keyNumberGlobal
}, -1)
self.action = action
self.handler = handler
self.wait_text = wait_text
if action == 2: #start MMI
handler.startMMI(self.slotid)
self.showWait()
elif action == 3: #mmi already there (called from infobar)
self.showScreen()
def addEntry(self, list, entry):
if entry[0] == "TEXT": #handle every item (text / pin only?)
list.append( (entry[1], ConfigNothing(), entry[2]) )
if entry[0] == "PIN":
pinlength = entry[1]
if entry[3] == 1:
# masked pins:
x = ConfigPIN(0, len = pinlength, censor = "*")
else:
# unmasked pins:
x = ConfigPIN(0, len = pinlength)
x.addEndNotifier(self.pinEntered)
self["subtitle"].setText(entry[2])
list.append( getConfigListEntry("", x) )
self["bottom"].setText(_("please press OK when ready"))
def pinEntered(self, value):
self.okbuttonClick()
def okbuttonClick(self):
self.timer.stop()
if not self.tag:
return
if self.tag == "WAIT":
print "do nothing - wait"
elif self.tag == "MENU":
print "answer MENU"
cur = self["entries"].getCurrent()
if cur:
self.handler.answerMenu(self.slotid, cur[2])
else:
self.handler.answerMenu(self.slotid, 0)
self.showWait()
elif self.tag == "LIST":
print "answer LIST"
self.handler.answerMenu(self.slotid, 0)
self.showWait()
elif self.tag == "ENQ":
cur = self["entries"].getCurrent()
answer = str(cur[1].getValue())
length = len(answer)
while length < cur[1].getLength():
answer = '0'+answer
length+=1
self.handler.answerEnq(self.slotid, answer)
self.showWait()
def closeMmi(self):
self.timer.stop()
self.close(self.slotid)
def keyCancel(self):
self.timer.stop()
if not self.tag or self.mmiclosed:
self.closeMmi()
elif self.tag == "WAIT":
self.handler.stopMMI(self.slotid)
self.closeMmi()
elif self.tag in ( "MENU", "LIST" ):
print "cancel list"
self.handler.answerMenu(self.slotid, 0)
self.showWait()
elif self.tag == "ENQ":
print "cancel enq"
self.handler.cancelEnq(self.slotid)
self.showWait()
else:
print "give cancel action to ci"
def keyConfigEntry(self, key):
self.timer.stop()
try:
self["entries"].handleKey(key)
except:
pass
def keyNumberGlobal(self, number):
self.timer.stop()
self.keyConfigEntry(KEY_0 + number)
def keyLeft(self):
self.timer.stop()
self.keyConfigEntry(KEY_LEFT)
def keyRight(self):
self.timer.stop()
self.keyConfigEntry(KEY_RIGHT)
def updateList(self, list):
List = self["entries"]
try:
List.instance.moveSelectionTo(0)
except:
pass
List.l.setList(list)
def showWait(self):
self.tag = "WAIT"
self["title"].setText("")
self["subtitle"].setText("")
self["bottom"].setText("")
list = [ ]
list.append( (self.wait_text, ConfigNothing()) )
self.updateList(list)
def showScreen(self):
screen = self.handler.getMMIScreen(self.slotid)
list = [ ]
self.timer.stop()
if len(screen) > 0 and screen[0][0] == "CLOSE":
timeout = screen[0][1]
self.mmiclosed = True
if timeout > 0:
self.timer.start(timeout*1000, True)
else:
self.keyCancel()
else:
self.mmiclosed = False
self.tag = screen[0][0]
for entry in screen:
if entry[0] == "PIN":
self.addEntry(list, entry)
else:
if entry[0] == "TITLE":
self["title"].setText(entry[1])
elif entry[0] == "SUBTITLE":
self["subtitle"].setText(entry[1])
elif entry[0] == "BOTTOM":
self["bottom"].setText(entry[1])
elif entry[0] == "TEXT":
self.addEntry(list, entry)
self.updateList(list)
def ciStateChanged(self):
do_close = False
if self.action == 0: #reset
do_close = True
if self.action == 1: #init
do_close = True
#module still there ?
if self.handler.getState(self.slotid) != 2:
do_close = True
#mmi session still active ?
if self.handler.getMMIState(self.slotid) != 1:
do_close = True
if do_close:
self.closeMmi()
elif self.action > 1 and self.handler.availableMMI(self.slotid) == 1:
self.showScreen()
#FIXME: check for mmi-session closed
class CiMessageHandler:
def __init__(self):
self.session = None
self.ci = { }
self.dlgs = { }
eDVBCI_UI.getInstance().ciStateChanged.get().append(self.ciStateChanged)
SystemInfo["CommonInterface"] = eDVBCIInterfaces.getInstance().getNumOfSlots() > 0
try:
file = open("/proc/stb/tsmux/ci0_tsclk", "r")
file.close()
SystemInfo["CommonInterfaceSupportsHighBitrates"] = True
except:
SystemInfo["CommonInterfaceSupportsHighBitrates"] = False
def setSession(self, session):
self.session = session
def ciStateChanged(self, slot):
if slot in self.ci:
self.ci[slot](slot)
else:
if slot in self.dlgs:
self.dlgs[slot].ciStateChanged()
elif eDVBCI_UI.getInstance().availableMMI(slot) == 1:
if self.session and not config.usage.hide_ci_messages.getValue():
self.dlgs[slot] = self.session.openWithCallback(self.dlgClosed, MMIDialog, slot, 3)
def dlgClosed(self, slot):
if slot in self.dlgs:
del self.dlgs[slot]
def registerCIMessageHandler(self, slot, func):
self.unregisterCIMessageHandler(slot)
self.ci[slot] = func
def unregisterCIMessageHandler(self, slot):
if slot in self.ci:
del self.ci[slot]
CiHandler = CiMessageHandler()
class CiSelection(Screen):
def __init__(self, session):
Screen.__init__(self, session)
self.setTitle(_("Common Interface"))
self["actions"] = ActionMap(["OkCancelActions", "CiSelectionActions"],
{
"left": self.keyLeft,
"right": self.keyLeft,
"ok": self.okbuttonClick,
"cancel": self.cancel
},-1)
self.dlg = None
self.state = { }
self.list = [ ]
for slot in range(MAX_NUM_CI):
state = eDVBCI_UI.getInstance().getState(slot)
if state != -1:
self.appendEntries(slot, state)
CiHandler.registerCIMessageHandler(slot, self.ciStateChanged)
menuList = ConfigList(sel |
normanmaurer/autobahntestsuite-maven-plugin | src/main/resources/twisted/python/runtime.py | Python | apache-2.0 | 4,513 | 0.003988 | # -*- test-case-name: twisted.python.test.test_runtime -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
from __future__ import division, absolute_import
import os
import sys
import time
import imp
import warnings
from twisted.python i | mport compat
if compat._PY3:
_threadModule = "_thread"
else:
_threadModule = "thread"
def shortPythonVersion():
"""
Returns the Python version as a dot-separated string.
"""
return "%s.%s.%s" % sys.version_info[:3]
knownPlatforms = {
'nt': 'win32',
'ce': 'win32',
'posix': 'posix',
'java': 'java',
'org.python.mo | dules.os': 'java',
}
_timeFunctions = {
#'win32': time.clock,
'win32': time.time,
}
class Platform:
"""
Gives us information about the platform we're running on.
"""
# See http://twistedmatrix.com/trac/ticket/3413
# By oberstet
if os.name == 'java' and hasattr(os, '_name'):
if os._name == 'posix':
osName = os.name
else:
## see:
## http://bugs.jython.org/issue1521
## http://bugs.jython.org/msg7927
osName = os._name
else:
osName = os.name
type = knownPlatforms.get(osName)
seconds = staticmethod(_timeFunctions.get(type, time.time))
_platform = sys.platform
def __init__(self, name=None, platform=None):
if name is not None:
self.type = knownPlatforms.get(name)
self.seconds = _timeFunctions.get(self.type, time.time)
if platform is not None:
self._platform = platform
def isKnown(self):
"""
Do we know about this platform?
@return: Boolean indicating whether this is a known platform or not.
@rtype: C{bool}
"""
return self.type != None
def getType(self):
"""
Get platform type.
@return: Either 'posix', 'win32' or 'java'
@rtype: C{str}
"""
return self.type
def isMacOSX(self):
"""
Check if current platform is Mac OS X.
@return: C{True} if the current platform has been detected as OS X.
@rtype: C{bool}
"""
return self._platform == "darwin"
def isWinNT(self):
"""
Are we running in Windows NT?
This is deprecated and always returns C{True} on win32 because
Twisted only supports Windows NT-derived platforms at this point.
@return: C{True} if the current platform has been detected as
Windows NT.
@rtype: C{bool}
"""
warnings.warn(
"twisted.python.runtime.Platform.isWinNT was deprecated in "
"Twisted 13.0. Use Platform.isWindows instead.",
DeprecationWarning, stacklevel=2)
return self.isWindows()
def isWindows(self):
"""
Are we running in Windows?
@return: C{True} if the current platform has been detected as
Windows.
@rtype: C{bool}
"""
return self.getType() == 'win32'
def isVista(self):
"""
Check if current platform is Windows Vista or Windows Server 2008.
@return: C{True} if the current platform has been detected as Vista
@rtype: C{bool}
"""
if getattr(sys, "getwindowsversion", None) is not None:
return sys.getwindowsversion()[0] == 6
else:
return False
def isLinux(self):
"""
Check if current platform is Linux.
@return: C{True} if the current platform has been detected as Linux.
@rtype: C{bool}
"""
return self._platform.startswith("linux")
def supportsThreads(self):
"""
Can threads be created?
@return: C{True} if the threads are supported on the current platform.
@rtype: C{bool}
"""
try:
return imp.find_module(_threadModule)[0] is None
except ImportError:
return False
def supportsINotify(self):
"""
Return C{True} if we can use the inotify API on this platform.
@since: 10.1
"""
try:
from twisted.python._inotify import INotifyError, init
except ImportError:
return False
try:
os.close(init())
except INotifyError:
return False
return True
platform = Platform()
platformType = platform.getType()
seconds = platform.seconds
|
BioModelTools/TemplateSB | run.py | Python | mit | 543 | 0.003683 | """
Running the template pre-processor standalone.
Input: Templated Antimony model (stdin)
Output: Expanded Antimony model (stdout)
"""
import fileinput
import os
import sys
directory = os.path.dirname(os.path.abspath(__file__))
path = os.path.join(directory, "T | emplateSB")
sys.path.append(path)
from template_processor import TemplateProc | essor
template_stg = ''
for line in fileinput.input():
template_stg += "\n" + line
processor = TemplateProcessor(template_stg)
expanded_stg = processor.do()
sys.stdout.write(expanded_stg)
|
bennahugo/RFIMasker | RFIMasker/version.py | Python | gpl-3.0 | 88 | 0.022727 | # Do not edit this file, pipeli | ne versioning is governed by git tags
__version__="1.0.1 | " |
netleibi/fastchunking | fastchunking/benchmark.py | Python | apache-2.0 | 988 | 0.003036 | import os
import time
import timeit
import fastchunking
if __name__ == '__main__':
print("Benchmarking RabinKarpChunking creation time...")
NUMBER = 10000
total_time = timeit.timeit('fastchunking.RabinKarpCDC(48, 0).create_chunker(128)',
setup='import fastchunking', number=NUMBER)
print("average creation time: {:f}s\n".format(total_time / NUMBER))
print("Benchmarking RabinKarpChunking chunking throughput...")
SIZE = 100 * 1024 * 1024 # 100 MiB
for chunk_size in (2 ** i for i in range(6, 16)):
chunker = fastchunking.RabinKarpCDC(48, 0).create_chunker(chunk_size)
content = os.urandom(SIZE)
t = time.time()
list(chunker.next_chunk_boundaries(content, 0))
msg = "chunking throughput (ch | unk size = {chunk_size:5d} bytes): {throughput:7.2f} MiB/s"
t = time.time() - t
print(msg.format(chunk_size=chunk_size, throughput=SIZE / 1024 / 1024 / t if t else float('i | nf')))
|
paurosello/frappe | frappe/tests/test_search.py | Python | mit | 1,419 | 0.026779 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import unittest
import frappe
from frappe.desk.search import search_link
class TestSearch(unittest.TestCase):
def test_search_field_sanitizer(self):
# pass
search_link('DocType', 'User', query=None, filters=None, page_length=20, searchfield='name')
result = frappe.response['results'][0]
self.assertTrue('User' in result['value'])
#raise exception on injection
self.assertRaises(frappe.DataError,
search_link, 'DocType', 'Customer', query=None, filters=None,
page_length=20, searchfield='1=1')
self.assertRaises(frappe.DataError,
search_link, 'DocType', 'Customer', query=None, filters=None,
page_length=20 | , searchfield='select * from tabSessions) --')
self.assertRaises(frappe.DataError,
search_link, 'DocType', 'Customer', query=None, filters=None,
page_length=20, searchfield='name or (select * from tabSessions)')
self.assertRaises(frappe.DataError,
search_link, 'DocType', 'Customer', query=None, filters=None,
page_length=20, searchfield='*')
self.assertRaises(frappe.DataError,
search_link, 'DocTy | pe', 'Customer', query=None, filters=None,
page_length=20, searchfield=';')
self.assertRaises(frappe.DataError,
search_link, 'DocType', 'Customer', query=None, filters=None,
page_length=20, searchfield=';')
|
QualiSystems/vCenterShell | package/cloudshell/cp/vcenter/models/DeployFromImageDetails.py | Python | apache-2.0 | 396 | 0.005051 | from cloudshell.cp.vcenter.models.vCenterVMFromImageResourceModel import vCenterVMFromImageResourceModel
class DeployFromImageDetails(object):
def __ini | t__(self, image_params, app_name):
""" |
:type image_params: vCenterVMFromImageResourceModel
:type app_name: str
:return:
"""
self.image_params = image_params
self.app_name = app_name |
qusp/orange3 | Orange/widgets/regression/owknnregression.py | Python | bsd-2-clause | 2,723 | 0.000367 | """
"""
import Orange.data
import Orange.regression.knn as knn
import Orange.classification
from Orange.preprocess.preprocess import Preprocess
from | Orange.widgets import widget, gui
from Orange.widgets.settings import Setting
class OWKNNRegression(widget.OWWidget):
name = "k Nearest Neighbors Regression"
description = "K-nearest neighbours learner/model."
icon = "icons/kNearestNeighbours.svg"
priority = 20
inputs = [("Data", Orange.data.Table, "set_data"),
("Preprocessor", Preprocess, "set_preprocessor")]
outputs = [("Learner", knn.KNNRegressionLearner),
("Predictor", Orange.c | lassification.SklModel)]
want_main_area = False
learner_name = Setting("k Nearest Neighbors Regression")
n_neighbors = Setting(5)
metric_index = Setting(0)
def __init__(self, parent=None):
super().__init__(parent)
self.preprocessors = ()
self.data = None
box = gui.widgetBox(self.controlArea, "Learner/Model Name")
gui.lineEdit(box, self, "learner_name")
box = gui.widgetBox(self.controlArea, "Neighbors")
gui.spin(box, self, "n_neighbors", 1, 100, label="Number of neighbors")
box = gui.widgetBox(box, "Metric")
box.setFlat(True)
box.layout().setContentsMargins(0, 0, 0, 0)
gui.comboBox(box, self, "metric_index",
items=["Euclidean", "Manhattan", "Maximal", "Mahalanobis"])
self.metrics = ["euclidean", "manhattan", "chebyshev", "mahalanobis"]
gui.button(self.controlArea, self, "Apply",
callback=self.apply, default=True)
layout = self.layout()
self.layout().setSizeConstraint(layout.SetFixedSize)
self.apply()
def set_data(self, data):
"""Set input training dataset."""
self.data = data
if data is not None:
self.apply()
def set_preprocessor(self, preproc):
"""Set preprocessor to apply on training data."""
if preproc is None:
self.preprocessors = None
else:
self.preprocessors = (preproc,)
self.apply()
def apply(self):
"""
Construct the learner and apply it on the training data if available.
"""
learner = knn.KNNRegressionLearner(
n_neighbors=self.n_neighbors,
metric=self.metrics[self.metric_index],
preprocessors=self.preprocessors
)
learner.name = self.learner_name
model = None
if self.data is not None:
model = learner(self.data)
model.name = self.learner_name
self.send("Learner", learner)
self.send("Predictor", model)
|
facebookresearch/ParlAI | parlai/chat_service/core/socket.py | Python | mit | 5,922 | 0.000844 | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this so | urce tree.
import errno
import json
import logging
import threading
import time
import websocket
import parlai.chat_service.utils.logging as log_utils
SOCKET_TIMEOUT = 6
# Socket handler
class ChatServiceMessageSocket:
"""
ChatServiceMessageSocket is a wrapper around websocket to forward messages from the
remote server to the ChatServiceManager.
"""
def __init__(self, server_url, port, message_callback):
| """
server_url: url at which the server is to be run
port: port for the socket to operate on
message_callback: function to be called on incoming message objects (format: message_callback(self, data))
"""
self.server_url = server_url
self.port = port
self.message_callback = message_callback
self.ws = None
self.last_pong = None
self.alive = False
# initialize the state
self.listen_thread = None
# setup the socket
self.keep_running = True
self._setup_socket()
def _safe_send(self, data, force=False):
if not self.alive and not force:
# Try to wait a second to send a packet
timeout = 1
while timeout > 0 and not self.alive:
time.sleep(0.1)
timeout -= 0.1
if not self.alive:
# don't try to send a packet if we're still dead
return False
try:
self.ws.send(data)
except websocket.WebSocketConnectionClosedException:
# The channel died mid-send, wait for it to come back up
return False
return True
def _ensure_closed(self):
try:
self.ws.close()
except websocket.WebSocketConnectionClosedException:
pass
def _send_world_alive(self):
"""
Registers world with the passthrough server.
"""
self._safe_send(
json.dumps(
{
'type': 'world_alive',
'content': {'id': 'WORLD_ALIVE', 'sender_id': 'world'},
}
),
force=True,
)
def _setup_socket(self):
"""
Create socket handlers and registers the socket.
"""
def on_socket_open(*args):
log_utils.print_and_log(logging.DEBUG, 'Socket open: {}'.format(args))
self._send_world_alive()
def on_error(ws, error):
try:
if error.errno == errno.ECONNREFUSED:
self._ensure_closed()
self.use_socket = False
raise Exception("Socket refused connection, cancelling")
else:
log_utils.print_and_log(
logging.WARN, 'Socket logged error: {}'.format(repr(error))
)
except BaseException:
if type(error) is websocket.WebSocketConnectionClosedException:
return # Connection closed is noop
log_utils.print_and_log(
logging.WARN,
'Socket logged error: {} Restarting'.format(repr(error)),
)
self._ensure_closed()
def on_disconnect(*args):
"""
Disconnect event is a no-op for us, as the server reconnects automatically
on a retry.
"""
log_utils.print_and_log(
logging.INFO, 'World server disconnected: {}'.format(args)
)
self.alive = False
self._ensure_closed()
def on_message(*args):
"""
Incoming message handler for messages from the FB user.
"""
packet_dict = json.loads(args[1])
if packet_dict['type'] == 'conn_success':
self.alive = True
return # No action for successful connection
if packet_dict['type'] == 'pong':
self.last_pong = time.time()
return # No further action for pongs
message_data = packet_dict['content']
log_utils.print_and_log(
logging.DEBUG, 'Message data received: {}'.format(message_data)
)
for message_packet in message_data['entry']:
for message in message_packet['messaging']:
self.message_callback(message)
def run_socket(*args):
url_base_name = self.server_url.split('https://')[1]
while self.keep_running:
try:
sock_addr = "wss://{}/".format(url_base_name)
self.ws = websocket.WebSocketApp(
sock_addr,
on_message=on_message,
on_error=on_error,
on_close=on_disconnect,
)
self.ws.on_open = on_socket_open
self.ws.run_forever(ping_interval=1, ping_timeout=0.9)
except Exception as e:
log_utils.print_and_log(
logging.WARN,
'Socket error {}, attempting restart'.format(repr(e)),
)
time.sleep(0.2)
# Start listening thread
self.listen_thread = threading.Thread(
target=run_socket, name='Main-Socket-Thread'
)
self.listen_thread.daemon = True
self.listen_thread.start()
time.sleep(1.2)
while not self.alive:
try:
self._send_world_alive()
except Exception:
pass
time.sleep(0.8)
|
BlackEarth/bxml | bxml/rng.py | Python | mpl-2.0 | 408 | 0.019608 |
from .xml import XML
class RNG(XML):
NS | = {
'a': "http://relaxng.org/ns/compatibility/annotations/1.0",
'epub': "http://www.idpf.org/2007/ops",
'sch': "http://purl.oclc.org/dsdl/schematron",
'html': "http://www.w3.org/1999/xhtml",
'r': "http://relaxng.org/ns/structure/1.0",
'datatypeL | ibrary': "http://www.w3.org/2001/XMLSchema-datatypes"
}
|
oemof/oemof_base | src/oemof/solph/groupings.py | Python | gpl-3.0 | 2,875 | 0 | # -*- coding: utf-8 -*-
"""Groupings needed on an energy system for it to work with solph.
If you want to use solph on an energy system, you need to create it with these
groupings specified like this:
.. code-block: python
from oemof.network import EnergySystem
import solph
energy_system = EnergySystem(groupings=solph.GROUPINGS)
SPDX-FileCopyrightText: Uwe Krien <krien@uni-bremen.de>
SPDX-FileCopyrightText: Simon Hilpert
SPDX-FileCopyrightText: Cord Kaldemeyer
SPDX-FileCopyrightText: Stephan Günther
SPDX-License-Identifier: MIT
"""
from oemof.network import groupings as groupings
from oemof.solph import blocks
def constraint_grouping(node, fallback=lambda *xs, **ks: None):
"""Grouping function for constraints.
This function can be passed in a list to :attr:`groupings` of
:class:`oemof.solph.network.EnergySystem`.
Parameters
----------
node : :class:`Node <oemof.network.Node`
The node for which the figure out a constraint group.
fallback : callable, optional
A function of one argument. If `node` doesn't have a `constraint_group`
attribute, this is used to group the node instead. Defaults to not
group the node at all.
"""
# TODO: Refactor this for looser coupling between modules.
# This code causes an unwanted tight coupling between the `groupings` and
# `network` modules, resulting in ha | ving to do an import at runtime in the
# init method of solph's `EnergySystem`. A better way would be to add a
# method (maybe `constraints`, `constraint_group`, `constraint_type` or
# something like that) to solph's node hierarchy, which gets overridden in
# each s | ubclass to return the appropriate value. Then we can just call the
# method here.
# This even gives other users/us the ability to customize/extend how
# constraints are grouped by overriding the method in future subclasses.
cg = getattr(node, "constraint_group", fallback)
return cg()
standard_flow_grouping = groupings.FlowsWithNodes(constant_key=blocks.Flow)
def _investment_grouping(stf):
if hasattr(stf[2], "investment"):
if stf[2].investment is not None:
return True
else:
return False
investment_flow_grouping = groupings.FlowsWithNodes(
constant_key=blocks.InvestmentFlow,
# stf: a tuple consisting of (source, target, flow), so stf[2] is the flow.
filter=_investment_grouping,
)
def _nonconvex_grouping(stf):
if hasattr(stf[2], "nonconvex"):
if stf[2].nonconvex is not None:
return True
else:
return False
nonconvex_flow_grouping = groupings.FlowsWithNodes(
constant_key=blocks.NonConvexFlow, filter=_nonconvex_grouping
)
GROUPINGS = [
constraint_grouping,
investment_flow_grouping,
standard_flow_grouping,
nonconvex_flow_grouping,
]
|
drayanaindra/shoop | shoop/core/models/product_media.py | Python | agpl-3.0 | 3,018 | 0.002319 | # -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import with_statement
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from easy_thumbnails.files import get_thumbnailer
from enumfields import Enum, EnumIntegerField
from filer.fields.file import FilerFileField
from parler.models import TranslatableModel, TranslatedFields
from shoop.core.fields import InternalIdentifierField
class ProductMediaKind(Enum):
GENERIC_FILE = 1
IMAGE = 2
DOCUMENTATION = 3
SAMPLE = 4
class Labels:
GENERIC_FILE = _('file')
IMAGE = _('image')
DOCUMENTATION = _('documentation')
SAMPLE = _('sample')
@python_2_unicode_compatible
class ProductMedia(TranslatableModel):
identifier = InternalIdentifierField()
product = models.ForeignKey("Product", related | _name="media")
shops = models.ManyToManyField("Shop", related_name="product_media")
kind = EnumIntegerField(
ProductMediaKind, db_index | =True, default=ProductMediaKind.GENERIC_FILE, verbose_name=_('kind')
)
file = FilerFileField(blank=True, null=True, verbose_name=_('file'))
external_url = models.URLField(blank=True, null=True, verbose_name=u'URL')
ordering = models.IntegerField(default=0)
# Status
enabled = models.BooleanField(db_index=True, default=True, verbose_name=_("enabled"))
public = models.BooleanField(default=True, blank=True, verbose_name=_('public (shown on product page)'))
purchased = models.BooleanField(
default=False, blank=True, verbose_name=_('purchased (shown for finished purchases)')
)
translations = TranslatedFields(
title=models.CharField(blank=True, max_length=128, verbose_name=_('title')),
description=models.TextField(blank=True, verbose_name=_('description')),
)
class Meta:
verbose_name = _('product attachment')
verbose_name_plural = _('product attachments')
ordering = ["ordering", ]
def __str__(self): # pragma: no cover
return self.effective_title
@property
def effective_title(self):
title = self.safe_translation_getter("title")
if title:
return title
if self.file_id:
return self.file.label
if self.external_url:
return self.external_url
return _('attachment')
@property
def url(self):
if not self.public:
raise ValueError("`get_effective_url()` may not be used on non-public media")
if self.file_id:
return self.file.url
else:
return self.external_url
@property
def easy_thumbnails_thumbnailer(self):
if self.file_id:
return get_thumbnailer(self.file)
|
svb2357/projecteuler | 14/answer.py | Python | gpl-2.0 | 492 | 0.012195 | #!/usr/bin/env python2
# Solution to project euler problem 14
def collatzlength(start):
x = start
cnt = 0
while x != 1:
x = nextcollatz(x)
| cnt += 1
return cnt
def nextcollatz(x):
if x % 2 == 0:
return x/2
else:
return 3*x + 1
def solution(n):
mx = (0,0)
for i in xrange(2, n + 1):
ln = collatzlength(i)
if ln > mx[1]:
mx | = (i, ln)
print mx
return mx
solution(1000000)
|
realestate-com-au/bespin | bespin/operations/plan.py | Python | mit | 829 | 0.006031 | from bespin.errors import BadOption, MissingPlan
from in | put_algorithms.spec_base import NotSpecified
class Plan(object):
@classmethod
def find_stacks(kls, configuration, stacks, plan):
if plan in (None, NotSpecified):
raise BadOption("Please specify a plan", available=list(configuration["plans"].keys()))
if plan not in configuration["plans"]:
raise MissingPlan(wanted=plan, available=configuration["plans"].keys())
mi | ssing = []
for stack in configuration["plans"][plan]:
if stack not in stacks:
missing.append(stack)
if missing:
raise BadOption("Some stacks in the plan don't exist", missing=missing, available=list(stacks.keys()))
for stack in configuration["plans"][plan]:
yield stack
|
Phexcom/product-launcher | lwc/settings/base.py | Python | gpl-3.0 | 3,530 | 0.005099 | """
Django settings for lwc project.
Generated by 'django-admin startproject' using Django 1.9.7.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Quick-start development settings - unsuitable | for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '7fm_f66p8e!p%o=sr%d&cue(%+bh@@j_y6*b3d@t^c5%i8)1)2'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
#Share url
SH | ARER_URL = "http://127.0.0.1:8000/?ref="
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'joins',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'lwc.middleware.ReferMiddleware',
]
ROOT_URLCONF = 'lwc.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'lwc.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static', 'static_root')
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static', 'static_dirs'),
)
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'static', 'media')
|
wodo/WebTool3 | webtool/server/migrations/0025_instruction_special.py | Python | bsd-2-clause | 519 | 0.001927 | # -*- coding: utf-8 | -*-
# Generated by Django 1.11.6 on 2018-12-11 04:11
from __future__ import unicode_literals
from django.db import migrations, mod | els
class Migration(migrations.Migration):
dependencies = [
('server', '0024_auto_20181210_2022'),
]
operations = [
migrations.AddField(
model_name='instruction',
name='special',
field=models.BooleanField(default=False, help_text='Kreative Kursinhalte', verbose_name='Spezialkurs'),
),
]
|
thesgc/chembiohub_ws | deployment/settings/default.py | Python | gpl-3.0 | 4,286 | 0.012599 | from .base import *
import os
import pwd
import sys
DEBUG=True
TEMPLATE_DEBUG = DEBUG
def get_username():
return pwd.getpwuid( os.getuid() )[ 0 ]
CONDA_ENV_PATH = os.getenv("CONDA_ENV_PATH")
ENV_NAME = os.path.split(CONDA_ENV_PATH)[1]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': '%s_db' % ENV_NAME, # Or path to database file if using sqlite3.
'USER': get_username(), # Not used with sqlite3.
'PASSWORD': '', # Not used witis oracle
'HOST': os.getenv("CONDA_ENV_PATH") + '/var/postgressocket', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
'TEST_NAME' : 'dev_db'
},
}
SESSION_COOKIE_NAME = '%s_sessionid' % ENV_NAME
CSRF_COOKIE_NAME = '%scsrftoken' % ENV_NAME
STATIC_ROOT = '%s/deployment/static' % BASE_DIR
MEDIA_ROOT = '%s/var/media/' % CONDA_ENV_PATH
FLOWJS_PATH = MEDIA_ROOT + 'flow'
LOGIN_REDIRECT_URL = '/%s/#/projects/list' % ENV_NAME
LOGIN_URL = '/%s/login' % ENV_NAME
LOGOUT_REDIRECT_URL = '/%s/login' % ENV_NAME
WEBSERVICES_NAME='%s/api' % ENV_NAME
INCHI_BINARIES_LOCATION = {"1.02" :"%s/var/INCHI-1-BIN/linux/64bit/inchi-1" % CONDA_ENV_PATH}
SESSION_CACHE_ALIAS= ENV_NAME
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "redis://127.0.0.1:6379/1",
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
}
},
ENV_NAME: {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "redis://127.0.0.1:6379/1",
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
}
}
}
ES_PREFIX = ENV_NAME
STATIC_URL = '/%s/static/' % ENV_NAME
STATICFILES_DIRS = (
'%s/src/ng-chem' % BASE_DIR,
'%s/src/ng-chem/dist' % BASE_DIR,
)
#Add a template dir so that the html content of index.html can be brought in as a static template when in production so login is handled by Django - see base.py in cbh_chembl_ws_extension (Index() view)
TEMPLATE_DIRS = (
'%s/src/ng-chem/' % BASE_DIR,
)
#Set to 'DEBUG' to view all SQL
DEBUG_SQL = 'INFO'
LOGGIN | G = {
'version': 1,
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d % | (message)s'
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'handlers': {
'console': { #Logging is sent out to standard out
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
},
'loggers': {
'django': {
'handlers': ['console'],
'level': DEBUG_SQL,
'propagate': True,
},
},
}
for app in INSTALLED_APPS:
LOGGING["loggers"][app] = {
'handlers': ['console'],
'level': 'DEBUG',
'propagate': True,
}
Q_CLUSTER = {
'name': 'DJRedis',
'workers': 12,
'timeout': None,
'django_redis': ENV_NAME,
'catch_up' : False
}
ROOT_URLCONF = 'deployment.urls_v2'
REDIS_FOR_ID_GENERATOR = ENV_NAME
try:
from .secret import *
except ImportError:
print "No Secret settings, using default secret key which is insecure"
try:
import django_webauth
INSTALLED_APPS = list(INSTALLED_APPS) + ["django_webauth",]
except ImportError:
pass
for key, field in TABULAR_DATA_SETTINGS["schema"].items():
field["export_name"] = "%s:%s:%s" % (ID_PREFIX, ENV_NAME, field["knownBy"])
|
mrquim/mrquimrepo | script.module.livestreamer/lib/livestreamer/plugins/oldlivestream.py | Python | gpl-2.0 | 697 | 0.002869 | import re
from livestreamer.plugin import Plugin
from livestreamer.stream import HLSStream
PLAYLIST_URL = "http://x{0}x.api.channel.livestream.com/3.0/playlist.m3u8"
_url_re = re.compile("http(s)?://(www\.)?livestream.com/(?P<channel>[^&?/]+)")
class OldLivestream(Plugin):
@classmethod
def can_handle_url(self, url):
return _url_re.match(url)
def _get_streams(self):
match = _url_re.match(self.url)
channel = match.group("channel")
channel = channel.replace("_", "-")
playlist_url | = PLAYLIST_URL.format(channel)
return HLSStream.parse_variant_playlist(self.session, playlist_url, check_stre | ams=True)
__plugin__ = OldLivestream
|
mick001/utility-scripts | print_autocad_drawings.py | Python | gpl-2.0 | 8,162 | 0.010904 | # -*- coding: utf-8 -*-
"""
Created on Sun Sep 3 17:20:04 2017
@author: Michy
@name: AutoCAD drawing printer BOT.
@description:
This program is a BOT that prints to pdf all the .dwg files in a given folder.
Given a folder (data_wd) the program will try to print every .dwg file in that
folder.
IMPORTANT NOTE: This program works assuming that your AutoCAD installation is
configured as mine. Which is a very bold assumption. The program works in the
following manner:
1) | It gathers all the .dwg files in the given directory.
2) For each file, it opens the file with the default program (which,
is assumed to be AutoCAD) then CTRL + P is pressed to open the print menu.
Then, the k | eys "m" and "i" are pressed in sequence, since this is enough
to select the PDF printer in my case. Then "Enter" is pressed, the name
of the output pdf file is entered, "enter" is pressed again, and then finally
the drawing is closed using "CTRL + F4".
Please make sure that this procedure applies to your machine as well. Otherwise
make custom adjustments to the Pyautogui procedure as needed.
The program outputs a log in a file named logfile.log (how original!). Example of
a log file output is given below:
INFO:DWG Printer bot V. 1.0:Program ran at 2017-09-03 21:35:09. Directory chosen: C:\Users\Michy\Desktop\autocad_tutorial
INFO:DWG Printer bot V. 1.0:Printed Drawing2.dwg
INFO:DWG Printer bot V. 1.0:Printed Drawing2_1.dwg
INFO:DWG Printer bot V. 1.0:Printed Drawing3.dwg
INFO:DWG Printer bot V. 1.0:Printed Drawing3_1.dwg
INFO:DWG Printer bot V. 1.0:Program ended at 2017-09-03 21:36:14
@examples:
Example and more details will be provided soon at www.firsttimeprogrammer.blogspot.com
"""
#-------------------------------------------------------------------------------
# Imports
import os
import sys
import time
import psutil
import logging
import pyautogui as pgui
from datetime import datetime
VERSION = '1.0'
#-------------------------------------------------------------------------------
# Functions
def extract_selected_format(directory_path, file_extension='dwg'):
# This function extracts all the files of the selected format from the
# directory specified in the variable data_wd. Format is recognized through
# the extension.
#
# Arguments:
# directory_path - string - path from where to extract files.
# file_extension - string - extension of the file. Example: 'dwg'
#
# Returns: List of complete path to each file.
#
files = os.listdir(directory_path)
files = [file for file in files if file.split('.')[1] == file_extension]
files = [os.path.join(directory_path, file) for file in files]
return files
def check_program_running(p_name):
# Checks if the program p_name is currently running.
#
# Arguments:
# p_name - string - name of the program. Example: 'acad.exe'
#
# Returns: True if the program is running, False otherwise.
#
processes = [psutil.Process(p) for p in psutil.pids()]
processes = [p.name().lower() for p in processes]
if p_name in processes:
return True
else:
return False
def check_file_exist(directory_path ,file_name):
# Checks if the file file_name exists
#
# Arguments:
# directory_path - string - directory where to check.
# file_name - string - name of the file to be checked.
#
# Returns: True if it exists, False otherwise
#
files = os.listdir(directory_path)
printed = file_name in files
return printed
def print_file_pdf(file_path, directory_path, logger):
    # Prints to pdf all the dwg files in the path specified
    #
    # Arguments:
    # file_path - string - path containing the dwg files.
    # directory_path - string - directory where the file is.
    # logger - a logger.
    #
    # Returns: exit_status - int - 1 if Critical error occurred, 0 otherwise
    #
    # NOTE(review): this routine drives the AutoCAD GUI via pyautogui
    # keystrokes, so the sleeps below are timing-critical and the statement
    # order must not be changed.
    #
    file_name = file_path.split("\\")[-1] # Get name of the file
    # NOTE(review): replace() substitutes EVERY occurrence of "dwg", so a
    # name like "dwgplan.dwg" would become "pdfplan.pdf" -- confirm that
    # input names only contain "dwg" as the extension.
    file_name_pdf = file_name.replace("dwg", "pdf") # Get name of the output file
    is_printed = check_file_exist(directory_path, file_name_pdf) # Check if already existing
    exit_status = 0 # 1 only if critical error occurs
    if not is_printed:
        program_runs = check_program_running("acad.exe")
        # Opens file with default program (Autocad for .dwg files).
        # If opening fails, returns 1, else 0.
        exit_status = os.system(" ".join(["start", file_path]))
        if exit_status == 0:
            if program_runs:
                time.sleep(5)
            else:
                # Autocad is slow to start up if it is not already running.
                time.sleep(30)
            print(" ".join(["Now printing file", file_name]))
            # Starting printing sequence
            pgui.hotkey("ctrl","p")
            # "mi" presumably pre-selects the "Microsoft Print to PDF"
            # printer in the plot dialog -- TODO confirm on target machine.
            pgui.typewrite("mi")
            pgui.keyDown("enter")
            pgui.keyUp("enter")
            time.sleep(1)
            # Type the output file name into the save dialog and confirm.
            pgui.typewrite(file_name_pdf)
            pgui.keyDown("enter")
            pgui.keyUp("enter")
            time.sleep(3)
            # Closing current drawing
            pgui.hotkey("ctrl", "F4")
            # Answer "n" (do not save changes) to the close prompt.
            pgui.keyDown("n")
            pgui.keyUp("n")
            # Re-check the output directory to verify the print succeeded.
            is_printed = check_file_exist(directory_path, file_name_pdf)
            if is_printed:
                logger.log(logging.INFO, " ".join(["Printed", file_name]))
            else:
                logger.log(logging.CRITICAL, " ".join(["NOT Printed", file_name]))
                exit_status = 1
        else:
            logger.log(logging.ERROR, "".join(["Could not open ", file_name]))
            logger.log(logging.ERROR, " ".join(["NOT Printed", file_name]))
    else:
        logger.log(logging.INFO, " ".join(["Already printed", file_name]))
    time.sleep(1)
    return exit_status
def handle_critical_errors(error_count, check_at_count=2):
    # Two or more critical errors may indicate a severe malfunctioning in the
    # program. This function displays a confirmation box asking the user if
    # they want to continue or stop the program after check_at_count critical
    # errors occurred.
    #
    # Arguments:
    # error_count - int - count of the critical errors (the original header
    #                     said "string", but callers accumulate an int).
    # check_at_count - int - check after check_at_count critical errors occurred.
    #
    # Returns: void. Exits the whole program via sys.exit() on "Cancel".
    #
    if error_count >= check_at_count:
        # Fix: the message previously hardcoded "2" even when a different
        # check_at_count was passed; the threshold is now interpolated.
        value = pgui.confirm("""{} or more critical errors have occurred.
        Would you like to continue (click "ok")
        or cancel?""".format(check_at_count))
        if value == 'Cancel':
            sys.exit()
#-------------------------------------------------------------------------------
# Main
if __name__ == '__main__':
    # Setting input data directory
    data_wd = "C:\\Users\\Michy\\Desktop\\autocad_tutorial"
    full_file_path = extract_selected_format(data_wd)
    # Fix: the list was wrapped in another list, printing "[[...]]".
    print("\nFollowing files will be printed: {}".format(full_file_path))
    # Half-second pause between every pyautogui action.
    pgui.PAUSE = 0.5
    time_stamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    error_count = 0
    logging.basicConfig(filename = 'logfile.log', level=logging.DEBUG)
    logger = logging.getLogger('DWG Printer bot V. {}'.format(VERSION))
    logger.log(logging.INFO,
               "Program ran at {}. Directory chosen: {}".format(time_stamp, data_wd))
    # Start printing: each failed print increments error_count, and
    # handle_critical_errors() asks the user whether to keep going.
    for file in full_file_path:
        try:
            error_count += print_file_pdf(file, data_wd, logger)
            print("Exiting current printing call...\n")
            handle_critical_errors(error_count, 2)
        except Exception as e:
            print(str(e))
    final_time_stamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    logger.log(logging.INFO, "Program ended at {}\n\n".format(final_time_stamp))
    pgui.alert("Job ended!")
|
Hawk94/dust | backend/users/test/test_views.py | Python | mit | 1,735 | 0.000576 | from django.urls import reverse
from django.forms.models import model_to_dict
from django.contrib.auth.hashers import check_password
from nose.tools import ok_, eq_
from rest_framework.test import APITestCase
from faker import Faker
from ..models import U | ser
from .factories import UserFactory
fake = Faker()
class TestUserAPI(APITestCase):
    """
    Tests the /users endpoint.
    """
    # Fix: a stray ' |' artifact line between the two test methods broke
    # the file's syntax; it has been removed.

    def setUp(self):
        self.url = reverse('user-list')
        self.user_data = model_to_dict(UserFactory.build())

    def test_post_request_with_no_data_fails(self):
        response = self.client.post(self.url, {})
        eq_(response.status_code, 400)

    def test_post_request_with_valid_data_succeeds(self):
        response = self.client.post(self.url, self.user_data)
        eq_(response.status_code, 201)
        user = User.objects.get(pk=response.data.get('id'))
        eq_(user.username, self.user_data.get('username'))
        # The raw password must never be stored verbatim; check it hashes.
        ok_(check_password(self.user_data.get('password'), user.password))
class TestUserDetailAPI(APITestCase):
    """Tests the /users/:id detail endpoint."""

    def setUp(self):
        self.user = UserFactory()
        self.url = reverse('user-detail', kwargs={'pk': self.user.pk})
        # Authenticate every request of this test case as self.user.
        token = self.user.auth_token
        self.client.credentials(HTTP_AUTHORIZATION='Token {}'.format(token))

    def test_get_request_returns_a_given_user(self):
        response = self.client.get(self.url)
        eq_(response.status_code, 200)

    def test_put_request_updates_a_user(self):
        updated_name = fake.first_name()
        response = self.client.put(self.url, {'first_name': updated_name})
        eq_(response.status_code, 200)
        # Re-fetch from the database to confirm the update was persisted.
        refreshed = User.objects.get(pk=self.user.id)
        eq_(refreshed.first_name, updated_name)
|
semk/voldemort | voldemort/config.py | Python | apache-2.0 | 1,783 | 0 | # -*- coding: utf-8 -*-
#
# Voldemort config
#
# @author: Sreejith K
# Created On 19th Sep 2011
import os
import logging
from yaml import load, dump, Loader, Dumper
log = logging.getLogger(__name__)
DEFAULT_CONFIG = """\
# voldemort configuration file
layout_dirs :
- layout
- include
posts_dir : posts
site_dir : _site
post_url : "%Y/%m/%d"
paginate : 5
"""
class Config(object):
    """Lightweight attribute wrapper: every key of the given mapping
    becomes an instance attribute.
    """
    def __init__(self, dict):
        # NOTE: the parameter name shadows the builtin `dict`; it is kept
        # unchanged for backward compatibility with keyword callers.
        vars(self).update(dict)
def load_config(work_dir, name='settings.yaml'):
    """Loads the configuration from the working directory. Else loads
    the default config.

    Arguments:
    work_dir -- directory expected to contain the settings file
    name     -- file name of the YAML settings file

    Returns a Config object with every default key filled in and the
    layout/posts/site paths made absolute relative to work_dir.

    Repairs: 'log' and 'os.path' below were split by stray ' | '
    artifacts in the previous revision.
    """
    log.info('Loading voldemort configuration')
    config_file = os.path.join(work_dir, name)
    if not os.path.exists(config_file):
        # Offer to create a default settings file; empty answer means yes.
        write_config = raw_input(
            'No configuration file found. Write default config? [Y/n]: ')
        write_config = write_config in ('Y', 'y', '')
        if write_config:
            log.info('Writing default config at %s' % config_file)
            with open(config_file, 'w') as f:
                f.write(DEFAULT_CONFIG)
        # NOTE(review): if the user declines, the open() below raises
        # IOError because the file still does not exist -- confirm whether
        # that is the intended behaviour.
    # read the config file
    with open(config_file, 'r') as f:
        config = load(f, Loader=Loader)
    # add the missing configurations: file values override the defaults
    default_config = load(DEFAULT_CONFIG, Loader=Loader)
    default_config.update(config)
    config = Config(default_config)
    # fix the paths (make them absolute relative to work_dir)
    config.layout_dirs = [os.path.join(
        work_dir, ld) for ld in config.layout_dirs]
    config.posts_dir = os.path.join(work_dir, config.posts_dir)
    config.site_dir = os.path.join(work_dir, config.site_dir)
    return config
|
anchore/anchore | anchore/anchore-modules/queries/get-retrieved-files.py | Python | apache-2.0 | 3,253 | 0.004304 | #!/usr/bin/env python
import sys
import os
import re
import traceback
import tarfile, io
import anchore.anchore_utils
def get_retrieved_file(imgid, srcfile, dstdir):
    # Extract stored files of image `imgid` matching `srcfile` into
    # <dstdir>/<imgid>.
    #
    # Arguments:
    # imgid - string - image identifier whose stored-file tarballs are read.
    # srcfile - string - exact stored path to extract, or 'all' for everything.
    # dstdir - string - local destination root directory.
    #
    # Returns: list of [stored_name, extracted_local_path] pairs.
    ret = list()
    extractall = False
    if srcfile == 'all':
        extractall = True
    thedstdir = os.path.join(dstdir, imgid)
    # Collect every tarball holding stored files: one per namespace when
    # namespaces exist, otherwise the legacy 'retrieve_files' tarball.
    tarfiles = list()
    namespaces = anchore.anchore_utils.load_files_namespaces(imgid)
    if namespaces:
        for namespace in namespaces:
            stored_data_tarfile = anchore.anchore_utils.load_files_tarfile(imgid, namespace)
            if stored_data_tarfile:
                tarfiles.append(stored_data_tarfile)
    else:
        stored_data_tarfile = anchore.anchore_utils.load_files_tarfile(imgid, 'retrieve_files')
        if stored_data_tarfile:
            tarfiles.append(stored_data_tarfile)
    for thetarfile in tarfiles:
        filetar = tarfile.open(thetarfile, mode='r:gz', format=tarfile.PAX_FORMAT)
        for ff in filetar.getmembers():
            # Members are archived under an "imageroot" prefix; match the
            # requested path against the remainder.
            patt = re.match("imageroot("+re.escape(srcfile)+")", ff.name)
            if extractall or patt:
                filetar.extract(ff, thedstdir)
                # Report the in-image name (prefix stripped) plus where the
                # member landed on the local disk.
                scrubbed_name = re.sub("imageroot", "", ff.name)
                ret.append([scrubbed_name, os.path.join(thedstdir, ff.name)])
        filetar.close()
    if namespaces:
        # NOTE(review): del_files_cache() is called once per namespace with
        # the same imgid argument -- looks like it should either receive the
        # namespace or run only once; confirm against anchore.anchore_utils
        # before changing.
        for namespace in namespaces:
            anchore.anchore_utils.del_files_cache(imgid)
    return(ret)
# main routine
try:
config = anchore.anchore_utils.init_query_cmdline(sys.argv, "params: <source_filename> <destination_dir> ...\nhelp: Extract <source_filename> from stored files and copy to local host at <destination_dir>/<source_filename>. Use 'all' as <source_filename> to extract all stored files into <destination_dir>")
except Exception as err:
print str(err)
sys.exit(1)
if not config:
sys.exit(0)
if len(config['params']) < 2:
print "Query requires input: <source_filename> <destination_dir> ..."
sys.exit(1)
srcfile = config['params'][0]
dstdir = config['params'][1]
warns = list()
outlist = list()
outlist.append(["Image_Id", "Repo_Tags", "Stored_File_Name", "Output_Location"])
tags = "none"
if config['meta']['humanname']:
tags = config['meta']['humanname']
imgid = config['meta']['shortId']
try:
# handle the good case, something is found resulting in data matching the required columns
retlist = get_retrieved_file(config['imgid'], srcfile, dstdir)
if retlist:
for ret in retlist:
| srcname = ret[0]
dstname = ret[1]
outlist.append([imgid, tags, srcname, dstname])
else:
warns.append("Could not find any stored files matching input '"+str(srcfile)+"' in image's stored files")
except Exception as err:
# handle the case where something wrong happened
warns.append("Unable to load stored files data - try re-analyzing image")
import traceback
traceback.print_exc()
print str(err)
# handle the no | match case
if len(outlist) < 1:
#outlist.append(["NOMATCH", "NOMATCH", "NOMATCH"])
pass
anchore.anchore_utils.write_kvfile_fromlist(config['output'], outlist)
if len(warns) > 0:
anchore.anchore_utils.write_plainfile_fromlist(config['output_warns'], warns)
sys.exit(0)
|
jbd/treewatcher | examples/monitor-threaded.py | Python | gpl-3.0 | 4,149 | 0.00482 | #!/usr/bin/env python
#-*- coding: utf-8 -*-
#
# Copyright (c) 2010 Jean-Baptiste Denis.
#
# This is free software; you can redistribute it and/or modify it under the
# terms of the GNU General Public License version 3 and superior as published by the Free
# Software Foundation.
#
# A copy of the license has been included in the COPYING file.
import sys
import os
import logging
import threading
try:
# first we try system wide
import treewatcher
except ImportError:
# if it fails, we try it from the project source directory
sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.path.pardir))
import treewatcher
from treewatcher import ThreadedEventsCallbacks, choose_source_tree_monitor
_LOGGER = logging.getLogger('_LOGGER')
_LOGGER.setLevel(logging.INFO)
_LOGGER.addHandler(logging.StreamHandler())
class MonitorCallbacks(ThreadedEventsCallbacks):
    """
    Example callbacks which log each event together with its path,
    whether it targets a directory, and the worker thread's name.
    This is a threaded type callbacks object: the methods run on the
    monitor's worker threads, so logging is used to prevent messy output.
    Protect any shared state you add here (e.g. with a Lock).

    Fixes: 'current_thread' in attrib() was split by a stray ' | '
    artifact; log messages now use lazy %-style arguments instead of
    eager string interpolation (identical output, formatting deferred).
    """
    def create(self, path, is_dir):
        """ callback called on a 'IN_CREATE' event """
        _LOGGER.info("create: %s %s %s", path, is_dir, threading.current_thread().name)
    def delete(self, path, is_dir):
        """ callback called on a 'IN_DELETE' event """
        _LOGGER.info("delete: %s %s %s", path, is_dir, threading.current_thread().name)
    def close_write(self, path, is_dir):
        """ callback called on a 'IN_CLOSE_WRITE' event """
        _LOGGER.info("close_write: %s %s %s", path, is_dir, threading.current_thread().name)
    def moved_from(self, path, is_dir):
        """ callback called on a 'IN_MOVED_FROM' event """
        _LOGGER.info("moved_from: %s %s %s", path, is_dir, threading.current_thread().name)
    def moved_to(self, path, is_dir):
        """ callback called on a 'IN_MOVED_TO' event """
        _LOGGER.info("moved_to: %s %s %s", path, is_dir, threading.current_thread().name)
    def modify(self, path, is_dir):
        """ callback called on a 'IN_MODIFY' event """
        _LOGGER.info("modify: %s %s %s", path, is_dir, threading.current_thread().name)
    def attrib(self, path, is_dir):
        """ callback called on a 'IN_ATTRIB' event """
        _LOGGER.info("attrib: %s %s %s", path, is_dir, threading.current_thread().name)
    def unmount(self, path, is_dir):
        """ callback called on a 'IN_UNMOUNT' event """
        _LOGGER.info("unmount: %s %s %s", path, is_dir, threading.current_thread().name)
if __name__ == '__main__':
# Yeah, command line parsing
if len(sys.argv) < 2:
print "usage:", sys.argv[0], "directory"
sys.exit(1)
# we check if the provided string is a valid directory
path_to_watch = sys.argv[1]
if not os.path.isdir(path_to_watch):
print | path_to_watch, "is not a valid directory."
sys.exit(2)
# We instanciate our callbacks object
callbacks = MonitorCallbacks()
# we get a source tree monitor
stm = choose_source_tree_monitor()
# we set our callbacks
stm.set_events_callbacks(callbacks)
# we will use two threads to handle callbacks
stm.set_workers_number(2)
# we start the monitor
stm.start()
# after that, we can add the directory we want to watch
stm.add_source_dir(path_to_watch)
print "Watching directory", path_to_watch
print "Open a new terminal, and create/remove some folders and files in the", path_to_watch, "directory"
print "Ctrl-C to exit..."
try:
# without specific arguments, the next call will block forever
# open a terminal, and create/remove some folders and files
# this will last forever. use Ctrl-C to exit.
stm.process_events()
# see monitor-timeout-serial.py for an example with a timeout argument
except KeyboardInterrupt:
print "Stopping monitor."
finally:
# clean stop
stm.stop()
|
Luthaf/Zested | Zested.py | Python | bsd-2-clause | 91 | 0 | #!/u | sr/bin/env python3
from zested.main import main
if __name__ == "__main__" | :
main()
|
FireWRT/OpenWrt-Firefly-Libraries | staging_dir/target-mipsel_1004kc+dsp_uClibc-0.9.33.2/usr/lib/python3.4/idlelib/idle_test/test_searchdialogbase.py | Python | gpl-2.0 | 5,860 | 0.001706 | '''Unittests for idlelib/SearchDialogBase.py
Coverage: 99%. The only thing not covered is inconsequential --
testing skipping of suite when self.needwrapbutton is false.
'''
import unittest
from test.support import requires
from tkinter import Tk, Toplevel, Frame, Label, BooleanVar, StringVar
from idlelib import SearchEngine as se
from idlelib import SearchDialogBase as sdb
from idlelib.idle_test.mock_idle import Func
from idlelib.idle_test.mock_tk import Var, Mbox
# The following could help make some tests gui-free.
# However, they currently make radiobutton tests fail.
##def setUpModule():
## # Replace tk objects used to initialize se.SearchEngine.
## se.BooleanVar = Var
## se.StringVar = Var
##
##def tearDownModule():
## se.BooleanVar = BooleanVar
## se.StringVar = StringVar
class SearchDialogBaseTest(unittest.TestCase):
    """GUI tests for SearchDialogBase widget construction on a real Tk root.

    Repairs: 'self' in test_make_entry and 'columnspan' below were split
    by stray ' | ' artifacts in the previous revision.
    """

    @classmethod
    def setUpClass(cls):
        requires('gui')
        cls.root = Tk()

    @classmethod
    def tearDownClass(cls):
        cls.root.destroy()
        del cls.root

    def setUp(self):
        self.engine = se.SearchEngine(self.root)  # None also seems to work
        self.dialog = sdb.SearchDialogBase(root=self.root, engine=self.engine)

    def tearDown(self):
        self.dialog.close()

    def test_open_and_close(self):
        # open calls create_widgets, which needs default_command
        self.dialog.default_command = None

        # Since text parameter of .open is not used in base class,
        # pass dummy 'text' instead of tk.Text().
        self.dialog.open('text')
        self.assertEqual(self.dialog.top.state(), 'normal')
        self.dialog.close()
        self.assertEqual(self.dialog.top.state(), 'withdrawn')

        self.dialog.open('text', searchphrase="hello")
        self.assertEqual(self.dialog.ent.get(), 'hello')
        self.dialog.close()

    def test_create_widgets(self):
        # Replace the four builders with mocks and check each was invoked.
        self.dialog.create_entries = Func()
        self.dialog.create_option_buttons = Func()
        self.dialog.create_other_buttons = Func()
        self.dialog.create_command_buttons = Func()
        self.dialog.default_command = None
        self.dialog.create_widgets()
        self.assertTrue(self.dialog.create_entries.called)
        self.assertTrue(self.dialog.create_option_buttons.called)
        self.assertTrue(self.dialog.create_other_buttons.called)
        self.assertTrue(self.dialog.create_command_buttons.called)

    def test_make_entry(self):
        equal = self.assertEqual
        self.dialog.row = 0
        self.dialog.top = Toplevel(self.root)
        entry, label = self.dialog.make_entry("Test:", 'hello')
        equal(label['text'], 'Test:')
        self.assertIn(entry.get(), 'hello')
        egi = entry.grid_info()
        equal(int(egi['row']), 0)
        equal(int(egi['column']), 1)
        equal(int(egi['rowspan']), 1)
        equal(int(egi['columnspan']), 1)
        equal(self.dialog.row, 1)

    def test_create_entries(self):
        self.dialog.row = 0
        self.engine.setpat('hello')
        self.dialog.create_entries()
        self.assertIn(self.dialog.ent.get(), 'hello')

    def test_make_frame(self):
        self.dialog.row = 0
        self.dialog.top = Toplevel(self.root)
        frame, label = self.dialog.make_frame()
        self.assertEqual(label, '')
        self.assertIsInstance(frame, Frame)
        frame, label = self.dialog.make_frame('testlabel')
        self.assertEqual(label['text'], 'testlabel')
        self.assertIsInstance(frame, Frame)

    def btn_test_setup(self, meth):
        # Shared fixture: fresh Toplevel + reset row, then run the builder.
        self.dialog.top = Toplevel(self.root)
        self.dialog.row = 0
        return meth()

    def test_create_option_buttons(self):
        e = self.engine
        for state in (0, 1):
            for var in (e.revar, e.casevar, e.wordvar, e.wrapvar):
                var.set(state)
            frame, options = self.btn_test_setup(
                    self.dialog.create_option_buttons)
            for spec, button in zip (options, frame.pack_slaves()):
                var, label = spec
                self.assertEqual(button['text'], label)
                self.assertEqual(var.get(), state)
                if state == 1:
                    button.deselect()
                else:
                    button.select()
                self.assertEqual(var.get(), 1 - state)

    def test_create_other_buttons(self):
        for state in (False, True):
            var = self.engine.backvar
            var.set(state)
            frame, others = self.btn_test_setup(
                self.dialog.create_other_buttons)
            buttons = frame.pack_slaves()
            for spec, button in zip(others, buttons):
                val, label = spec
                self.assertEqual(button['text'], label)
                if val == state:
                    # hit other button, then this one
                    # indexes depend on button order
                    self.assertEqual(var.get(), state)
                    buttons[val].select()
                    self.assertEqual(var.get(), 1 - state)
                    buttons[1-val].select()
                    self.assertEqual(var.get(), state)

    def test_make_button(self):
        self.dialog.top = Toplevel(self.root)
        self.dialog.buttonframe = Frame(self.dialog.top)
        btn = self.dialog.make_button('Test', self.dialog.close)
        self.assertEqual(btn['text'], 'Test')

    def test_create_command_buttons(self):
        self.dialog.create_command_buttons()
        # Look for close button command in buttonframe
        closebuttoncommand = ''
        for child in self.dialog.buttonframe.winfo_children():
            if child['text'] == 'close':
                closebuttoncommand = child['command']
        self.assertIn('close', closebuttoncommand)
if __name__ == '__main__':
    # verbosity=2 lists every test name; exit=2 is forwarded to unittest.main.
    unittest.main(verbosity=2, exit=2)
|
SARL-Engineering/ZScan_Processor | Framework/TrayNotifier/TrayNotifierCore.py | Python | lgpl-3.0 | 3,597 | 0.00278 | # coding=utf-8
#####################################
# Imports
#####################################
# Python native imports
from PyQt5 import QtCore, QtWidgets, QtGui
import logging
#####################################
# Global Variables
#####################################
UI_LOGO = "logo_small.jpg"
#####################################
# TrayNotifier Class Definition
#####################################
class TrayNotifier(QtCore.QObject):
    """Owns the system tray icon: status balloons plus a Show / Exit menu.

    Repairs: two lines in on_tray_menu_item_clicked were split by stray
    ' | ' artifacts ('QtWidgets | .QSystemTrayIcon' and 'Double | Click');
    the triplicated show-and-raise sequence is factored into a helper.
    """

    def __init__(self, shared_objects):
        super(TrayNotifier, self).__init__()

        # ########## Reference to objects and main screen objects ##########
        self.shared_objects = shared_objects
        self.core_signals = self.shared_objects["core_signals"]
        self.main_screen = self.shared_objects["screens"]["main_screen"]

        # ########## Get the settings and logging instances ##########
        self.settings = QtCore.QSettings()
        self.logger = logging.getLogger("zscanprocessor")

        # ########## Class Variables ##########
        self.system_tray_icon = QtWidgets.QSystemTrayIcon(QtGui.QIcon(UI_LOGO))
        self.system_tray_menu = QtWidgets.QMenu()
        self.system_tray_menu.addAction("Show")
        self.system_tray_menu.addAction("Exit")

        # ########## Setup tray icon ##########
        self.setup_tray_icon()

        # ########## Setup program start signal connections ##########
        self.setup_signals()

    def setup_tray_icon(self):
        # Attach the context menu, show the icon, and greet the user.
        self.system_tray_icon.setContextMenu(self.system_tray_menu)
        self.system_tray_icon.show()
        self.system_tray_icon.showMessage("Zebrafish Scan Processor", "Application started.\nUpdates will be " +
                                          "shown here.", QtWidgets.QSystemTrayIcon.Information, 5000)

    def connect_signals_and_slots(self):
        pass

    def show_informational_message(self, message, time=2000):
        # Balloon with the standard "information" icon.
        self.system_tray_icon.showMessage("Zebrafish Scan Processor", message, QtWidgets.QSystemTrayIcon.Information,
                                          time)

    def show_failure_message(self, message, time=10000):
        # Balloon with the "critical" icon; shown longer by default.
        self.system_tray_icon.showMessage("Zebrafish Scan Processor", message, QtWidgets.QSystemTrayIcon.Critical,
                                          time)

    def _show_main_screen(self):
        # Restore, raise and focus the main window (shared by the branches
        # of on_tray_menu_item_clicked).
        self.main_screen.show()
        self.main_screen.setWindowState(
            self.main_screen.windowState() & ~QtCore.Qt.WindowMinimized | QtCore.Qt.WindowActive)
        self.main_screen.activateWindow()

    def on_tray_menu_item_clicked(self, event):
        # Single slot for both QSystemTrayIcon.activated (event is an
        # ActivationReason) and QMenu.triggered (event is a QAction).
        if event == QtWidgets.QSystemTrayIcon.Context:  # Happens on right-click, ignore for tray menu instead
            pass
        elif event in [QtWidgets.QSystemTrayIcon.Trigger, QtWidgets.QSystemTrayIcon.DoubleClick]:
            self._show_main_screen()
        elif event.text() == "Show":
            self._show_main_screen()
        elif event.text() == "Exit":
            self.system_tray_icon.hide()
            self.main_screen.exit_requested_signal.emit()

    def on_kill_threads__slot(self):
        pass

    def setup_signals(self):
        self.core_signals["kill"].connect(self.on_kill_threads__slot)
        self.core_signals["connect_signals_and_slots"].connect(self.connect_signals_and_slots)
        self.system_tray_icon.activated.connect(self.on_tray_menu_item_clicked)
        self.system_tray_menu.triggered.connect(self.on_tray_menu_item_clicked)
|
haakenlid/django-extensions | django_extensions/management/commands/runserver_plus.py | Python | mit | 22,214 | 0.003061 | # -*- coding: utf-8 -*-
from __future__ import print_function
import logging
import os
import re
import socket
import sys
import time
import django
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.management.base import BaseCommand, CommandError
from django.core.servers.basehttp import get_internal_wsgi_application
from django.db import DEFAULT_DB_ALIAS, connections
from django.db.backends import utils
from django.db.migrations.executor import MigrationExecutor
from django.utils.autoreload import gen_filenames
from django_extensions.management.technical_response import \
null_technical_500_response
from django_extensions.management.utils import (
RedirectHandler, has_ipdb, setup_logger, signalcommand,
)
try:
if 'whitenoise.runserver_nostatic' in settings.INSTALLED_APPS:
USE_STATICFILES = False
elif 'django.contrib.staticfiles' in settings.INSTALLED_APPS:
from django.contrib.staticfiles.handlers import StaticFilesHandler
USE_STATICFILES = True
elif 'staticfiles' in settings.INSTALLED_APPS:
from staticfiles.handlers import StaticFilesHandler # noqa
USE_STATICFILES = True
else:
USE_STATICFILES = False
except ImportError:
USE_STATICFILES = False
naiveip_re = re.compile(r"""^(?:
(?P<addr>
(?P<ipv4>\d{1,3}(?:\.\d{1,3}){3}) | # IPv4 address
(?P<ipv6>\[[a-fA-F0-9:]+\]) | # IPv6 address
(?P<fqdn>[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*) # FQDN
):)?(?P<port>\d+)$""", re.X)
DEFAULT_PORT = "8000"
DEFAULT_POLLER_RELOADER_INTERVAL = getattr(settings, 'RUNSERVERPLUS_POLLER_RELOADER_INTERVAL', 1)
DEFAULT_POLLER_RELOADER_TYPE = getattr(settings, 'RUNSERVERPLUS_POLLER_RELOADER_TYPE', 'auto')
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Starts a lightweight Web server for development."
# Validation is called explicitly each time the server is reloaded.
requires_system_checks = False
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument('addrport', nargs='?',
help='Optional port number, or ipaddr:port')
parser.add_argument('--ipv6', '-6', action='store_true', dest='use_ipv6', default=False,
help='Tells Django to use a IPv6 address.')
parser.add_argument('--noreload', action='store_false', dest='use_reloader', default=True,
help='Tells Django to NOT use the auto-reloader.')
parser.add_argument('--browser', action='store_true', dest='open_browser',
help='Tells Django to open a browser.')
parser.add_argument('--nothreading', action='store_false', dest='threaded',
help='Do not run in multithreaded mode.')
parser.add_argument('--threaded', action='store_true', dest='threaded',
help='Run in multithreaded mode.')
parser.add_argument('--output', dest='output_file', default=None,
help='Specifies an output file to send a copy of all messages (not flushed immediately).')
parser.add_argument('--print-sql', action='store_true', default=False,
help="Print SQL queries as they're executed")
cert_group = parser.add_mutually_exclusive_group()
cert_group.add_argument('--cert', dest='cert_path', action="store", type=str,
help='Deprecated alias for --cert-file option.')
cert_group.add_argument('--cert-file', dest='cert_path', action="store", type=str,
help='SSL .cert file path. If not provided path from --key-file will be selected. '
'Either --cert-file or --key-file must be provided to use SSL.')
parser.add_argument('--key-file', dest='key_file_path', action="store", type=str,
help='SSL .key file path. If not provided path from --cert-file will be selected. '
'Either --cert-file or --key-file must be provided to use SSL.')
parser.add_argument('--extra-file', dest='extra_files', action="append", type=str,
help='auto-reload whenever the given file changes too (can be specified multiple times)')
parser.add_argument('--reloader-interval', dest='reloader_interval', action="store", type=int, default=DEFAULT_POLLER_RELOADER_INTERVAL,
help='After how many seconds auto-reload should scan for updates in poller-mode [default=%s]' % DEFAULT_POLLER_RELOADER_INTERVAL)
parser.add_argument('--reloader-type', dest='reloader_type', action="store", type=str, default=DEFAULT_POLLER_RELOADER_TYPE,
help='Werkzeug reloader type [options are auto, watchdog, or stat, default=%s]' % DEFAULT_POLLER_RELOADER_TYPE)
parser.add_argument('--pdb', action='store_true', dest='pdb', default=False,
help='Drop into pdb shell at the start of any view.')
parser.add_argument('--ipdb', action='store_true', dest='ipdb', default=False,
help='Drop into ipdb shell at the start of any view.')
parser.add_argument('--pm', action='store_true', dest='pm', default=False,
help='Drop into (i)pdb shell if an exception is raised in a view.')
parser.add_argument('--startup-messages', dest='startup_messages', action="store", default='reload',
help='When to show startup messages: reload [default], once, always, never.')
parser.add_argument('--keep-meta-shutdown', dest='keep_meta_shutdown_func', action='store_true', default=False,
help="Keep request.META['werkzeug.server.shutdown'] function which is automatically removed "
"because Django debug pages tries to call the function and unintentionally shuts down "
"the Werkzeug server.")
parser.add_argument("--nopin", dest="nopin", action="store_true", def | ault=False,
help="Disable the PIN in werkzeug. USE IT WISELY!"),
if USE_STATICFILES:
pa | rser.add_argument('--nostatic', action="store_false", dest='use_static_handler', default=True,
help='Tells Django to NOT automatically serve static files at STATIC_URL.')
parser.add_argument('--insecure', action="store_true", dest='insecure_serving', default=False,
help='Allows serving static files even if DEBUG is False.')
@signalcommand
def handle(self, *args, **options):
addrport = options.get('addrport')
startup_messages = options.get('startup_messages', 'reload')
if startup_messages == "reload":
self.show_startup_messages = os.environ.get('RUNSERVER_PLUS_SHOW_MESSAGES')
elif startup_messages == "once":
self.show_startup_messages = not os.environ.get('RUNSERVER_PLUS_SHOW_MESSAGES')
elif startup_messages == "never":
self.show_startup_messages = False
else:
self.show_startup_messages = True
os.environ['RUNSERVER_PLUS_SHOW_MESSAGES'] = '1'
# Do not use default ending='\n', because StreamHandler() takes care of it
if hasattr(self.stderr, 'ending'):
self.stderr.ending = None
setup_logger(logger, self.stderr, filename=options.get('output_file', None)) # , fmt="[%(name)s] %(message)s")
logredirect = RedirectHandler(__name__)
# Redirect werkzeug log items
werklogger = logging.getLogger('werkzeug')
werklogger.setLevel(logging.INFO)
werklogger.addHandler(logredirect)
werklogger.propagate = False
if options.get("print_sql", False):
try:
import sqlparse
except ImportError:
sqlparse = None # noqa
try:
import pygments.lexers
import py |
CospanDesign/nysa-artemis-usb2-platform | artemis_usb2/spi_flash/serial_flash_manager.py | Python | gpl-2.0 | 5,915 | 0.007101 | # Copyright (c) 2010-2011, Emmanuel Blot <emmanuel.blot@free.fr>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Neotion nor the names of its contributors may
# be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL NEOTION BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED A | ND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import sys
import json
from array import array as Array
sys | .path.append(os.path.join(os.path.dirname(__file__), os.pardir))
from spi import SpiController
#Get all types of the SPI Flash
import numonyx_flash
CMD_JEDEC_ID = 0x9F
#Exceptions
# SerialFlashUnknownJedec extends SerialFlashNotSupported; the timeout and
# value errors are independent of that pair.
class SerialFlashNotSupported(Exception):
    """Exception thrown when a non-existing feature is invoked"""
class SerialFlashUnknownJedec(SerialFlashNotSupported):
    """Exception thrown when a JEDEC identifier is not recognized"""
    def __init__(self, jedec):
        from binascii import hexlify
        SerialFlashNotSupported.__init__(self, "Unknown Flash Device: %s" % \
            hexlify(jedec))
class SerialFlashTimeout(Exception):
    """Exception thrown when a flash command cannot be completed in a timely
    manner"""
class SerialFlashValueError(ValueError):
    """Exception thrown when a parameter is out of range"""
class SerialFlashManager(object):
""" Serial Flash Manager
Automatically detects and instantiates the proper flash device class
based on the JEDEC identifier which is read out from the device.
"""
def __init__(self,
vendor,
product,
interface = 2,
prom_config_file = "proms.json",
debug = False):
self._ctrl = SpiController(silent_clock = False)
self._ctrl.configure(vendor, product, interface)
#Load the configuration file
name = prom_config_file
if not os.path.exists(prom_config_file):
name = os.path.join(os.path.dirname(__file__), prom_config_file)
f = open(name, "r")
self.devices = {}
proms = {}
proms = json.load(f)
if debug:
print "Loaded: %s" % name
print "Proms: %s" % str(proms)
for man in proms:
#change the string representation of hex to a real hex
man_hex = int(man, 16)
if debug:
print "man: 0x%02X" % man_hex
print "Manufacturer: %s" % proms[man]["Manufacturer"]
print "\tDevices:"
self.devices[man_hex] = {}
#Copy over the manufacturer's name
self.devices[man_hex]["Manufacturer"] = proms[man]["Manufacturer"]
self.devices[man_hex]["Devices"] = {}
for device in proms[man]["Devices"]:
dev_hex = int(device, 16)
if debug:
print "\t\tFound: 0x%02X" % dev_hex
self.devices[man_hex]["Devices"][dev_hex] = {}
self.devices[man_hex]["Devices"][dev_hex]["Description"] = proms[man]["Devices"][device]["Description"]
self.devices[man_hex]["Devices"][dev_hex]["capacity"] = int(proms[man]["Devices"][device]["capacity"], 16)
def get_flash_device(self, cs = 0, debug = False):
"""Obtain an instance of the detected flash device"""
spi = self._ctrl.get_port(cs)
jedec = SerialFlashManager.read_jedec_id(spi)
if debug:
print "Jedec: %s" % str(jedec)
if not jedec:
#It's likely the latency setting is too low if this conditio is
#Encountered
raise SerialFlashUnknownJedec("Unable to read JEDEC ID")
#Go through the PROM to find the identification of this device
maxlength = 3
ids = tuple([ord(x) for x in jedec[:maxlength]])
if debug:
print "SPI Values: 0x%02X 0x%02X 0x%02X" % (ids[0], ids[1], ids[2])
#print "Manufacturer: %s" % self.devices[ids[1]]["Manufacturer"]
#print "Values: %s" % str(self.devices[ids[1]]["Devices"])
print "Device: %s" % self.devices[ids[1]]["Devices"][ids[2]]["Description"]
if ids[1] == numonyx_flash.NumonyxFlashDevice.ID:
#print "Found Numonyx Device"
return numonyx_flash.NumonyxFlashDevice(spi, ids[2])
print "id: %s" % str(ids)
raise SerialFlashUnknownJedec(ids[1])
@staticmethod
def read_jedec_id(spi):
"""Read the flash device JEDEC Identifier (3 bytes)"""
jedec_cmd = Array('B', [CMD_JEDEC_ID])
spi_values = spi.exchange(jedec_cmd, 3)
#print "SPI Values: %s" % str(spi_values)
return spi.exchange(jedec_cmd, 3).tostring()
|
LearnEra/LearnEraPlaftform | cms/envs/common.py | Python | agpl-3.0 | 21,668 | 0.002538 | # -*- coding: utf-8 -*-
"""
This is the common settings file, intended to set sane defaults. If you have a
piece of configuration that's dependent on a set of feature flags being set,
then create a function that returns the calculated value based on the value of
FEATURES[...]. Modules that extend this one can change the feature
configuration in an environment specific config file and re-calculate those
values.
We should make a method that calls all these config methods so that you just
make one call at the end of your site-specific dev file to reset all the
dependent variables (like INSTALLED_APPS) for you.
Longer TODO:
1. Right now our treatment of static content in general and in particular
course-specific static content is haphazard.
2. We should have a more disciplined approach to feature flagging, even if it
just means that we stick them in a dict called FEATURES.
3. We need to handle configuration for multiple courses. This could be as
multiple sites, but we do need a way to map their data assets.
"""
# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
# pylint: disable=W0401, W0611, W0614
import imp
import os
import sys
import lms.envs.common
# Although this module itself may not use these imported variables, other dependent modules may.
from lms.envs.common import (
USE_TZ, TECH_SUPPORT_EMAIL, PLATFORM_NAME, BUGS_EMAIL, DOC_STORE_CONFIG, ALL_LANGUAGES, WIKI_ENABLED, MODULESTORE,
update_module_store_settings, ASSET_IGNORE_REGEX
)
from path import path
from warnings import simplefilter
from lms.lib.xblock.mixin import LmsBlockMixin
from dealer.git import git
from xmodule.modulestore.edit_info import EditInfoMixin
############################ FE | ATURE C | ONFIGURATION #############################
FEATURES = {
'USE_DJANGO_PIPELINE': True,
'GITHUB_PUSH': False,
# for consistency in user-experience, keep the value of the following 3 settings
# in sync with the ones in lms/envs/common.py
'ENABLE_DISCUSSION_SERVICE': True,
'ENABLE_TEXTBOOK': True,
'ENABLE_STUDENT_NOTES': True,
'AUTH_USE_CERTIFICATES': False,
# email address for studio staff (eg to request course creation)
'STUDIO_REQUEST_EMAIL': '',
# Segment.io - must explicitly turn it on for production
'SEGMENT_IO': False,
# Enable URL that shows information about the status of various services
'ENABLE_SERVICE_STATUS': False,
# Don't autoplay videos for course authors
'AUTOPLAY_VIDEOS': False,
# If set to True, new Studio users won't be able to author courses unless
# edX has explicitly added them to the course creator group.
'ENABLE_CREATOR_GROUP': False,
# whether to use password policy enforcement or not
'ENFORCE_PASSWORD_POLICY': False,
# If set to True, Studio won't restrict the set of advanced components
# to just those pre-approved by edX
'ALLOW_ALL_ADVANCED_COMPONENTS': False,
# Turn off account locking if failed login attempts exceeds a limit
'ENABLE_MAX_FAILED_LOGIN_ATTEMPTS': False,
# Allow editing of short description in course settings in cms
'EDITABLE_SHORT_DESCRIPTION': True,
# Hide any Personally Identifiable Information from application logs
'SQUELCH_PII_IN_LOGS': False,
# Toggles the embargo functionality, which enable embargoing for particular courses
'EMBARGO': False,
# Toggles the embargo site functionality, which enable embargoing for the whole site
'SITE_EMBARGOED': False,
# Turn on/off Microsites feature
'USE_MICROSITES': False,
# Allow creating courses with non-ascii characters in the course id
'ALLOW_UNICODE_COURSE_ID': False,
# Prevent concurrent logins per user
'PREVENT_CONCURRENT_LOGINS': False,
# Turn off Advanced Security by default
'ADVANCED_SECURITY': False,
# Modulestore to use for new courses
'DEFAULT_STORE_FOR_NEW_COURSE': None,
}
ENABLE_JASMINE = False
############################# SET PATH INFORMATION #############################
PROJECT_ROOT = path(__file__).abspath().dirname().dirname() # /edx-platform/cms
REPO_ROOT = PROJECT_ROOT.dirname()
COMMON_ROOT = REPO_ROOT / "common"
LMS_ROOT = REPO_ROOT / "lms"
ENV_ROOT = REPO_ROOT.dirname() # virtualenv dir /edx-platform is in
GITHUB_REPO_ROOT = ENV_ROOT / "data"
sys.path.append(REPO_ROOT)
sys.path.append(PROJECT_ROOT / 'djangoapps')
sys.path.append(COMMON_ROOT / 'djangoapps')
sys.path.append(COMMON_ROOT / 'lib')
# For geolocation ip database
GEOIP_PATH = REPO_ROOT / "common/static/data/geoip/GeoIP.dat"
GEOIPV6_PATH = REPO_ROOT / "common/static/data/geoip/GeoIPv6.dat"
############################# WEB CONFIGURATION #############################
# This is where we stick our compiled template files.
import tempfile
MAKO_MODULE_DIR = os.path.join(tempfile.gettempdir(), 'mako_cms')
MAKO_TEMPLATES = {}
MAKO_TEMPLATES['main'] = [
PROJECT_ROOT / 'templates',
COMMON_ROOT / 'templates',
COMMON_ROOT / 'djangoapps' / 'pipeline_mako' / 'templates',
COMMON_ROOT / 'djangoapps' / 'pipeline_js' / 'templates',
]
for namespace, template_dirs in lms.envs.common.MAKO_TEMPLATES.iteritems():
MAKO_TEMPLATES['lms.' + namespace] = template_dirs
TEMPLATE_DIRS = MAKO_TEMPLATES['main']
EDX_ROOT_URL = ''
LOGIN_REDIRECT_URL = EDX_ROOT_URL + '/signin'
LOGIN_URL = EDX_ROOT_URL + '/signin'
TEMPLATE_CONTEXT_PROCESSORS = (
'django.core.context_processors.request',
'django.core.context_processors.static',
'django.contrib.messages.context_processors.messages',
'django.core.context_processors.i18n',
'django.contrib.auth.context_processors.auth', # this is required for admin
'django.core.context_processors.csrf',
'dealer.contrib.django.staff.context_processor', # access git revision
'contentstore.context_processors.doc_url',
)
# use the ratelimit backend to prevent brute force attacks
AUTHENTICATION_BACKENDS = (
'ratelimitbackend.backends.RateLimitModelBackend',
)
LMS_BASE = None
# These are standard regexes for pulling out info like course_ids, usage_ids, etc.
# They are used so that URLs with deprecated-format strings still work.
from lms.envs.common import (
COURSE_KEY_PATTERN, COURSE_ID_PATTERN, USAGE_KEY_PATTERN, ASSET_KEY_PATTERN
)
#################### CAPA External Code Evaluation #############################
XQUEUE_INTERFACE = {
'url': 'http://localhost:8888',
'django_auth': {'username': 'local',
'password': 'local'},
'basic_auth': None,
}
################################# Deprecation warnings #####################
# Ignore deprecation warnings (so we don't clutter Jenkins builds/production)
simplefilter('ignore')
################################# Middleware ###################################
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'staticfiles.finders.FileSystemFinder',
'staticfiles.finders.AppDirectoriesFinder',
'pipeline.finders.PipelineFinder',
)
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'request_cache.middleware.RequestCache',
'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'method_override.middleware.MethodOverrideMiddleware',
# Instead of AuthenticationMiddleware, we use a cache-backed version
'cache_toolbox.middleware.CacheBackedAuthenticationMiddleware',
'student.middleware.UserStandingMiddleware',
'contentserver.middleware.StaticContentServer',
'crum.CurrentRequestUserMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'track.middleware.TrackMiddleware',
# Allows us to dark-launch particular languages
'dark_lang.middleware.DarkLangMiddleware',
'embargo.middleware.EmbargoMiddleware',
# Detects user-requested locale from ' |
cesardeazevedo/sniffle | sniffle/wsgi.py | Python | mit | 391 | 0.005115 | """
It exposes the WSGI callable as a module-level v | ariable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sniffle.settings")
from django.core.wsgi import get_wsgi_application
from dj_static imp | ort Cling
application = Cling(get_wsgi_application())
|
TeaBough/calico-docker | calico_containers/tests/unit/container_test.py | Python | apache-2.0 | 39,497 | 0.000709 | # Copyright 2015 Metaswitch Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from mock import patch, Mock, call
from nose_parameterized import parameterized
from netaddr import IPAddress, IPNetwork
from subprocess import CalledProcessError
from calico_ctl.bgp import *
from calico_ctl import container
from calico_ctl import utils
from pycalico.datastore_datatypes import Endpoint, | IPPool
class TestContainer(unittest.TestCase):
@parameteriz | ed.expand([
({'<CONTAINER>':'node1', 'ip':1, 'add':1, '<IP>':'127.a.0.1'}, True),
({'<CONTAINER>':'node1', 'ip':1, 'add':1, '<IP>':'aa:bb::zz'}, True),
({'add':1, '<CONTAINER>':'node1', '<IP>':'127.a.0.1'}, True),
({'add':1, '<CONTAINER>':'node1', '<IP>':'aa:bb::zz'}, True)
])
def test_validate_arguments(self, case, sys_exit_called):
"""
Test validate_arguments for calicoctl container command
"""
with patch('sys.exit', autospec=True) as m_sys_exit:
# Call method under test
container.validate_arguments(case)
# Assert method exits if bad input
self.assertEqual(m_sys_exit.called, sys_exit_called)
@patch('calico_ctl.container.enforce_root', autospec=True)
@patch('calico_ctl.container.get_container_info_or_exit', autospec=True)
@patch('calico_ctl.container.client', autospec=True)
@patch('calico_ctl.container.get_pool_or_exit', autospec=True)
@patch('calico_ctl.container.netns', autospec=True)
def test_container_add(self, m_netns, m_get_pool_or_exit, m_client,
m_get_container_info_or_exit, m_enforce_root):
"""
Test container_add method of calicoctl container command
"""
# Set up mock objects
m_get_container_info_or_exit.return_value = {
'Id': 666,
'State': {'Running': 1, 'Pid': 'Pid_info'},
'HostConfig': {'NetworkMode': "not host"}
}
m_client.get_endpoint.side_effect = KeyError
m_client.get_default_next_hops.return_value = 'next_hops'
# Call method under test
test_return = container.container_add('container1', '1.1.1.1', 'interface')
# Assert
m_enforce_root.assert_called_once_with()
m_get_container_info_or_exit.assert_called_once_with('container1')
m_client.get_endpoint.assert_called_once_with(
hostname=utils.hostname,
orchestrator_id=utils.ORCHESTRATOR_ID,
workload_id=666
)
m_get_pool_or_exit.assert_called_once_with(IPAddress('1.1.1.1'))
m_client.get_default_next_hops.assert_called_once_with(utils.hostname)
# Check an enpoint object was returned
self.assertTrue(isinstance(test_return, Endpoint))
self.assertTrue(m_netns.create_veth.called)
self.assertTrue(m_netns.move_veth_into_ns.called)
self.assertTrue(m_netns.add_ip_to_ns_veth.called)
self.assertTrue(m_netns.add_ns_default_route.called)
self.assertTrue(m_netns.get_ns_veth_mac.called)
self.assertTrue(m_client.set_endpoint.called)
@patch('calico_ctl.container.enforce_root', autospec=True)
@patch('calico_ctl.container.get_container_info_or_exit', autospec=True)
@patch('calico_ctl.container.client', autospec=True)
def test_container_add_container_host_ns(self, m_client,
m_get_container_info_or_exit, m_enforce_root):
"""
Test container_add method of calicoctl container command when the
container shares the host namespace.
"""
# Set up mock objects
m_get_container_info_or_exit.return_value = {
'Id': 666,
'State': {'Running': 1, 'Pid': 'Pid_info'},
'HostConfig': {'NetworkMode': 'host'}
}
m_client.get_endpoint.side_effect = KeyError
# Call method under test expecting a SystemExit
self.assertRaises(SystemExit, container.container_add,
'container1', '1.1.1.1', 'interface')
m_enforce_root.assert_called_once_with()
@patch('calico_ctl.container.enforce_root', autospec=True)
@patch('calico_ctl.container.get_container_info_or_exit', autospec=True)
@patch('calico_ctl.container.client', autospec=True)
@patch('calico_ctl.container.get_pool_or_exit', autospec=True)
def test_container_add_existing_container(
self, m_get_pool_or_exit, m_client, m_get_container_info_or_exit,
m_enforce_root):
"""
Test container_add when a container already exists.
Do not raise an exception when the client tries 'get_endpoint'
Assert that the system then exits and all expected calls are made
"""
# Call method under test expecting a SystemExit
self.assertRaises(SystemExit, container.container_add,
'container1', '1.1.1.1', 'interface')
# Assert only expected calls were made
self.assertTrue(m_enforce_root.called)
self.assertTrue(m_get_container_info_or_exit.called)
self.assertTrue(m_client.get_endpoint.called)
self.assertFalse(m_get_pool_or_exit.called)
@patch('calico_ctl.container.enforce_root', autospec=True)
@patch('calico_ctl.container.get_container_info_or_exit', autospec=True)
@patch('calico_ctl.container.client', autospec=True)
@patch('calico_ctl.container.get_pool_or_exit', autospec=True)
def test_container_add_container_not_running(
self, m_get_pool_or_exit, m_client,
m_get_container_info_or_exit, m_enforce_root):
"""
Test container_add when a container is not running
get_container_info_or_exit returns a running state of value 0
Assert that the system then exits and all expected calls are made
"""
# Set up mock object
m_client.get_endpoint.side_effect = KeyError
m_get_container_info_or_exit.return_value = {
'Id': 666,
'State': {'Running': 0, 'Pid': 'Pid_info'}
}
# Call method under test expecting a SystemExit
self.assertRaises(SystemExit, container.container_add,
'container1', '1.1.1.1', 'interface')
# Assert only expected calls were made
self.assertTrue(m_enforce_root.called)
self.assertTrue(m_get_container_info_or_exit.called)
self.assertTrue(m_client.get_endpoint.called)
self.assertFalse(m_get_pool_or_exit.called)
@patch('calico_ctl.container.enforce_root', autospec=True)
@patch('calico_ctl.container.get_container_info_or_exit', autospec=True)
@patch('calico_ctl.container.client', autospec=True)
@patch('calico_ctl.container.get_pool_or_exit', autospec=True)
def test_container_add_not_ipv4_configured(
self, m_get_pool_or_exit, m_client, m_get_container_info_or_exit,
m_enforce_root):
"""
Test container_add when the client cannot obtain next hop IPs
client.get_default_next_hops returns an empty dictionary, which produces
a KeyError when trying to determine the IP.
Assert that the system then exits and all expected calls are made
"""
# Set up mock objects
m_client.get_endpoint.side_effect = KeyError
m_client.get_default_next_hops.return_value = {}
# Call method under test expecting a SystemExit
self.assertRaises(SystemExit, container.container_add,
'container1', '1.1.1.1', 'interface')
# Assert only expected calls were made
self.assertTrue(m_enforce_root.called)
|
Nikolay-Kha/PyCNC | cnc/sensors/thermistor.py | Python | mit | 1,948 | 0 | """
This module reads temperature from NTC thermistor connected to ads111x.
Circuit diagram for this module should be like this:
Vcc
---
|
|
.-.
| |
| | R1
'-'
|
o----------------o------------> ads111x input
| |
| | |
.-./ |
| / + |
|/| R0 NTC ----- 10 uF
/-' -----
/ | |
| |
_|_ _|_
GND GND
Since ads111x uses internal reference voltage, Vcc should be well regulated.
"""
from __future__ import division
import math
import time
try:
import ads111x as adc
except ImportError:
print("---- ads111x is not detected ----")
adc = None
CELSIUS_TO_KELVIN = 273.15
# Circuit parameters, resistance in Ohms, temperature in Celsius.
# Beta is thermistor parameter:
# https://en.wikipedia.org/wiki/Thermistor#B_or_.CE.B2_parameter_equation
Vcc = 3.3
R0 = 100000
T0 = 25
BETA = 4092
R1 = 4700
Rinf = R0 * math.exp(-BETA / (T0 + CELSIUS_TO_KELVIN))
def get_temperature(channel):
"""
Measure temperature on specified channel.
Can raise OSError or IOError on any issue with sensor.
:param channel: ads111x channel.
:return: temperature in Celsius
"""
if adc is None:
raise IOError("ads111x is not connected")
v = adc.measure(channel)
if v >= Vcc:
raise IOError("Thermistor not connected")
if v <= 0:
raise IOError("Short circuit")
r = v * R1 / (Vcc - v)
return (BETA / math.log(r / Rinf)) - CELSIUS_TO_KELVIN
# for test purpose
if __name__ == "__main__":
while True:
for i in range( | 0, 4):
try:
t = get_temperature(i)
except (IOError, OSError):
t = None
print("T{}={}".forma | t(i, t))
print("-----------------------------")
time.sleep(0.5)
|
purism/pdak | dak/dak.py | Python | gpl-2.0 | 8,306 | 0.002047 | #!/usr/bin/env python
"""
Wrapper to launch dak functionality
G{importgraph}
"""
# Copyright (C) 2005, 2006 Anthony Towns <ajt@debian.org>
# Copyright (C) 2006 James Troup <james@nocrew.org>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
################################################################################
# well I don't know where you're from but in AMERICA, there's a little
# thing called "abstinent until proven guilty."
# -- http://harrietmiers.blogspot.com/2005/10/wow-i-feel-loved.html
# (if James had a blog, I bet I could find a funny quote in it to use!)
################################################################################
import os
import sys
import traceback
import daklib.utils
from daklib.daklog import Logger
from daklib.config import Config
from daklib.dak_exceptions import CantOpenError
################################################################################
def init():
"""Setup the list of modules and brief explanation of what they
do."""
functionality = [
("ls",
"Show which suites packages are in"),
("override",
"Query/change the overrides"),
("check-archive",
"Archive sanity checks"),
("queue-report",
"Produce a report on NEW and BYHAND packages"),
("show-new",
"Output html for packages in NEW"),
("show-deferred",
"Output html and symlinks for packages in DEFERRED"),
("graph",
"Output graphs of number of packages in various queues"),
("rm",
"Remove packages from suites"),
("process-new",
"Process NEW and BYHAND packages"),
("process-upload",
"Process packages in queue/unchecked"),
("process-commands",
"Process command files (*.dak-commands)"),
("process-policy",
"Process packages in policy queues from COMMENTS files"),
("dominate",
"Remove obsolete source and binary associations from suites"),
("export",
"Export uploads from policy queues"),
("export-suite",
"export a suite to a flat directory structure"),
("make-pkg-file-mapping",
"Generate package <-> file mapping"),
("generate-releases",
"Generate Release files"),
("generate-packages-sources2",
"Generate Packages/Sources files"),
("contents",
"Generate content files"),
("metadata",
"Load data for packages/sources files"),
("generate-index-diffs",
"Generate .diff/Index files"),
("clean-suites",
"Clean unused/superseded packages from the archive"),
("manage-build-queues",
"Clean and update metadata for build queues"),
("manage-debug-suites",
"Clean obsolete packages from debug suites"),
("manage-external-signature-requests",
"Maintain external signature requests"),
("clean-queues",
"Clean cruft from incoming"),
("archive-dedup-pool",
"De-duplicates files in the pool directory"),
("transitions",
"Manage the release transition file"),
("check-overrides",
"Override cruft checks"),
("control-overrides",
"Manipulate/list override entries in bulk"),
("control-suite",
"Manipulate suites in bulk"),
("update-suite",
"Update suite with packages from a different suite"),
("cruft-report",
"Check for obsolete or duplicated packages"),
("auto-decruft",
"Clean cruft without reverse dependencies automatically"),
("examine-package",
"Show information useful for NEW processing"),
("import",
"Import existing source and binary packages"),
("import-repository",
"Import packages from another repository"),
("import-keyring",
"Populate fingerprint/uid table based on a new/updated keyring"),
("import-users-from-passwd",
"Sync PostgreSQL users with passwd file"),
("acl",
"Manage upload ACLs"),
("admin",
"Perform administration on the dak database"),
("update-db",
"Updates databae schema to latest revision"),
("init-dirs",
"Initial setup of the archive"),
("make-maintainers",
"Generates Maintainers file for BTS etc"),
("make-overrides",
"Generates override files"),
("new-security-install",
"New way to install a security upload into the archive"),
("stats",
"Generate statistics"),
("bts-categorize",
"Categorize uncategorized bugs filed against ftp.debian.org"),
("add-user",
"Add a user to the archive"),
("make-changelog",
"Generate changelog between two suites"),
("copy-installer",
"Copies the installer from one suite to another"),
("external-overrides",
"Modify external overrides"),
]
return functionality
################################################################################
def usage(functionality, exit_code=0):
"""Print a usage message and exit with 'exit_code'."""
print """Usage: dak COMMAND [...]
Run DAK commands. (Will also work if invoked as COMMAND.)
Available commands:"""
for (command, description) in functionality:
print " %-23s %s" % (command, description)
sys.exit(exit_code)
################################################################################
def main():
"""Launch dak functionality."""
try:
logger = Logger('dak top-level', print_starting=False)
except CantOpenError:
logger = None
functionality = init()
modules = [ command for (command, _) in functionality ]
if len(sys.argv) == 0:
daklib.utils.fubar("err, argc == 0? how is that possible?")
elif (len(sys.argv) == 1
or (len(sys.argv) == 2 and
(sys.argv[1] == "--help" or sys.argv[1] == "-h"))):
usage(functionality)
# First see if we were invoked with/as the name of a module
cmdname = sys.argv[0]
cmdname = cmdname[cmdname.rfind("/")+1:]
if cmdname in modules:
pass
# Otherwise the argument is the module
else:
cmdname = sys.argv[1]
sys.argv = [sys.argv[0] + " " + sys.argv[1]] + sys.argv[2:]
if cmdname not in modules:
match = []
for name in modules:
if name.startswith(cmdname):
match.append(name)
if len(match) == 1:
cmdname = match[0]
elif len(match) > 1:
daklib.utils.warn("ambiguous command '%s' - could be %s" \
% (cmdname, ", ".join(match)))
usage(functionality, 1)
else:
daklib.utils.warn("unknown command '%s'" % (cmdname))
usag | e(functionality, 1)
# Invoke the module
module = __import__(cmdname.replace("-","_"))
try:
module.main()
except KeyboardInterrupt:
msg = 'KeyboardInterrupt caught; exiting'
print msg
if logger:
logger.log([msg])
sys.exit(1)
except SystemExit:
raise
except:
if logger:
for line in traceback.format_exc().split('\n')[:- | 1]:
logger.log(['exception', line])
raise
###################################################### |
devilry/devilry-django | devilry/devilry_account/crapps/account/select_language.py | Python | bsd-3-clause | 2,654 | 0.00113 | # import pycountry as pycountry
from django.conf import settings
from django.http import HttpResponseRedirect, Http404
from django.utils import translation
from django.views.generic import TemplateView
from devilry.devilry_account.crapps.account import utils
from devilry.devilry_account.models import User
class LanguageInfo(object):
def __init__(self, languagecode, language=None):
self.languagecode = languagecode
self.language = language or self.__get_language()
def __get_language(self):
return utils.get_language_name(languagecode=self.languagecode)
class SelectLanguageView(TemplateView):
template_name = 'devilry_account/crapps/account/select_language.django.html'
def post(self, request, *args, **kwargs):
selected_languagecode = self.__get_selected_languagecode(data=request.POST)
if request.user.is_authenticated:
self.__update_user_language_code(request=request, languagecode=selected_languagecode)
request.session['SELECTED_LANGUAGE_CODE'] = selecte | d_languagecode
else:
request.session['SELECTED_LANGUAGE_CODE'] = selected_languagecode
return HttpResponseRedirect('/account/')
def __update_user_language_code(self, request, languagecode):
try:
user = User.objects.get(id=request.user.id)
except User.DoesNotExist:
raise Http404()
else:
user.languagecode = languagecode
| user.full_clean()
user.save()
def __get_selected_languagecode(self, data):
selected_languagecode = data.get('selected_language', None)
if not selected_languagecode:
return translation.get_language()
languagecodes = [language[0] for language in settings.LANGUAGES]
if selected_languagecode in languagecodes:
return selected_languagecode
else:
return translation.get_language()
def get_context_data(self, **kwargs):
context = super(SelectLanguageView, self).get_context_data(**kwargs)
context['languages'] = self.__get_languages_info()
return context
def __get_languages_info(self):
language_objects_info_list = []
for language in settings.LANGUAGES:
language_info = LanguageInfo(
languagecode=language[0],
language=language[1]
)
if language[0] == translation.get_language():
language_objects_info_list.insert(0, language_info)
else:
language_objects_info_list.append(language_info)
return language_objects_info_list
|
projecthamster/hamster-dbus | tests/storage/common.py | Python | gpl-3.0 | 748 | 0 | # -*- coding: utf-8 -*-
"""Module to provide a common base ``TestCase``."""
from __future__ import absolute_import, unicode_literals
import dbusmock
class HamsterDBusManagerTestCase(dbusmock.DBusTestCase):
"""
Common testcase for storage backend unittests.
This test case makes sure tests are run | against a new private session bus
instance and provides easy access to the underlying dbus connection.
"""
@classmethod
def setUpClass(cls):
"""Setup new private session bus."""
cls.start_session_bus()
cls.dbus_con = cls.get | _dbus()
def tearDown(self):
"""Terminate any service launched by the test case."""
self.service_mock.terminate()
self.service_mock.wait()
|
mvendra/mvtools | download_url.py | Python | mit | 992 | 0.00504 | #!/usr/bin/env python3
import sys
i | mpor | t os
import urllib.request
import path_utils
# credit: https://stackoverflow.com/questions/22676/how-to-download-a-file-over-http
def download_url(source_url, target_path):
if os.path.exists(target_path):
return False, "Target path [%s] already exists" % target_path
contents = None
try:
with urllib.request.urlopen(source_url) as f:
contents = f.read().decode("utf8")
except urllib.error.HTTPError as httpex:
return False, "Downloading failed: [%s]" % httpex
with open(target_path, "w") as f:
f.write(contents)
return True, None
def puaq():
print("Usage: %s source_url target_path" % path_utils.basename_filtered(__file__))
sys.exit(1)
if __name__ == "__main__":
if len(sys.argv) < 3:
puaq()
source_url = sys.argv[1]
target_path = sys.argv[2]
v, r = download_url(source_url, target_path)
if not v:
print(r)
sys.exit(1)
|
mikepii/retail_store_foot_traffic_monitor | tracking/management/commands/checkin.py | Python | apache-2.0 | 1,191 | 0.004198 | import datetime
from optparse import make_option
from django.conf import settings
from djang | o.core.management.base import BaseCommand, CommandError
from six import print_
import bigbro
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make | _option('--store',
dest='store',
help='Watch log subdirectory'),
)
help = 'Log employee RFID check-ins.'
def handle(self, *args, **options):
print_('Note: RFID scanner must be set up for keyboard input (see README).')
print_('Waiting for RFID input. Press Ctrl+C to quit.')
date_today = datetime.datetime.now().strftime('%Y-%m-%d')
log_location = bigbro.log_location(options['store'], date_today, 'checkin')
with open(log_location, 'a') as outf:
while True:
try:
rfid = raw_input()
time_f = datetime.datetime.now().strftime(settings.LOG_TIME_FMT)
print_(time_f, rfid, sep='\t', file=outf)
except KeyboardInterrupt:
print_('')
print_('Quitting...')
break
|
google/material-design-icons | update/venv/lib/python3.9/site-packages/pip/_internal/utils/models.py | Python | apache-2.0 | 1,329 | 0 | """Utilities for defining models
"""
import operator
from typing import Any, Callable, Type
class KeyBasedCompareMixin:
"""Provides comparison capabilities that is based on a key"""
__slots__ = ["_compare_key", "_defining_class"]
def __init__(self, key, defining_class):
# type: (Any, Type[KeyBasedCompareMixin]) -> None
self._compare_key = key
self._defining_class = defining_class
def __hash__(self):
# type: () -> int
return hash(self._compare_key)
def __lt__(self, other):
# type: (Any) -> bool
return self._compare(other, operator.__lt__)
def __le__(self, other):
# type: (Any) -> bool
return self._compare(other, operator.__le__)
def __gt__(self, other):
# type: (Any) -> bool
return self._compare(other, operator.__gt__)
def __ge__(self, other):
# type: (Any) -> bool
return self._compare(other, operator.__ge__)
| def __eq__(self, other):
# type: (Any) -> bool
return self. | _compare(other, operator.__eq__)
def _compare(self, other, method):
# type: (Any, Callable[[Any, Any], bool]) -> bool
if not isinstance(other, self._defining_class):
return NotImplemented
return method(self._compare_key, other._compare_key)
|
ASMlover/study | python/proto/pyRpc/logger.py | Python | bsd-2-clause | 1,837 | 0.000544 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
#
# Copyright (c) 2016 ASMlover. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list ofconditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materialsprovided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# " | AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQ | UENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import logging
class LoggerMgr(object):
logger_table = {}
@staticmethod
def get_logger(name):
if name in LoggerMgr.logger_table:
return LoggerMgr.logger_table[name]
logger = logging.getLogger(name)
logger.setLevel(logger.DEBUG)
console_handler = logging.StreamHandler()
logger.addHandler(console_handler)
LoggerMgr.logger_table[name] = logger
return logger
|
kirbyfan64/hy | setup.py | Python | mit | 3,580 | 0 | #!/usr/bin/env python
# Copyright (c) 2012, 2013 Paul Tagliamonte <paultag@debian.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limita | tion
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRAN | TIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
import os
import re
import sys
from setuptools import find_packages, setup
PKG = "hy"
VERSIONFILE = os.path.join(PKG, "version.py")
verstr = "unknown"
try:
verstrline = open(VERSIONFILE, "rt").read()
except EnvironmentError:
pass # Okay, there is no version file.
else:
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
mo = re.search(VSRE, verstrline, re.M)
if mo:
__version__ = mo.group(1)
else:
msg = "if %s.py exists, it is required to be well-formed" % VERSIONFILE
raise RuntimeError(msg)
long_description = """Hy is a Python <--> Lisp layer. It helps
make things work nicer, and lets Python and the Hy lisp variant play
nice together. """
install_requires = ['rply>=0.7.0', 'astor>=0.3']
if sys.version_info[:2] < (2, 7):
install_requires.append('argparse>=1.2.1')
install_requires.append('importlib>=1.0.2')
if os.name == 'nt':
install_requires.append('pyreadline==2.0')
setup(
name=PKG,
version=__version__,
install_requires=install_requires,
entry_points={
'console_scripts': [
'hy = hy.cmdline:hy_main',
'hyc = hy.cmdline:hyc_main',
'hy2py = hy.cmdline:hy2py_main',
]
},
packages=find_packages(exclude=['tests*']),
package_data={
'hy.contrib': ['*.hy'],
'hy.core': ['*.hy'],
},
author="Paul Tagliamonte",
author_email="tag@pault.ag",
long_description=long_description,
description='Lisp and Python love each other.',
license="Expat",
url="http://hylang.org/",
platforms=['any'],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: DFSG approved",
"License :: OSI Approved :: MIT License", # Really "Expat". Ugh.
"Operating System :: OS Independent",
"Programming Language :: Lisp",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Software Development :: Code Generators",
"Topic :: Software Development :: Compilers",
"Topic :: Software Development :: Libraries",
]
)
|
gsarma/ChannelWorm | scripts/iv_curve_from_model.py | Python | mit | 606 | 0.006601 | import subprocess, os
if os.path.bas | ename(os.getcwd()) != 'scripts':
print("Run this from the scripts directory")
exit()
#make a "sandox" for the large number of files being generated
try:
os.mkdir('simfiles')
except OSError:
| pass
os.chdir('simfiles')
subprocess.call(['pynml-channelanalysis', '-temperature', '34', '-minV', '-55', '-maxV', '80', '-duration', '600', '-clampBaseVoltage', '-55', '-clampDuration', '580', '-stepTargetVoltage', '10', '-erev', '50', '-caConc', '0.001', '-clampDuration', '600', '-stepTargetVoltage', '5', '-ivCurve', '../../models/EGL-19.channel.nml'])
|
bnaul/scikit-learn | sklearn/feature_selection/tests/test_rfe.py | Python | bsd-3-clause | 16,445 | 0 | """
Testing Recursive feature elimination
"""
from operator import attrgetter
import pytest
import numpy as np
from numpy.testing import assert_array_almost_equal, assert_array_equal
from scipy import sparse
from sklearn.feature_selection import RFE, RFECV
from sklearn.datasets import load_iris, make_friedman1
from sklearn.metrics import zero_one_loss
from sklearn.svm im | port SVC, SVR, LinearSVR
from sk | learn.linear_model import LogisticRegression
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import GroupKFold
from sklearn.compose import TransformedTargetRegressor
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.utils import check_random_state
from sklearn.utils._testing import ignore_warnings
from sklearn.metrics import make_scorer
from sklearn.metrics import get_scorer
class MockClassifier:
"""
Dummy classifier to test recursive feature elimination
"""
def __init__(self, foo_param=0):
self.foo_param = foo_param
def fit(self, X, y):
assert len(X) == len(y)
self.coef_ = np.ones(X.shape[1], dtype=np.float64)
return self
def predict(self, T):
return T.shape[0]
predict_proba = predict
decision_function = predict
transform = predict
def score(self, X=None, y=None):
return 0.
def get_params(self, deep=True):
return {'foo_param': self.foo_param}
def set_params(self, **params):
return self
def _get_tags(self):
return {}
def test_rfe_features_importance():
generator = check_random_state(0)
iris = load_iris()
X = np.c_[iris.data, generator.normal(size=(len(iris.data), 6))]
y = iris.target
clf = RandomForestClassifier(n_estimators=20,
random_state=generator, max_depth=2)
rfe = RFE(estimator=clf, n_features_to_select=4, step=0.1)
rfe.fit(X, y)
assert len(rfe.ranking_) == X.shape[1]
clf_svc = SVC(kernel="linear")
rfe_svc = RFE(estimator=clf_svc, n_features_to_select=4, step=0.1)
rfe_svc.fit(X, y)
# Check if the supports are equal
assert_array_equal(rfe.get_support(), rfe_svc.get_support())
def test_rfe():
generator = check_random_state(0)
iris = load_iris()
X = np.c_[iris.data, generator.normal(size=(len(iris.data), 6))]
X_sparse = sparse.csr_matrix(X)
y = iris.target
# dense model
clf = SVC(kernel="linear")
rfe = RFE(estimator=clf, n_features_to_select=4, step=0.1)
rfe.fit(X, y)
X_r = rfe.transform(X)
clf.fit(X_r, y)
assert len(rfe.ranking_) == X.shape[1]
# sparse model
clf_sparse = SVC(kernel="linear")
rfe_sparse = RFE(estimator=clf_sparse, n_features_to_select=4, step=0.1)
rfe_sparse.fit(X_sparse, y)
X_r_sparse = rfe_sparse.transform(X_sparse)
assert X_r.shape == iris.data.shape
assert_array_almost_equal(X_r[:10], iris.data[:10])
assert_array_almost_equal(rfe.predict(X), clf.predict(iris.data))
assert rfe.score(X, y) == clf.score(iris.data, iris.target)
assert_array_almost_equal(X_r, X_r_sparse.toarray())
@pytest.mark.parametrize("n_features_to_select", [-1, 2.1])
def test_rfe_invalid_n_features_errors(n_features_to_select):
clf = SVC(kernel="linear")
iris = load_iris()
rfe = RFE(estimator=clf, n_features_to_select=n_features_to_select,
step=0.1)
msg = f"n_features_to_select must be .+ Got {n_features_to_select}"
with pytest.raises(ValueError, match=msg):
rfe.fit(iris.data, iris.target)
def test_rfe_percent_n_features():
# test that the results are the same
generator = check_random_state(0)
iris = load_iris()
X = np.c_[iris.data, generator.normal(size=(len(iris.data), 6))]
y = iris.target
# there are 10 features in the data. We select 40%.
clf = SVC(kernel="linear")
rfe_num = RFE(estimator=clf, n_features_to_select=4, step=0.1)
rfe_num.fit(X, y)
rfe_perc = RFE(estimator=clf, n_features_to_select=0.4, step=0.1)
rfe_perc.fit(X, y)
assert_array_equal(rfe_perc.ranking_, rfe_num.ranking_)
assert_array_equal(rfe_perc.support_, rfe_num.support_)
def test_rfe_mockclassifier():
generator = check_random_state(0)
iris = load_iris()
X = np.c_[iris.data, generator.normal(size=(len(iris.data), 6))]
y = iris.target
# dense model
clf = MockClassifier()
rfe = RFE(estimator=clf, n_features_to_select=4, step=0.1)
rfe.fit(X, y)
X_r = rfe.transform(X)
clf.fit(X_r, y)
assert len(rfe.ranking_) == X.shape[1]
assert X_r.shape == iris.data.shape
def test_rfecv():
generator = check_random_state(0)
iris = load_iris()
X = np.c_[iris.data, generator.normal(size=(len(iris.data), 6))]
y = list(iris.target) # regression test: list should be supported
# Test using the score function
rfecv = RFECV(estimator=SVC(kernel="linear"), step=1)
rfecv.fit(X, y)
# non-regression test for missing worst feature:
assert len(rfecv.grid_scores_) == X.shape[1]
assert len(rfecv.ranking_) == X.shape[1]
X_r = rfecv.transform(X)
# All the noisy variable were filtered out
assert_array_equal(X_r, iris.data)
# same in sparse
rfecv_sparse = RFECV(estimator=SVC(kernel="linear"), step=1)
X_sparse = sparse.csr_matrix(X)
rfecv_sparse.fit(X_sparse, y)
X_r_sparse = rfecv_sparse.transform(X_sparse)
assert_array_equal(X_r_sparse.toarray(), iris.data)
# Test using a customized loss function
scoring = make_scorer(zero_one_loss, greater_is_better=False)
rfecv = RFECV(estimator=SVC(kernel="linear"), step=1, scoring=scoring)
ignore_warnings(rfecv.fit)(X, y)
X_r = rfecv.transform(X)
assert_array_equal(X_r, iris.data)
# Test using a scorer
scorer = get_scorer('accuracy')
rfecv = RFECV(estimator=SVC(kernel="linear"), step=1, scoring=scorer)
rfecv.fit(X, y)
X_r = rfecv.transform(X)
assert_array_equal(X_r, iris.data)
# Test fix on grid_scores
def test_scorer(estimator, X, y):
return 1.0
rfecv = RFECV(estimator=SVC(kernel="linear"), step=1, scoring=test_scorer)
rfecv.fit(X, y)
assert_array_equal(rfecv.grid_scores_, np.ones(len(rfecv.grid_scores_)))
# In the event of cross validation score ties, the expected behavior of
# RFECV is to return the FEWEST features that maximize the CV score.
# Because test_scorer always returns 1.0 in this example, RFECV should
# reduce the dimensionality to a single feature (i.e. n_features_ = 1)
assert rfecv.n_features_ == 1
# Same as the first two tests, but with step=2
rfecv = RFECV(estimator=SVC(kernel="linear"), step=2)
rfecv.fit(X, y)
assert len(rfecv.grid_scores_) == 6
assert len(rfecv.ranking_) == X.shape[1]
X_r = rfecv.transform(X)
assert_array_equal(X_r, iris.data)
rfecv_sparse = RFECV(estimator=SVC(kernel="linear"), step=2)
X_sparse = sparse.csr_matrix(X)
rfecv_sparse.fit(X_sparse, y)
X_r_sparse = rfecv_sparse.transform(X_sparse)
assert_array_equal(X_r_sparse.toarray(), iris.data)
# Verifying that steps < 1 don't blow up.
rfecv_sparse = RFECV(estimator=SVC(kernel="linear"), step=.2)
X_sparse = sparse.csr_matrix(X)
rfecv_sparse.fit(X_sparse, y)
X_r_sparse = rfecv_sparse.transform(X_sparse)
assert_array_equal(X_r_sparse.toarray(), iris.data)
def test_rfecv_mockclassifier():
generator = check_random_state(0)
iris = load_iris()
X = np.c_[iris.data, generator.normal(size=(len(iris.data), 6))]
y = list(iris.target) # regression test: list should be supported
# Test using the score function
rfecv = RFECV(estimator=MockClassifier(), step=1)
rfecv.fit(X, y)
# non-regression test for missing worst feature:
assert len(rfecv.grid_scores_) == X.shape[1]
assert len(rfecv.ranking_) == X.shape[1]
def test_rfecv_verbose_output():
# Check verbose=1 is producing an output.
from io import StringIO
import sys
sys.stdout = StringIO()
generator = check_random_state(0)
|
cherry-wb/SideTools | examples/tutorial/t8.py | Python | apache-2.0 | 3,265 | 0.001225 | #!/usr/bin/env python
# PyQt tutorial 8
import sys
from PySide import QtCore, QtGui
class LCDRange(QtGui.QWidget):
def __init__(self, parent=None):
QtGui.QWidget.__init__(self, parent)
lcd = QtGui.QLCDNumber(2)
self.slider = QtGui.QSlider(QtCore.Qt.Horizontal)
self.slider.setRange(0, 99)
self.slider.setValue(0)
self.connect(self.slider, QtCore.SIGNAL("valueChanged(int)"),
lcd, QtCore.SLOT("display(int)"))
self.connect(self.slider, QtCore.SIGNAL("valueChanged(int)"),
self, QtCore.SIGNAL("valueChanged(int)"))
layout = QtGui.QVBoxLayout()
layout.addWidget(lcd)
layout.addWidget(self.slider)
self.setLayout(layout)
self.setFocusProxy(self.slider)
def value(self):
return self.slider.value()
def setValue(self, value):
self.slider.setValue(value) |
def setRange(self, minValue, maxValue):
if minValue < 0 or maxValue > 99 or minValue > maxValue:
QtCore.qWarning("LCDRange.setRange(%d, %d)\n"
"\tRange must be 0..99\n"
"\tand minValue must not be greater than maxValue" % (minValue, maxValue))
return
self.slider.setRange(minValue, maxValu | e)
class CannonField(QtGui.QWidget):
def __init__(self, parent=None):
QtGui.QWidget.__init__(self, parent)
self.currentAngle = 45
self.setPalette(QtGui.QPalette(QtGui.QColor(250, 250, 200)))
self.setAutoFillBackground(True)
def angle(self):
return self.currentAngle
def setAngle(self, angle):
if angle < 5:
angle = 5
if angle > 70:
angle = 70;
if self.currentAngle == angle:
return
self.currentAngle = angle
self.update()
self.emit(QtCore.SIGNAL("angleChanged(int)"), self.currentAngle)
def paintEvent(self, event):
painter = QtGui.QPainter(self)
painter.drawText(200, 200, "Angle = %d" % self.currentAngle)
class MyWidget(QtGui.QWidget):
def __init__(self, parent=None):
QtGui.QWidget.__init__(self, parent)
quit = QtGui.QPushButton("Quit")
quit.setFont(QtGui.QFont("Times", 18, QtGui.QFont.Bold))
self.connect(quit, QtCore.SIGNAL("clicked()"),
QtGui.qApp, QtCore.SLOT("quit()"))
angle = LCDRange()
angle.setRange(5, 70)
cannonField = CannonField()
self.connect(angle, QtCore.SIGNAL("valueChanged(int)"),
cannonField.setAngle)
self.connect(cannonField, QtCore.SIGNAL("angleChanged(int)"),
angle.setValue)
gridLayout = QtGui.QGridLayout()
gridLayout.addWidget(quit, 0, 0)
gridLayout.addWidget(angle, 1, 0)
gridLayout.addWidget(cannonField, 1, 1, 2, 1)
gridLayout.setColumnStretch(1, 10)
self.setLayout(gridLayout)
angle.setValue(60)
angle.setFocus()
app = QtGui.QApplication(sys.argv)
widget = MyWidget()
widget.setGeometry(100, 100, 500, 355)
widget.show()
sys.exit(app.exec_())
|
makerplane/FIX-Gateway | tests/test_database.py | Python | gpl-2.0 | 20,881 | 0.001724 | # Copyright (c) 2018 Phil Birkelbach
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import unittest
import io
import time
import fixgw.database as database
# This is a poorly formatted example of a database configuration file.
# it should test leading/trailing spaces blank lines etc.
minimal_config = """
variables:
a: 8 #Generic Analogs
entries:
- key: ANLGa
description: Generic Analog %a
type: float
min: 0.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 2000
"""
minimal_list = []
for x in range(8):
minimal_list.append("ANLG{}".format(x+1))
variable_config = """
variables:
e: 4 # Engines
c: 6 # Cylinders
t: 20 # Fuel Tanks
entries:
- key: EGTec
description: Exhaust Gas Temp Engine %e, Cylinder %c
type: float
min: 0.0
max: 1000.0
units: degC
initial: 0.0
tol: 2000
aux: [Min,Max]
- key: FUELQt
description: Fuel Quantity Tank %t
type: float
min: 0.0
max: 200.0
units: gal
initial: 0.0
tol: 2000
aux: [Min,Max,lowWarn,lowAlarm]
"""
variable_list = []
for e in range(4):
for c in range(6):
variable_list.append("EGT{}{}".format(e+1,c+1))
for t in range(20):
variable_list.append("FUELQ{}".format(t+1))
variable_list.sort()
general_config = """
variables:
e: 1 # Engines
c: 6 # Cylinders
a: 8 # Generic Analogs
b: 16 # Generic Buttons
r: 1 # Encoders
t: 2 # Fuel Tanks
entries:
- key: ANLGa
description: Generic Analog %a
type: float
min: 0.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 2000
- key: BTNb
description: Generic Button %b
type: bool
tol: 0
- key: ENCr
description: Generic Encoder %r
type: int
min: -32768
max: 32767
units: Pulses
initial: 0
tol: 0
- key: IAS
description: Indicated Airspeed
type: float
min: 0.0
max: 1000.0
units: knots
initial: 0.0
tol: 2000
aux: [Min,Max,V1,V2,Vne,Vfe,Vmc,Va,Vno,Vs,Vs0,Vx,Vy]
- key: IASW
description: Indicated Airspeed Warning
type: int
min: 0
max: 5
units: warninglevel
initial: 0
tol: 2000
- key: TAS
description: True Airspeed
type: float
min: 0.0
max: 2000.0
units: knots
initial: 0.0
tol: 2000
- key: CAS
description: True Airspeed
type: float
min: 0.0
max: 2000.0
units: knots
initial: 0.0
tol: 2000
- key: GS
description: Ground Speed
type: float
min: 0.0
max: 2000.0
units: knots
initial: 0.0
tol: 2000
- key: ALT
description: Indicated Altitude
type: float
min: -1000.0
max: 60000.0
units: ft
initial: 0.0
tol: 2000
- key: TALT
description: True Altitude
type: float
min: -1000.0
max: 60000.0
units: ft
initial: 0.0
tol: 2000
- key: DALT
description: Density Altitude
type: float
min: -1000.0
max: 60000.0
units: ft
initial: 0.0
tol: 2000
- key: BARO
description: Altimeter Setting
type: float
min: 0.0
max: 35.0
units: inHg
initial: 29.92
tol: 2000
- key: AIRPRESS
description: Air Pressure
type: float
min: 0.0
max: 200000.0
units: Pa
initial: 101325.0
tol: 2000
- key: VS
description: Vertical Speed
type: float
min: -30000.0
max: 30000.0
units: ft/min
initial: 0.0
tol: 2000
aux: [Min,Max]
- key: HEAD
description: Current Aircraft Magnetic Heading
type: float
min: 0.0
max: 359.9
units: deg
initial: 0.0
tol: 2000
- key: TRACK
description: Current Aircraft Bearing
type: float
min: 0.0
max: 359.9
units: deg
initial: 0.0
tol: 2000
- key: TRACKM
description: Current Aircraft Magnetic Bearing
type: float
min: 0.0
max: 359.9
units: deg
initial: 0.0
tol: 2000
- key: COURSE
description: Selected Course
type: float
min: 0.0
max: 359.9
units: deg
initial: 0.0
tol: 2000
- key: CDI
description: Course Deviation Indicator
type: float
min: -1.0
max: 1.0
initial: 0.0
tol: 2000
- key: GSI
description: Glideslope Indicator
type: float
min: -1.0
max: 1.0
initial: 0.0
tol: 2000
- key: XTRACK
description: Cross Track Error
type: float
min: 0.0
max: 100.0
units: nM
initial: 0.0
tol: 2000
- key: OAT
description: Outside Air Temperature
type: float
min: -100.0
max: 100.0
units: degC
initial: 0.0
tol: 2000
aux: [Min,Max,lowWarn]
- key: CAT
description: Cabin Air Temperature
type: float
min: -100.0
max: 100.0
units: degC
initial: 0.0
tol: 2000
aux: [Min,Max,lowWarn,highWarn,lowAlarm,highAlarm]
- key: OATW
description: Outside Air Temperature Warning
type: int
min: 0
max: 5
units: warninglevel
initial: 0
tol: 2000
- key: ROLL
description: Roll Angle
type: float
min: -180.0
max: 180.0
units: deg
initial: 0.0
tol: 200
- key: PITCH
description: Pitch Angle
type: float
min: -90.0
max: 90.0
units: deg
initial: 0.0
tol: 200
- key: ORISYSW
description: Orientation System Warning
type: int
min: 0
max: 5
units: warninglevel
initial: 0
tol: 2000
- key: GYROW
description: Gyroscope sensor Warning
type: int
min: 0
max: 5
units: warninglevel
initial: 0
tol: 2000
- key: ACCELW
description: Acceleration sensor Warning
type: int
min: 0
max: 5
units: warninglevel
initial: 0
tol: 2000
- key: MAGW
description: Magnetic sensor Warning
type: int
min: 0
max: 5
units: warninglevel
initial: 0
tol: 2000
- key: PITCHSET
description: Pitch angle setting
type: float
min: -180.0
max: 180.0
units: deg
initial: 0.0
tol: 200
- key: YAW
description: Yaw Angle
type: float
| min: -180.0
ma | x: 180.0
units: deg
initial: 0.0
tol: 200
- key: AOA
description: Angle of attack
type: float
min: -180.0
max: 180.0
units: deg
initial: 0.0
tol: 200
aux:
- Min
- Max
- 0g
- Warn
- Stall
- key: CTLPTCH
description: Pitch Control
type: float
min: -1.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 200
- key: CTLROLL
description: Roll Control
type: float
min: -1.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 200
- key: CTLYAW
description: Yaw Control (Rudder)
type: float
min: -1.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 200
- key: CTLCOLL
description: Collective Control
type: float
min: -1.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 200
- key: CTLATP
description: AntiTorque Pedal Ctrl
type: float
min: -1.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 200
- key: CTLFLAP
description: Flap Control
type: float
min: -1.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 200
- key: CTLLBRK
description: Left Brake Control
type: float
min: 0.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 200
- key: CTLRBRK
description: Right Brake Control
type: float
min: 0.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 1000
- key: ANORM
description: Normal Acceleration
type: float
min: -30.0
max: 30.0
units: g
initial: 0.0
tol: 200
- key: ALAT
description: Lateral Acceleration
type: float
min: -30.0
max: 30.0
units: g
initial: 0.0
tol: 200
- key: ALONG
description: Longitudinal Acceleration
type: float
min: -30.0
max: 30.0
units: g
initial: 0.0
tol: 200
- key: THRe
description: Throttle Control Engine %e
type: float
min: 0.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 1000
- key: MIXe
description: Mixture Control Engine %e
type: float
min: 0.0
max: 1.0
units: '%/100'
initial: 0.0
tol: 1000
- key: OILPe
description: Oil Pressure Engine %e
type: float
min: 0.0
max: 200.0
units: psi
initial |
mitya57/debian-buildbot | buildbot/db/buildsets.py | Python | gpl-2.0 | 7,930 | 0.000883 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
"""
Support for buildsets in the database
"""
import sqlalchemy as sa
from buildbot.db import base
from buildbot.util import datetime2epoch
from buildbot.util import epoch2datetime
from buildbot.util import json
from twisted.internet import reactor
class BsDict(dict):
pass
class BuildsetsConnectorComponent(base.DBConnectorComponent):
# Documentation is in developer/database.rst
def addBuildset(self, sourcestampsetid, reason, properties, builderNames,
external_idstring=None, _reactor=reactor):
def thd(conn):
buildsets_tbl = self.db.model.buildsets
submitted_at = _reactor.seconds()
self.check_length(buildsets_tbl.c.reason, reason)
self.check_length(buildsets_tbl.c.external_idstring,
external_idstring)
transaction = conn.begin()
# insert the buildset itself
r = conn.execute(buildsets_tbl.insert(), dict(
sourcestampsetid=sourcestampsetid, submitted_at=submitted_at,
reason=reason, complete=0, complete_at=None, results=-1,
external_idstring=external_idstring))
bsid = r.inserted_primary_key[0]
# add any properties
if properties:
bs_props_tbl = self.db.model.buildset_properties
inserts = [
dict(buildsetid=bsid, property_name=k,
property_value=json.dumps([v, s]))
for k, (v, s) in properties.iteritems()]
for i in inserts:
self.check_length(bs_props_tbl.c.property_name,
i['property_name'])
conn.execute(bs_props_tbl.insert(), inserts)
# and finish with a build request for each builder. Note that
# sqlalchemy and the Python DBAPI do not provide a way to recover
# inserted IDs from a multi-row insert, so this is done one row at
# a time.
brids = {}
br_tbl = self.db.model.buildrequests
ins = br_tbl.insert()
for buildername in builderNames:
self.check_length(br_tbl.c.buildername, buildername)
r = conn.execute(ins,
dict(buildsetid=bsid, buildername=buildername, priority=0,
claimed_at=0, claimed_by_name=None,
claimed_by_incarnation=None, complete=0, results=-1,
submitted_at=submitted_at, complete_at=None))
brids[buildername] = r.inserted_primary_key[0]
transaction.commit()
return (bsid, brids)
return self.db.pool.do(thd)
def completeBuildset(self, bsid, results, complete_at=None,
_reactor=reactor):
if complete_at is not None:
complete_at = datetime2epoch(complete_at)
else:
complete_at = _reactor.seconds()
def thd(conn):
tbl = self.db.model.buildsets
q = tbl.update(whereclause=(
(tbl.c.id == bsid) &
((tbl.c.complete == None) | (tbl.c.complete != 1))))
res = conn.execute(q,
complete=1,
results=results,
complete_at=complete_at)
if res.rowcount != 1:
raise KeyError
return self.db | .pool.do(thd)
def getBuildset(self, bsid):
def thd(conn):
| bs_tbl = self.db.model.buildsets
q = bs_tbl.select(whereclause=(bs_tbl.c.id == bsid))
res = conn.execute(q)
row = res.fetchone()
if not row:
return None
return self._row2dict(row)
return self.db.pool.do(thd)
def getBuildsets(self, complete=None):
def thd(conn):
bs_tbl = self.db.model.buildsets
q = bs_tbl.select()
if complete is not None:
if complete:
q = q.where(bs_tbl.c.complete != 0)
else:
q = q.where((bs_tbl.c.complete == 0) |
(bs_tbl.c.complete == None))
res = conn.execute(q)
return [self._row2dict(row) for row in res.fetchall()]
return self.db.pool.do(thd)
def getRecentBuildsets(self, count, branch=None, repository=None,
complete=None):
def thd(conn):
bs_tbl = self.db.model.buildsets
ss_tbl = self.db.model.sourcestamps
j = sa.join(self.db.model.buildsets,
self.db.model.sourcestampsets)
j = j.join(self.db.model.sourcestamps)
q = sa.select(columns=[bs_tbl], from_obj=[j],
distinct=True)
q = q.order_by(sa.desc(bs_tbl.c.submitted_at))
q = q.limit(count)
if complete is not None:
if complete:
q = q.where(bs_tbl.c.complete != 0)
else:
q = q.where((bs_tbl.c.complete == 0) |
(bs_tbl.c.complete == None))
if branch:
q = q.where(ss_tbl.c.branch == branch)
if repository:
q = q.where(ss_tbl.c.repository == repository)
res = conn.execute(q)
return list(reversed([self._row2dict(row)
for row in res.fetchall()]))
return self.db.pool.do(thd)
def getBuildsetProperties(self, buildsetid):
"""
Return the properties for a buildset, in the same format they were
given to L{addBuildset}.
Note that this method does not distinguish a nonexistent buildset from
a buildset with no properties, and returns C{{}} in either case.
@param buildsetid: buildset ID
@returns: dictionary mapping property name to (value, source), via
Deferred
"""
def thd(conn):
bsp_tbl = self.db.model.buildset_properties
q = sa.select(
[bsp_tbl.c.property_name, bsp_tbl.c.property_value],
whereclause=(bsp_tbl.c.buildsetid == buildsetid))
l = []
for row in conn.execute(q):
try:
properties = json.loads(row.property_value)
l.append((row.property_name,
tuple(properties)))
except ValueError:
pass
return dict(l)
return self.db.pool.do(thd)
def _row2dict(self, row):
def mkdt(epoch):
if epoch:
return epoch2datetime(epoch)
return BsDict(external_idstring=row.external_idstring,
reason=row.reason, sourcestampsetid=row.sourcestampsetid,
submitted_at=mkdt(row.submitted_at),
complete=bool(row.complete),
complete_at=mkdt(row.complete_at), results=row.results,
bsid=row.id)
|
ChinaMassClouds/copenstack-server | openstack/src/ceilometer-2014.2.2/ceilometer/alarm/notifier/trust.py | Python | gpl-2.0 | 2,433 | 0 | #
# Copyright 2014 eNovance
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Rest alarm notifier with trusted authentication."""
from keystoneclient.v3 import client as keystone_client
from oslo.config import cfg
from six.moves.urllib import parse
from ceilometer.alarm.notifier import rest
cfg.CONF.import_opt('http_timeout', 'ceilometer.service')
cfg.CONF.import_group('service_credentials', 'ceilometer.service')
class TrustRestAlarmNotifier(rest.RestAlarmNotifier):
    """Notifier supporting keystone trust authentication.

    This alarm notifier is intended to be used to call an endpoint using
    keystone authentication. It uses the ceilometer service user to
    authenticate using the trust ID provided.

    The URL must be in the form trust+http://trust-id@host/action.
    """

    @staticmethod
    def notify(action, alarm_id, previous, current, reason, reason_data):
        # The "username" component of the URL carries the trust ID.
        trust_id = action.username
        creds = cfg.CONF.service_credentials
        # Trusts are only available through the v3 identity API.
        auth_url = creds.os_auth_url.replace("v2.0", "v3")
        client = keystone_client.Client(
            username=creds.os_username,
            password=creds.os_password,
            cacert=creds.os_cacert,
            auth_url=auth_url,
            region_name=creds.os_region_name,
            insecure=creds.insecure,
            timeout=cfg.CONF.http_timeout,
            trust_id=trust_id)
        # Drop the fake "trust-id@" user from the host part of the URL.
        netloc = action.netloc.split("@")[1]
        # Drop the "trust+" prefix from the scheme.
        scheme = action.scheme[6:]
        rebuilt_action = parse.SplitResult(scheme, netloc, action.path,
                                           action.query, action.fragment)
        headers = {'X-Auth-Token': client.auth_token}
        rest.RestAlarmNotifier.notify(
            rebuilt_action, alarm_id, previous, current, reason, reason_data,
            headers)
|
GbalsaC/bitnamiP | venv/lib/python2.7/site-packages/lepl/stream/core.py | Python | agpl-3.0 | 12,016 | 0.006491 |
# The contents of this file are subject to the Mozilla Public License
# (MPL) Version 1.1 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License
# at http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
# the License for the specific language governing rights and
# limitations under the License.
#
# The Original Code is LEPL (http://www.acooke.org/lepl)
# The Initial Developer of the Original Code is Andrew Cooke.
# Portions created by the Initial Developer are Copyright (C) 2009-2010
# Andrew Cooke (andrew@acooke.org). All Rights Reserved.
#
# Alternatively, the contents of this file may be used under the terms
# of the LGPL license (the GNU Lesser General Public License,
# http://www.gnu.org/licenses/lgpl.html), in which case the provisions
# of the LGPL License are applicable instead of those above.
#
# If you wish to allow use of your version of this file only under the
# term | s of the LGPL License and not to allow others to use your version
# of this file under the MPL, indicate your decision by deleting the
# provisions above and replace them with the notice and other provisions
# required by the LGPL License. If you do not delete the | provisions
# above, a recipient may use your version of this file under either the
# MPL or the LGPL License.
'''
Default implementations of the stream classes.
A stream is a tuple (state, helper), where `state` will vary from location to
location, while `helper` is an "unchanging" instance of `StreamHelper`,
defined below.
For simple streams state can be a simple integer and this approach avoids the
repeated creation of objects. More complex streams may choose to not use
the state at all, simply creating a new helper at each point.
'''
from abc import ABCMeta
from lepl.support.lib import fmt
#class _SimpleStream(metaclass=ABCMeta):
# Python 2.6
# pylint: disable-msg=W0105, C0103
# The ABC is created by calling ABCMeta directly so the same source works on
# both Python 2 and 3 (the commented-out `metaclass=` form above is py3-only).
_StreamHelper = ABCMeta('_StreamHelper', (object, ), {})
'''ABC used to identify streams.'''
DUMMY_HELPER = object()
'''Allows tests to specify an arbitrary helper in results.'''
# Named indices into the (offset, lineno, char) tuple returned by
# `StreamHelper.delta`.
OFFSET, LINENO, CHAR = range(3)
'''Indices into delta.'''
class StreamHelper(_StreamHelper):
'''
The interface that all helpers should implement.
'''
def __init__(self, id=None, factory=None, max=None, global_kargs=None,
cache_level=None):
from lepl.stream.factory import DEFAULT_STREAM_FACTORY
self.id = id if id is not None else hash(self)
self.factory = factory if factory else DEFAULT_STREAM_FACTORY
self.max = max if max else MutableMaxDepth()
self.global_kargs = global_kargs if global_kargs else {}
self.cache_level = 1 if cache_level is None else cache_level
    def __repr__(self):
        '''Simplify for comparison in tests'''
        # A fixed repr keeps expected test output stable regardless of the
        # helper's id, factory or other state.
        return '<helper>'
def __eq__(self, other):
return other is DUMMY_HELPER or super(StreamHelper, self).__eq__(other)
    def __hash__(self):
        # Defining __eq__ would otherwise disable inherited hashing; restore
        # identity-based hashing explicitly so helpers stay usable as keys.
        return super(StreamHelper, self).__hash__()
    def key(self, state, other):
        '''
        Generate an object that can be hashed (implements __hash__ and __eq__).
        See `HashKey`.
        '''
        # Abstract -- concrete stream helpers must implement this.
        raise NotImplementedError
    def kargs(self, state, prefix='', kargs=None):
        '''
        Generate a dictionary of values that describe the stream.  These
        may be extended by subclasses.  They are provided to
        `syntax_error_kargs`, for example.

        `prefix` modifies the property names

        `kargs` allows values to be provided.  These are *not* overwritten,
        so if there is a name clash the provided value remains.

        Note: Calculating this can be expensive; use only for error messages,
        not debug messages (that may be discarded).

        The following names will be defined (at a minimum).

        For these value the "global" prefix indicates the underlying stream
        when, for example, tokens are used (other values will be relative to
        the token).  If tokens etc are not in use then global and non-global
        values will agree.

        - data: a line representing the data, highlighting the current offset
        - global_data: as data, but for the entire sequence
        - text: as data, but without a "[...]" at the end
        - global_text: as text, but for the entire sequence
        - type: the type of the sequence
        - global_type: the type of the entire sequence
        - global_offset: a 0-based index into the underlying sequence

        These values are always local:

        - offset: a 0-based index into the sequence
        - rest: the data following the current point
        - repr: the current value, or <EOS>
        - str: the current value, or an empty string

        These values are always global:

        - filename: a filename, if available, or the type
        - lineno: a 1-based line number for the current offset
        - char: a 1-based character count within the line for the current offset
        - location: a summary of the current location
        '''
        # Abstract -- concrete stream helpers must implement this.
        raise NotImplementedError
def fmt(self, state, template, prefix='', kargs=None):
'''fmt a message using the expensive kargs function.'''
return fmt(template, **self.kargs(state, prefix=prefix, kargs=kargs))
    def debug(self, state):
        '''Generate an inexpensive debug message.'''
        # Abstract -- concrete stream helpers must implement this.
        raise NotImplementedError
    def next(self, state, count=1):
        '''
        Return (value, stream) where `value` is the next value (or
        values if count > 1) from the stream and `stream` is advanced to the
        next character.  Note that `value` is always a sequence (so if the
        stream is a list of integers, and `count`=1, then it will be a
        unitary list, for example).

        Should raise StopIteration when no more data are available.
        '''
        # Note: the default raises StopIteration (not NotImplementedError),
        # i.e. an unimplemented stream behaves as an exhausted one.
        raise StopIteration
    def join(self, state, *values):
        '''
        Join sequences of values into a single sequence.
        '''
        # Abstract -- concrete stream helpers must implement this.
        raise NotImplementedError
    def empty(self, state):
        '''
        Return true if no more data available.
        '''
        # Abstract -- concrete stream helpers must implement this.
        raise NotImplementedError
    def line(self, state, empty_ok):
        '''
        Return (values, stream) where `values` correspond to something
        like "the rest of the line" from the current point and `stream`
        is advanced to the point after the line ends.

        If `empty_ok` is true and we are at the end of a line, return an
        empty line, otherwise advance (and maybe raise a StopIteration).
        '''
        # Abstract -- concrete stream helpers must implement this.
        raise NotImplementedError
    def len(self, state):
        '''
        Return the remaining length of the stream.  Streams of unknown
        length (iterables) should raise a TypeError.
        '''
        # Abstract -- concrete stream helpers must implement this.
        raise NotImplementedError
    def stream(self, state, value, id_=None, max=None):
        '''
        Return a new stream that encapsulates the value given, starting at
        `state`.  IMPORTANT: the stream used is the one that corresponds to
        the start of the value.

        For example:

            (line, next_stream) = s_line(stream, False)
            token_stream = s_stream(stream, line) # uses stream, not next_stream

        This is used when processing Tokens, for example, or columns (where
        fragments in the correct column area are parsed separately).
        '''
        # Abstract -- concrete stream helpers must implement this.
        raise NotImplementedError
    def deepest(self):
        '''
        Return a stream that represents the deepest match.  The stream may be
        incomplete in some sense (it may not be possible to use it for
        parsing more data), but it will have usable fmt and kargs methods.
        '''
        # Abstract -- concrete stream helpers must implement this.
        raise NotImplementedError
def delta(self, state):
'''
Return the offset, lineno and char of the current point, relative to
the entire stream |
xyang619/AdmixSim | mssim2eigen.py | Python | gpl-3.0 | 3,816 | 0.019654 | '''
Name: mssim2eigen.py
Date: 2014-8-25
Version: 1.01
Author: Young
Description:
Convert the output of MS simulation into eigenstrat format, only deal with
    1 repeat situation
Input file: the output of MS simulation
Output files: prefix.ind prefix.snp prefix.geno
Arguments:
-h --help print help
-f --file filename name of MS output file [string]
-n --npops n,n1,n2 ... #subpopulations and #samples [integer]
-l --length L length of simulated sequence [integer]
-p --prefix prefix prefix of output files [string]
'''
import sys,random, getopt
def gen_allele():
return random.sample('AGCT',2)
def gen_ind(npop, ninds, prefix):
    """Write the eigenstrat individual file ``prefix.ind``.

    One line per sampled individual, formatted as
    ``SAM<pop>_<index>\\tU\\tPOP<pop>`` where populations and samples are
    numbered from 1.  ``npop`` is accepted for interface compatibility but
    the population count is taken from ``ninds``.
    """
    indfile = '{}.ind'.format(prefix)
    with open(indfile, 'w') as out:
        for pop_idx, nind in enumerate(ninds, start=1):
            for sample_idx in range(1, nind + 1):
                out.write('SAM{}_{}\tU\tPOP{}\n'.format(
                    pop_idx, sample_idx, pop_idx))
    print('Write indfile into {}'.format(indfile))
def gen_snp(posline, L, prefix):
    """Write the eigenstrat snp file ``prefix.snp``.

    ``posline`` is the ms "positions:" line; each fractional position is
    scaled to a physical position on a sequence of length ``L`` and to a
    genetic position assuming 1 Morgan = 100 Mb.  Alleles are drawn at
    random with ``gen_allele``.

    Fix: the original code rebound the name ``gp`` (the list of positions
    being iterated) to a float inside its own loop, which only worked by
    accident; distinct names are now used for the list and the per-site
    genetic position.  ``float(g)`` is also converted once per site.
    """
    snpfile = '{}.snp'.format(prefix)
    scale = L * 1.0e-8  # assume 1 Morgan = 100 Mb
    with open(snpfile, 'w') as out:
        positions = posline.split()[1:]
        for i, g in enumerate(positions, start=1):
            frac = float(g)
            phys_pos = int(frac * L)
            a1, a2 = gen_allele()
            gen_pos = frac * scale
            out.write('rs{}\t1\t{:.8f}\t{}\t{}\t{}\n'.format(
                i, gen_pos, phys_pos, a1, a2))
    print('Write snpfile into {}'.format(snpfile))
def gen(simfile, npop=1, ninds=None, L=1e7, prefix='sim'):
    """Convert one ms simulation output file into eigenstrat format.

    Writes ``prefix.ind``, ``prefix.snp`` and ``prefix.geno``.  Haplotype
    lines are read in consecutive pairs and summed to diploid genotypes.
    """
    gen_ind(npop, ninds, prefix)
    genofile = '{}.geno'.format(prefix)
    with open(simfile) as f, open(genofile, 'w') as gf:
        # The first five lines of ms output are header material.
        for _ in range(5):
            f.readline()
        # Line six is the "positions:" line.
        gen_snp(f.readline(), L, prefix)
        genotypes = []  # one row of genotypes per diploid individual
        while True:
            hap1 = f.readline()
            if not hap1:
                break
            row = [int(c) for c in hap1[:-1]]
            hap2 = f.readline()
            for idx, c in enumerate(hap2[:-1]):
                row[idx] += int(c)
            genotypes.append(row)
        # The .geno file is SNP-major: one line per site, one digit per
        # individual.
        for site in range(len(genotypes[0])):
            gf.write(''.join(repr(ind[site]) for ind in genotypes) + '\n')
    print('write genofile into {}'.format(genofile))
def usage():
    """Print the command line help message to stdout.

    Fix: corrected the user-facing typo "siutation" -> "situation".
    """
    print('''Description:
    Convert the output of MS simulation into eigenstrat format
    Note: currently only deal with 1 repeat situation
    Input file: the output of MS simulation
    Output files: prefix.ind prefix.snp prefix.geno
    Arguments:
    -h --help print help
    -f --file filename name of MS output file [string]
    -n --npops n,n1,n2 ... #subpopulations and #samples [integer]
    -l --length L length of simulated sequence [integer]
    -p --prefix prefix prefix of output files [string]
    ''')
def main():
    """Parse command line options and run the ms -> eigenstrat conversion.

    Fix: the getopt long options previously lacked the trailing '=' that
    marks them as taking an argument, so ``--file foo`` etc. were parsed
    as bare flags and their values were silently ignored (only the short
    forms -f/-n/-l/-p worked).
    """
    f = ''        # input ms output file
    n = 1         # number of subpopulations
    ns = []       # samples per subpopulation
    l = 1e7       # simulated sequence length
    p = 'sim'     # output file prefix
    try:
        opts, args = getopt.getopt(
            sys.argv[1:], 'hf:n:l:p:',
            ['help', 'file=', 'npops=', 'length=', 'prefix='])
    except getopt.GetoptError as err:
        print(err)
        usage()
        sys.exit(2)
    for o, a in opts:
        if o in ('-h', '--help'):
            usage()
            sys.exit(1)
        elif o in ('-f', '--file'):
            f = a
        elif o in ('-n', '--npops'):
            # Format: <npop>,<n1>,<n2>,... e.g. "2,10,12"
            lst = a.split(',')
            n = int(lst[0])
            ns = [int(k) for k in lst[1:]]
        elif o in ('-l', '--length'):
            l = int(a)
        elif o in ('-p', '--prefix'):
            p = a
    print(f, n, ns, l, p)
    assert (len(ns) == n), 'the number of populations are not equal'
    assert (len(f) > 0), 'Input file is empty'
    gen(f, n, ns, l, p)
if __name__=='__main__':
main()
|
openstack/storlets | tests/unit/swift_middleware/handlers/test_proxy.py | Python | apache-2.0 | 34,297 | 0 | # Copyright (c) 2010-2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import unittest
import itertools
from contextlib import contextmanager
from swift.common.swob import Request, HTTPOk, HTTPCreated, HTTPAccepted, \
HTTPNoContent, HTTPNotFound
from storlets.swift_middleware.handlers import StorletProxyHandler
from storlets.swift_middleware.handlers.proxy import REFERER_PREFIX
from tests.unit.swift_middleware.handlers import \
BaseTestStorletMiddleware, create_handler_config
@contextmanager
def fake_acc_info(acc_info):
    """Patch the proxy handler's get_account_info to return ``acc_info``."""
    patch_target = ('storlets.swift_middleware.handlers.proxy.'
                    'get_account_info')
    with mock.patch(patch_target) as fake_get_account_info:
        fake_get_account_info.return_value = acc_info
        yield
@contextmanager
def storlet_enabled():
    """Run the enclosed code against a storlet-enabled account."""
    enabled_info = {'meta': {'storlet-enabled': 'true'}}
    with fake_acc_info(enabled_info):
        yield
class TestStorletMiddlewareProxy(BaseTestStorletMiddleware):
    def setUp(self):
        # Configure the shared fixture so the storlet executes on the proxy.
        super(TestStorletMiddlewareProxy, self).setUp(exec_server='proxy')
    def test_load_app(self):
        """The middleware application should load without raising."""
        try:
            self.get_app(self.base_app, self.conf)
        except Exception:
            self.fail('Application loading got an error')
def get_request_response(self, target, method, headers=None, body=None):
# Ensure the body is byte format on py3, this is needed until
# swift's Request supports byte format body when body is None in args
body = body or b''
req = Request.blank(target, environ={'REQUEST_METHOD': method},
headers=headers, body=body)
return self.get_response(req)
    def test_GET_without_storlets(self):
        """Plain GETs (no X-Run-Storlet header) pass straight through."""
        def basic_get(path):
            self.base_app.register('GET', path, HTTPOk, body=b'FAKE APP')
            resp = self.get_request_response(path, 'GET')
            self.assertEqual('200 OK', resp.status)
            self.assertEqual(b'FAKE APP', resp.body)
            self.base_app.reset_all()
        # account, container and object level requests
        for target in ('AUTH_a', 'AUTH_a/c', 'AUTH_a/c/o'):
            path = '/'.join(['', 'v1', target])
            basic_get(path)
    def test_GET_with_storlets(self):
        """A storlet GET HEADs the storlet object then executes the GET."""
        # TODO(takashi): decide request path based on config value
        target = '/v1/AUTH_a/c/o'
        self.base_app.register('GET', target, HTTPOk, body=b'FAKE RESULT')
        storlet = '/v1/AUTH_a/storlet/Storlet-1.0.jar'
        self.base_app.register('GET', storlet, HTTPOk, headers={},
                               body=b'jar binary')
        acc_info = {'meta': {'storlet-enabled': 'true'}}
        with fake_acc_info(acc_info):
            headers = {'X-Run-Storlet': 'Storlet-1.0.jar'}
            resp = self.get_request_response(target, 'GET', headers=headers)
            self.assertEqual('200 OK', resp.status)
            self.assertEqual(b'FAKE RESULT', resp.body)
            calls = self.base_app.get_calls()
            # Make sure now we sent two requests to swift
            self.assertEqual(2, len(calls))
            # The first one is HEAD request to storlet object
            self.assertEqual('HEAD', calls[0][0])
            self.assertEqual(storlet, calls[0][1])
            # The last one is execution GET call
            self.assertEqual(target, calls[-1][1])
            self.assertIn('X-Run-Storlet', calls[-1][2])
    def test_GET_with_storlets_disabled_account(self):
        """Storlet requests against a non-enabled account are rejected."""
        target = '/v1/AUTH_a/c/o'
        acc_info = {'meta': {}}
        with fake_acc_info(acc_info):
            headers = {'X-Run-Storlet': 'Storlet-1.0.jar'}
            resp = self.get_request_response(target, 'GET', headers=headers)
            self.assertEqual('400 Bad Request', resp.status)
            # rejected before any backend request is made
            calls = self.base_app.get_calls()
            self.assertEqual(0, len(calls))
    def test_GET_with_storlets_object_404(self):
        """A missing target object propagates 404 after the storlet HEAD."""
        target = '/v1/AUTH_a/c/o'
        self.base_app.register('GET', target, HTTPNotFound)
        storlet = '/v1/AUTH_a/storlet/Storlet-1.0.jar'
        self.base_app.register('GET', storlet, HTTPOk, body=b'jar binary')
        with storlet_enabled():
            headers = {'X-Run-Storlet': 'Storlet-1.0.jar'}
            resp = self.get_request_response(target, 'GET', headers=headers)
            self.assertEqual('404 Not Found', resp.status)
            # storlet HEAD + target GET
            calls = self.base_app.get_calls()
            self.assertEqual(2, len(calls))
    def test_GET_with_storlets_and_http_range(self):
        """A plain HTTP Range header cannot be combined with a storlet."""
        target = '/v1/AUTH_a/c/o'
        with storlet_enabled():
            headers = {'X-Run-Storlet': 'Storlet-1.0.jar',
                       'Range': 'bytes=10-20'}
            resp = self.get_request_response(target, 'GET', headers=headers)
            self.assertEqual('400 Bad Request', resp.status)
    def test_GET_with_storlets_and_storlet_range(self):
        """X-Storlet-Range runs the storlet on the proxy over a sub-range."""
        target = '/v1/AUTH_a/c/o'
        self.base_app.register('GET', target, HTTPOk, body=b'FAKE APP')
        storlet = '/v1/AUTH_a/storlet/Storlet-1.0.jar'
        self.base_app.register('GET', storlet, HTTPOk, body=b'jar binary')
        with storlet_enabled():
            req_range = 'bytes=1-6'
            headers = {'X-Run-Storlet': 'Storlet-1.0.jar',
                       'X-Storlet-Run-On-Proxy': '',
                       'X-Storlet-Range': req_range}
            resp = self.get_request_response(target, 'GET', headers=headers)
            self.assertEqual('200 OK', resp.status)
            # bytes 1-6 of b'FAKE APP'
            self.assertEqual(b'AKE AP', resp.body)
            self.assertNotIn('Content-Range', resp.headers)
            self.assertEqual('bytes 1-6/8',
                             resp.headers['Storlet-Input-Range'])
            # the range is forwarded to the backend in both headers
            raw_req = self.base_app.get_calls('GET', target)[0]
            for key in ['Range', 'X-Storlet-Range']:
                self.assertEqual(raw_req[2][key], req_range)
    def test_GET_with_storlets_and_object_storlet_range(self):
        """Single-range requests belong to the object handler, so the proxy
        handler must leave the response untouched."""
        # Create a single range request that needs to be
        # processed by the object handler
        target = '/v1/AUTH_a/c/o'
        self.base_app.register('GET', target, HTTPOk, body=b'FAKE APP')
        storlet = '/v1/AUTH_a/storlet/Storlet-1.0.jar'
        self.base_app.register('GET', storlet, HTTPOk, body=b'jar binary')
        with storlet_enabled():
            req_range = 'bytes=1-6'
            headers = {'X-Run-Storlet': 'Storlet-1.0.jar',
                       'X-Storlet-Range': req_range}
            resp = self.get_request_response(target, 'GET', headers=headers)
            # We assert that nothing actually happens
            # by the proxy handler
            self.assertEqual('200 OK', resp.status)
            self.assertEqual(b'FAKE APP', resp.body)
    def test_GET_with_storlets_and_extra_resourece(self):
        """X-Storlet-Extra-Resources triggers a GET for each extra object."""
        target = '/v1/AUTH_a/c/o'
        self.base_app.register('GET', target, HTTPOk, body=b'FAKE APP')
        extra_target = '/v1/AUTH_a/c2/o2'
        self.base_app.register('GET', extra_target, HTTPOk, body=b'Whooa')
        storlet = '/v1/AUTH_a/storlet/Storlet-1.0.jar'
        self.base_app.register('GET', storlet, HTTPOk, body=b'jar binary')
        with storlet_enabled():
            headers = {'X-Run-Storlet': 'Storlet-1.0.jar',
                       'X-Storlet-Extra-Resources': '/c2/o2'}
            resp = self.get_request_response(target, 'GET', headers=headers)
            self.assertEqual('200 OK', resp.status)
            self.assertEqual(b'FAKE APP', resp.body)
            # GET target called
            self.assertTrue(any(self.base_app.get_calls('GET', target)))
            # GET extra target also called
            self.assertTrue(any(self.base_app.get_calls('GET', extra_target)))
def test_GET_slo_without_storlets(self):
target = '/v1/AUTH_a/ |
blackgnezdo/mailcrypt | tests/remailer/gtkwatcher.py | Python | gpl-2.0 | 11,898 | 0.002353 | #! /usr/bin/python
if __name__ == '__main__':
import pygtk
pygtk.require("2.0")
import time, cPickle
import gobject, gtk, gtk.glade
from watcher import Watcher
def time_string(latency):
    """Render a latency in seconds as a compact 'XhYmZs' string.

    None renders as '?'.  Hours are shown only when non-zero, minutes
    whenever hours or minutes are non-zero, and seconds always.
    """
    if latency is None:
        return "?"
    total = int(latency)
    hours, remainder = divmod(total, 3600)
    minutes, seconds = divmod(remainder, 60)
    text = ''
    if hours:
        text += '%dh' % hours
    if hours or minutes:
        text += '%dm' % minutes
    text += '%ds' % seconds
    return text
class WatcherGUI:
    def __init__(self, watcher):
        """Build the GUI from gtkwatcher.glade and wire it to ``watcher``.

        Registers this object as the watcher's gui, connects the menu and
        popup signal handlers, and builds the source/destination list
        models and tree views plus the message text area.
        """
        self.watcher = watcher
        watcher.gui = self
        # set to 1 by do_quit; polled by the (external) main loop
        self.done = 0
        xml = gtk.glade.XML('gtkwatcher.glade')
        self.xml = xml
        # main menu
        xml.signal_connect('do_poll', self.do_poll)
        xml.signal_connect('do_exit', self.do_quit)
        # source panel
        self.src_popup = xml.get_widget("src_popup")
        xml.signal_connect('do_src_abandon', self.do_src_abandon)
        xml.get_widget("source_message_options1").set_sensitive(0)
        self.src_age_item = xml.get_widget("src_age_item")
        self.src_age_item.set_sensitive(0)
        self.src_abandon_item = xml.get_widget("src_abandon_item")
        # dest panel
        self.dst_popup = xml.get_widget("dst_popup")
        xml.get_widget("dest_message_options1").set_sensitive(0)
        self.dst_sent_item = xml.get_widget("dst_sent_item")
        self.dst_sent_item.set_sensitive(0)
        self.dst_original_item = xml.get_widget("dst_original_item")
        self.dst_flush_item = xml.get_widget("dst_flush_item")
        xml.signal_connect('do_dst_flush', self.do_dst_flush)
        xml.signal_connect('do_dst_original', self.do_dst_original)
        # panel contents: columns are (message id, time text, message object)
        self.src_model = gtk.ListStore(gobject.TYPE_STRING,
                                       gobject.TYPE_STRING,
                                       gobject.TYPE_PYOBJECT)
        self.dst_model = gtk.ListStore(gobject.TYPE_STRING,
                                       gobject.TYPE_STRING,
                                       gobject.TYPE_PYOBJECT)
        view = xml.get_widget('src_treeview')
        view.connect("button_press_event", self.do_src_popup)
        view.set_model(self.src_model)
        r = gtk.CellRendererText()
        view.append_column(gtk.TreeViewColumn("Message ID", r, text=0))
        view.append_column(gtk.TreeViewColumn("Message Sent", r, text=1))
        sel = view.get_selection()
        sel.set_mode(gtk.SELECTION_SINGLE)
        sel.connect("changed", self.do_src_select)
        self.src_sel = sel
        view = xml.get_widget('dst_treeview')
        view.connect("button_press_event", self.do_dst_popup)
        view.set_model(self.dst_model)
        r = gtk.CellRendererText()
        view.append_column(gtk.TreeViewColumn("Message ID", r, text=0))
        view.append_column(gtk.TreeViewColumn("Latency", r, text=1))
        sel = view.get_selection()
        sel.set_mode(gtk.SELECTION_SINGLE)
        sel.connect("changed", self.do_dst_select)
        self.dst_sel = sel
        self.srcwin = xml.get_widget('srcwin')
        self.dstwin = xml.get_widget('dstwin')
        #self.src_clist.connect('select-row', self.do_src_select)
        #self.dst_clist.connect('select-row', self.do_dst_select)
        self.text = xml.get_widget('text1')
        self.text.set_wrap_mode(gtk.WRAP_NONE)
        self.textwin = xml.get_widget('textwin')
        xml.get_widget('window1').set_size_request(500,300)
        # populate the panels with the watcher's current state
        self.do_update()
    def do_quit(self, widget):
        """Menu handler: flag the external main loop to exit."""
        print "doing quit"
        self.done = 1
        # mainquit asserts, because we aren't actually in a mainloop
        #gtk.mainquit()
    def update_text(self, text, skipHeaders=0):
        """Replace the text-area contents with ``text``.

        If ``skipHeaders`` is true, scroll so the end of the buffer is
        visible (the intent, per the XXX below, is to skip past headers).
        """
        buf = self.text.get_buffer()
        buf.set_text(text)
        # now make the end of the buffer visible
        # XXX: this flashes. They removed freeze/thaw.. how to fix?
        # XXX: if skipHeaders, find the first blank line and put that at top
        iter = buf.get_iter_at_line(-1)
        #print iter.get_line()
        # turn it into a mark, as scroll_to_iter depends upon height
        # calculations that are done in an idle task, so it won't get it right
        # until later
        mark = buf.create_mark("end", iter, 0)
        if skipHeaders:
            self.text.scroll_to_mark(mark, within_margin=0)
    def do_src_select(self, sel):
        """Selection handler: show the selected outstanding message's text."""
        model, iter = sel.get_selected()
        if not iter:
            return # deselected
        m = model.get_value(iter, 2)
        # get the message text from the Outstanding list
        text = m.data
        self.update_text(text)
        # need to deselect the one in the other list so we can sense when it
        # becomes reselected
        self.dst_sel.unselect_all()
    def do_src_popup(self, view, event):
        """Right-click handler for the source panel: show its context menu,
        updating the menu labels for the message under the pointer."""
        # button 3 == right click
        if event.button != 3:
            return
        pathset = view.get_path_at_pos(event.x, event.y)
        if pathset:
            path, viewcol, cell_x, cell_y = pathset
            iter = self.src_model.get_iter(path)
            m = self.src_model.get_value(iter, 2)
            age = self.watcher.age(m.msgid)
            label = self.src_age_item.get_child()
            label.set_text("Age[%d]: %s" % (m.msgid, time_string(age)))
            label = self.src_abandon_item.get_child()
            label.set_text("Abandon Message [%d]" % m.msgid)
            self.src_abandon_item.set_sensitive(1)
        else:
            # click on empty space: neutral labels, abandon disabled
            label = self.src_age_item.get_child()
            label.set_text("Age: --")
            label = self.src_abandon_item.get_child()
            label.set_text("Abandon Message")
            self.src_abandon_item.set_sensitive(0)
        self.src_popup.popup(None, None, None, event.button, event.time)
def do_src_abandon(self, menuitem):
# which message? find the selection
model, iter = self.src_sel.get_selected()
if not iter:
print "abandon, no iter!"
return
m = model.get_value(iter, 2)
print "abandon msgid", m.msgid
self.watcher.abandon(m.msgid)
self.do_update()
    def do_dst_popup(self, view, event):
        """Right-click handler for the destination panel: show its context
        menu, updating the labels for the message under the pointer."""
        # button 3 == right click
        if event.button != 3:
            return
        pathset = view.get_path_at_pos(event.x, event.y)
        if pathset:
            path, viewcol, cell_x, cell_y = pathset
            iter = self.dst_model.get_iter(path)
            m = self.dst_model.get_value(iter, 2)
            txtime = self.watcher.txtime(m.msgid)
            sent = time.strftime("%H:%M %d %b %Y", time.localtime(txtime))
            label = self.dst_sent_item.get_child()
            label.set_text("Sent[%d]: %s" % (m.msgid, sent))
            self.dst_original_item.set_sensitive(1)
            label = self.dst_original_item.get_child()
            label.set_text("See Original [%d]" % m.msgid)
            self.dst_flush_item.set_sensitive(1)
            label = self.dst_flush_item.get_child()
            label.set_text("Flush Message [%d]" % m.msgid)
        else:
            # click on empty space: neutral labels, actions disabled
            label = self.dst_sent_item.get_child()
            label.set_text("Sent: --")
            self.dst_original_item.set_sensitive(0)
            label = self.dst_original_item.get_child()
            label.set_text("See Original")
            self.dst_flush_item.set_sensitive(0)
            label = self.dst_flush_item.get_child()
            label.set_text("Flush Message")
        self.dst_popup.popup(None, None, None, event.button, event.time)
def do_dst_flush(self, menuitem):
# which message? find the selection
model, iter = self.dst_sel.get_selected()
if not iter:
return
m = model.get_value(iter, 2)
print "flush msgid", m.msgid
self.watcher.flush(m.msgid)
self.do_update()
def do_dst_original(self, menuitem):
# which message? find the selection
model, iter = self.dst_sel.get_selected()
if not iter:
return
dst_msg = model.get_value(iter, 2)
src_msg = self.watcher.source.msgs.get(dst_msg.msgid, None)
if src_ |
stoewer/nixpy | docs/source/examples/multipleROIs.py | Python | bsd-3-clause | 4,189 | 0.001433 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Copyright © 2014 German Neuroinformatics Node (G-Node)
All rights reserved.
Redistribution and use | in source and binary forms, with or without
modification, are permitted under the terms of the BSD License. See
LICENSE file in the root of the Project.
Author: Jan Grewe <jan.grewe@g-node.org>
This tutorial shows how to store image data in nix-files.
See https://github.com/G-node/nix/wiki for more information.
We use the "Lenna" image in this tutorial.
"Lenna" by Original full portrait: "Playmate of the Month". Playboy
Magazine. November 1 | 972, photographed by Dwight Hooker.This 512x512
electronic/mechanical scan of a section of the full portrait:
Alexander Sawchuk and two others[1] - The USC-SIPI image
database. Via Wikipedia -
http://en.wikipedia.org/wiki/File:Lenna.png#mediaviewer/File:Lenna.png
"""
import nixio as nix
import numpy as np
import Image as img
import matplotlib.pyplot as plt
def load_image():
    """Load 'lenna.png' and return (pixel array, list of channel names)."""
    image = img.open('lenna.png')
    pixels = np.array(image)
    # image.mode is e.g. 'RGB'; split it into single-character channel names
    return pixels, list(image.mode)
def draw_rect(img_data, position, extent):
img_data[position[0]:position[0] + extent[0], position[1], :] = 255
img_data[position[0]:position[0] + extent[0], position[1] + extent[1], :] = 255
img_data[position[0], position[1]:position[1] + extent[1], :] = 255
img_data[position[0] + extent[0], position[1]:position[1] + extent[1], :] = 255
return img_data
def plot_data(tag):
    """Display the tagged image with every region of interest outlined.

    Fix: the loop previously ranged over the module-level global
    ``positions`` (only defined when running as a script), so calling
    this function from another module raised NameError.  It now uses the
    tag's own positions.
    """
    data_array = tag.references[0]
    img_data = np.array(data_array[:], dtype='uint8')
    positions_data = tag.positions[:]
    extents_data = tag.extents[:]
    # one rectangle per stored (position, extent) pair
    for i in range(len(positions_data)):
        img_data = draw_rect(img_data, positions_data[i, :],
                             extents_data[i, :])
    new_img = img.fromarray(img_data)
    new_img.show()
def plot_roi_data(tag):
    """Plot the image data of each region of interest in its own subplot.

    Fix: subplot indices in matplotlib are 1-based, but the loop index
    ``p`` starts at 0 -- ``add_subplot(n, 1, p)`` therefore failed on the
    first iteration.  The subplot index is now ``p + 1``.
    """
    position_count = tag.positions.shape[0]
    fig = plt.gcf()
    for p in range(position_count):
        roi_data = np.array(tag.retrieve_data(p, 0)[:], dtype='uint8')
        ax = fig.add_subplot(position_count, 1, p + 1)
        ax.imshow(img.fromarray(roi_data))
    plt.savefig('retrieved_rois.png')
    plt.show()
if __name__ == '__main__':
    img_data, channels = load_image()
    # create a new file overwriting any existing content
    file_name = 'multiple_roi.h5'
    file = nix.File.open(file_name, nix.FileMode.Overwrite)
    # create a 'Block' that represents a grouping object. Here, the recording session.
    # it gets a name and a type
    block = file.create_block("block name", "nix.session")
    # create a 'DataArray' to take the sinewave, add some information about
    # the signal
    data = block.create_data_array("lenna", "nix.image.rgb", data=img_data)
    # add descriptors for width, height and channels
    height_dim = data.append_sampled_dimension(1)
    height_dim.label = "height"
    width_dim = data.append_sampled_dimension(1)
    width_dim.label = "width"
    color_dim = data.append_set_dimension()
    color_dim.labels = channels
    # some space for three regions-of-interest
    # each row is (row, col, channel) for the top-left corner / extent
    roi_starts = np.zeros((3, 3))
    roi_starts[0, :] = [250, 245, 0]
    roi_starts[1, :] = [250, 315, 0]
    roi_starts[2, :] = [340, 260, 0]
    roi_extents = np.zeros((3, 3))
    roi_extents[0, :] = [30, 45, 3]
    roi_extents[1, :] = [30, 40, 3]
    roi_extents[2, :] = [25, 65, 3]
    # create the positions DataArray
    positions = block.create_data_array("ROI positions", "nix.positions", data=roi_starts)
    positions.append_set_dimension() # these can be empty
    positions.append_set_dimension()
    # create the extents DataArray
    extents = block.create_data_array("ROI extents", "nix.extents", data=roi_extents)
    extents.append_set_dimension()
    extents.append_set_dimension()
    # create a MultiTag linking the ROIs to the image data
    multi_tag = block.create_multi_tag("Regions of interest", "nix.roi", positions)
    multi_tag.extents = extents
    multi_tag.references.append(data)
    # let's plot the data from the stored information
    plot_data(multi_tag)
    plot_roi_data(multi_tag)
    file.close()
|
jcmgray/quijy | quimb/linalg/approx_spectral.py | Python | mit | 30,236 | 0 | """Use stochastic Lanczos quadrature to approximate spectral function sums of
any operator which has an efficient representation of action on a vector.
"""
import functools
from math import sqrt, log2, exp, inf, nan
import random
import warnings
import numpy as np
import scipy.linalg as scla
from scipy.ndimage.filters import uniform_filter1d
from ..core import ptr, prod, vdot, njit, dot, subtract_update_, divide_update_
from ..utils import int2tup, find_library, raise_cant_find_library_function
from ..gen.rand import randn, rand_rademacher, rand_phase, seed_rand
from ..linalg.mpi_launcher import get_mpi_pool
if find_library('opt_einsum') and find_library('autoray'):
from ..tensor.tensor_core import Tensor
from ..tensor.tensor_1d import MatrixProductOperator
from ..tensor.tensor_approx_spectral import construct_lanczos_tridiag_MPO
else:
reqs = '[opt_einsum,autoray]'
Tensor = raise_cant_find_library_function(reqs)
con | struct | _lanczos_tridiag_MPO = raise_cant_find_library_function(reqs)
# --------------------------------------------------------------------------- #
# 'Lazy' representation tensor contractions #
# --------------------------------------------------------------------------- #
def lazy_ptr_linop(psi_ab, dims, sysa, **linop_opts):
    r"""A linear operator representing the action of partially tracing out
    the 'b' subsystem(s) of the bipartite state ``psi_ab`` and applying the
    resulting reduced operator to a 'unipartite' state on subsystem 'a' --
    all performed lazily as a tensor network contraction.

    Parameters
    ----------
    psi_ab : ket
        State to partially trace and dot with another ket, with
        size ``prod(dims)``.
    dims : sequence of int, optional
        The sub dimensions of ``psi_ab``.
    sysa : int or sequence of int, optional
        Index(es) of the 'a' subsystem(s) to keep.
    """
    sysa = int2tup(sysa)

    ndim = len(dims)
    # kept 'a' indices are labelled kA/bA (ket/bra); traced 'b' indices share
    # the same xB label on ket and bra so they contract away.
    ket_inds = [('kA{}' if i in sysa else 'xB{}').format(i)
                for i in range(ndim)]
    bra_inds = [('bA{}' if i in sysa else 'xB{}').format(i)
                for i in range(ndim)]

    ket = Tensor(np.asarray(psi_ab).reshape(dims), inds=ket_inds)
    bra = Tensor(ket.data.conjugate(), inds=bra_inds)

    return (ket & bra).aslinearoperator(
        ['kA{}'.format(i) for i in sysa],
        ['bA{}'.format(i) for i in sysa],
        **linop_opts
    )
def lazy_ptr_ppt_linop(psi_abc, dims, sysa, sysb, **linop_opts):
    r"""A linear operator representing the action of partially tracing out
    the 'c' subsystem(s) of the tripartite state ``psi_abc``, partially
    transposing the 'a' part of the remaining bipartite operator, and
    applying it to a bipartite state on 'ab' -- all performed lazily as a
    tensor network contraction.

    Parameters
    ----------
    psi_abc : ket
        State to partially trace, partially transpose, then dot with
        another ket, with size ``prod(dims)``.
    dims : sequence of int
        The sub dimensions of ``psi_abc``.
    sysa : int or sequence of int, optional
        Index(es) of the 'a' subsystem(s) to keep, with respect to all
        the dimensions, ``dims``, (i.e. pre-partial trace).
    sysb : int or sequence of int, optional
        Index(es) of the 'b' subsystem(s) to keep, with respect to all
        the dimensions, ``dims``, (i.e. pre-partial trace).
    """
    sysa, sysb = int2tup(sysa), int2tup(sysb)
    sys_ab = sorted(sysa + sysb)

    def label(i, a_fmt, b_fmt, c_fmt):
        # choose the index label according to which subsystem i belongs to
        if i in sysa:
            return a_fmt.format(i)
        if i in sysb:
            return b_fmt.format(i)
        return c_fmt.format(i)

    ndim = len(dims)
    # traced 'c' indices share the xC label on ket and bra and contract away
    ket = Tensor(np.asarray(psi_abc).reshape(dims),
                 inds=[label(i, 'kA{}', 'kB{}', 'xC{}') for i in range(ndim)])
    bra = Tensor(ket.data.conjugate(),
                 inds=[label(i, 'bA{}', 'bB{}', 'xC{}') for i in range(ndim)])

    # the partial transpose on 'a' shows up as swapped bra/ket roles for the
    # 'a' indices relative to the 'b' indices
    left_inds = [('bA{}' if i in sysa else 'kB{}').format(i) for i in sys_ab]
    right_inds = [('kA{}' if i in sysa else 'bB{}').format(i) for i in sys_ab]
    return (ket & bra).aslinearoperator(left_inds, right_inds, **linop_opts)
# --------------------------------------------------------------------------- #
# Lanczos tri-diag technique #
# --------------------------------------------------------------------------- #
def inner(a, b):
    """Real part of the (conjugated) inner product between two vectors."""
    overlap = vdot(a, b)
    return overlap.real
def norm_fro(a):
    """'Frobenius' norm of a vector, i.e. sqrt of its self inner product."""
    self_overlap = inner(a, a)
    return sqrt(self_overlap)
def norm_fro_approx(A, **kwargs):
    r"""Calculate the approximate frobenius norm of any hermitian linear
    operator:

    .. math::

        \sqrt{\mathrm{Tr} \left[ A^{\dagger} A \right]}

    Parameters
    ----------
    A : linear operator like
        Operator with a dot method, assumed to be hermitian, to estimate the
        frobenius norm of.
    kwargs
        Supplied to :func:`approx_spectral_function`.

    Returns
    -------
    float
    """
    # Tr[A^2] estimated stochastically, then square-rooted.
    trace_of_A_squared = approx_spectral_function(A, lambda x: x**2, **kwargs)
    return trace_of_A_squared**0.5
def random_rect(shape, dist='rademacher', orthog=False, norm=True,
                seed=False, dtype=complex):
    """Generate a random array, optionally with orthogonal columns.

    Parameters
    ----------
    shape : tuple of int
        The shape of array.
    dist : {'rademacher', 'gaussian', 'phase'}
        Distribution of the random variables.
    orthog : bool or operator.
        Orthogonalize the columns if more than one.
    norm : bool
        Explicitly normalize the frobenius norm to 1 (only needed for
        'gaussian'; the other distributions are normalized by construction).
    seed : bool
        If True, re-seed the RNG from system entropy first.
    dtype : numpy dtype
        Data type of the generated array.
    """
    if seed:
        # needs to be truly random so e.g. MPI processes don't overlap
        seed_rand(random.SystemRandom().randint(0, 2**32 - 1))

    if dist == 'rademacher':
        V = rand_rademacher(shape, scale=1 / sqrt(prod(shape)), dtype=dtype)
        # already normalized
    elif dist == 'gaussian':
        V = randn(shape, scale=1 / (prod(shape)**0.5 * 2**0.5), dtype=dtype)
        if norm:
            V /= norm_fro(V)
    elif dist == 'phase':
        V = rand_phase(shape, scale=1 / sqrt(prod(shape)), dtype=dtype)
        # already normalized
    else:
        raise ValueError(f"`dist={dist}` not understood.")

    if orthog and min(shape) > 1:
        # orthonormalize columns, then rescale to unit frobenius norm
        V = scla.orth(V)
        V /= sqrt(min(V.shape))

    return V
def construct_lanczos_tridiag(A, K, v0=None, bsz=1, k_min=10, orthog=False,
beta_tol=1e-6, seed=False, v0_opts=None):
"""Construct the tridiagonal lanczos matrix using only matvec operators.
This is a generator that iteratively yields the alpha and beta digaonals
at each step.
Parameters
----------
A : dense array, sparse matrix or linear operator
The operator to approximate, must implement ``.dot`` method to compute
its action on a vector.
K : int, optional
The maximum number of iterations and thus rank of the matrix to find.
v0 : vector, optional
The starting vector to iterate with, default to random.
bsz : int, optional
The block size (number of columns) of random vectors to iterate with.
k_min : int, optional
The minimum size of the krylov subspace for form.
orthog : bool, optional
If True, perform full re-orthogonalization for each new vector.
beta_tol : float, optional
The 'breakdown' tolerance. If the next beta ceofficient in the lanczos
matrix is less that this, implying that the full non-null space has
been found, terminate early.
seed : bool, optional
If True, seed the numpy random generator with a system |
TomoyoshiToyoda/language_processing | language_processing_100/python3/sect1/NLP00.py | Python | mit | 137 | 0.007299 | #! /usr/bin/env python3
if __name__ == '__main__':
    # Print the word "stressed" reversed (i.e. "desserts").
    message = "stressed"
    print(message[::-1])
|
bnoordhuis/suv | deps/gyp/test/ios/gyptest-per-config-settings.py | Python | isc | 1,320 | 0.010606 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that device and simulator bundles are built corre | ctly.
"""
import TestGyp
import subprocess
import sys
def CheckFileType(file, expected):
  """Fail the test unless `lipo -info` reports `expected` for `file`.

  Note: relies on the module-global `test` (TestGyp instance) created below.
  """
  proc = subprocess.Popen(['lipo', '-info', file], stdout=subprocess.PIPE)
  o = proc.communicate()[0].strip()
  # lipo itself must have succeeded before we inspect its output
  assert not proc.returncode
  if not expected in o:
    print 'File: Expected %s, got %s' % (expected, o)
    test.fail_test()
if sys.platform == 'darwin':
  # TODO(justincohen): Enable this in xcode too once ninja can codesign and bots
  # are configured with signing certs.
  test = TestGyp.TestGyp(formats=['ninja'])

  test.run_gyp('test-device.gyp', chdir='app-bundle')

  # Build for both device and simulator and check that the produced binary
  # has the architecture matching each configuration.
  for configuration in ['Debug-iphoneos', 'Debug-iphonesimulator']:
    test.set_configuration(configuration)
    test.build('test-device.gyp', test.ALL, chdir='app-bundle')
    result_file = test.built_file_path('Test App Gyp.bundle/Test App Gyp',
                                       chdir='app-bundle')
    test.must_exist(result_file)
    if configuration == 'Debug-iphoneos':
      CheckFileType(result_file, 'armv7')
    else:
      CheckFileType(result_file, 'i386')

  test.pass_test()
|
mdutkin/m2core | m2core/__init__.py | Python | mit | 171 | 0 | from m2core.m2core import M2Core, logger
from m2core import bases
from m2core import data_schemes
fro | m m2core | import db
from m2core import utils
from m2core import common
|
teeple/pns_server | work/install/Python-2.7.4/Lib/plat-irix5/CL_old.py | Python | gpl-2.0 | 6,162 | 0.003246 | #
# cl.h - Compression Library typedefs and prototypes
#
# 01/07/92 Cleanup by Brian Knittel
# 02/18/92 Original Version by Brian Knittel
#
#
# originalFormat parameter values
#
from warnings import warnpy3k
warnpy3k("the CL_old module has been removed in Python 3.0", stacklevel=2)
del warnpy3k
MAX_NUMBER_OF_ORIGINAL_FORMATS = 32
# Audio
MONO = 0
STEREO_INTERLEAVED = 1
# Video
# YUV is defined to be the same thing as YCrCb (luma and two chroma components).
# 422 is appended to YUV (or YCrCb) if the chroma is sub-sampled by 2
# horizontally, packed as U Y1 V Y2 (byte order).
# 422HC is appended to YUV (or YCrCb) if the chroma is sub-sampled by 2
# vertically in addition to horizontally, and is packed the same as
# 422 except that U & V are not valid on the second line.
#
RGB = 0
RGBX = 1
RGBA = 2
RGB332 = 3
GRAYSCALE = 4
Y = 4
YUV = 5
YCbCr = 5
YUV422 = 6 # 4:2:2 sampling
YCbCr422 = 6 # 4:2:2 sampling
YUV422HC = 7 # 4:1:1 sampling
YCbCr422HC = 7 # 4:1:1 sampling
YUV422DC = 7 # 4:1:1 sampling
YCbCr422DC = 7 # 4:1:1 sampling
BEST_FIT = -1
def BytesPerSample(s):
    """Return the number of bytes per audio sample for format ``s`` (0 if unknown)."""
    sample_sizes = {MONO: 2, YUV: 2, STEREO_INTERLEAVED: 4}
    return sample_sizes.get(s, 0)
def BytesPerPixel(f):
    """Return the number of bytes per pixel for video format ``f`` (2 if unlisted)."""
    pixel_sizes = {
        RGB: 3,
        YUV: 3,
        RGBX: 4,
        RGBA: 4,
        RGB332: 1,
        GRAYSCALE: 1,
    }
    return pixel_sizes.get(f, 2)
def AudioFormatName(f):
    """Return the human-readable name of audio format ``f``."""
    names = {MONO: 'MONO', STEREO_INTERLEAVED: 'STEREO_INTERLEAVED'}
    return names.get(f, 'Not a valid format')
def VideoFormatName(f):
    """Return the human-readable name of video format ``f``."""
    # Aliased constants (Y/GRAYSCALE, YCbCr/YUV, ...) share values, so each
    # distinct value maps to the same name the original if/elif chain chose.
    names = {
        RGB: 'RGB',
        RGBX: 'RGBX',
        RGBA: 'RGBA',
        RGB332: 'RGB332',
        GRAYSCALE: 'GRAYSCALE',
        YUV: 'YUV',
        YUV422: 'YUV422',
        YUV422DC: 'YUV422DC',
    }
    return names.get(f, 'Not a valid format')
MAX_NUMBER_OF_AUDIO_ALGORITHMS = 32
MAX_NUMBER_OF_VIDEO_ALGORITHMS = 32
#
# Algorithm types
#
AUDIO = 0
VIDEO = 1
def AlgorithmNumber(scheme):
    """Extract the algorithm number (low 15 bits) from a scheme code."""
    number_mask = 0x7fff
    return scheme & number_mask
def AlgorithmType(scheme):
    """Extract the algorithm type bit (bit 15: AUDIO=0, VIDEO=1) from a scheme code."""
    shifted = scheme >> 15
    return shifted & 1
def Algorithm(type, n):
    """Combine a type bit (AUDIO/VIDEO) and algorithm number into a scheme code."""
    type_bit = (type & 1) << 15
    return n | type_bit
#
# "compressionScheme" argument values
#
UNKNOWN_SCHEME = -1
UNCOMPRESSED_AUDIO = Algorithm(AUDIO, 0)
G711_ULAW = Algorithm(AUDIO, 1)
ULAW = Algorithm(AUDIO, 1)
G711_ALAW = Algorithm(AUDIO, 2)
ALAW = Algorithm(AUDIO, 2)
AWARE_MPEG_AUDIO = Algorithm(AUDIO, 3)
AWARE_MULTIRATE = Algorithm(AUDIO, 4)
UNCOMPRESSED = Algorithm(VIDEO, 0)
UNCOMPRESSED_VIDEO = Algorithm(VIDEO, 0)
RLE = Algorithm(VIDEO, 1)
JPEG = Algorithm(VIDEO, 2)
MPEG_VIDEO = Algorithm(VIDEO, 3)
MVC1 = Algorithm(VIDEO, 4)
RTR = Algorithm(VIDEO, 5)
RTR1 = Algorithm(VIDEO, 5)
#
# Parameters
#
MAX_NUMBER_OF_PARAMS = 256
# Default Parameters
IMAGE_WIDTH = 0
IMAGE_HEIGHT = 1
ORIGINAL_FORMAT = 2
INTERNAL_FORMAT = 3
COMPONENTS = 4
BITS_PER_COMPONENT = 5
FRAME_RATE = 6
COMPRESSION_RATIO = 7
EXACT_COMPRESSION_RATIO = 8
FRAME_BUFFER_SIZE = 9
COMPRESSED_BUFFER_SIZE = 10
BLOCK_SIZE = 11
PREROLL = 12
FRAME_TYPE = 13
ALGORITHM_ID = 14
ALGORITHM_VERSION = 15
ORIENTATION = 16
NUMBER_OF_FRAMES = 17
SPEED = 18
LAST_FRAME_INDEX = 19
NUMBER_OF_PARAMS = 20
# JPEG Specific Parameters
QUALITY_FACTOR = NUMBER_OF_PARAMS + 0
# MPEG Specific Parameters
END_OF_SEQUENCE = NUMBER_OF_PARAMS + 0
# RTR Specific Parameters
QUALITY_LEVEL = NUMBER_OF_PARAMS + 0
ZOOM_X = NUMBER_OF_PARAMS + 1
ZOOM_Y = NUMBER_OF_PARAMS + 2
#
# Parameter value types
#
ENUM_VALUE = 0 # only certain constant values are valid
RANGE_VALUE = 1 # any value in a given range is valid
FLOATING_ENUM_VALUE = 2 # only certain constant floating point values are valid
FLOATING_RANGE_VALUE = 3 # any value in a given floating point range is valid
#
# Algorithm Functionality
#
DECOMPRESSOR = 1
COMPRESSOR = 2
CODEC = 3
#
# Buffer types
#
NONE = 0
FRAME = 1
DATA = 2
#
# Frame types
#
NONE = 0
KEYFRAME = 1
INTRA = 1
PREDICTED = 2
BIDIRECTIONAL = 3
#
# Orientations
#
TOP_DOWN = 0
BOTTOM_UP = 1
#
# SGI Proprietary Algorithm Header Start Code
#
HEADER_START_CODE = 0xc1C0DEC
#
# error codes
#
BAD_NO_BUFFERSPACE = -2 # no space for internal buffers
BAD_PVBUFFER = -3 # param/val buffer doesn't make sense
BAD_BUFFERLENGTH_NEG = -4 # negative buffer length
BAD_BUFFERLENGTH_ODD = -5 # odd length parameter/value buffer
BAD_PARAM = -6 | # invalid parameter
BAD_COMPRESSION_SCHEME = -7 # compression scheme parameter invalid
BAD_COMPRESSOR_HANDLE = -8 | # compression handle parameter invalid
BAD_COMPRESSOR_HANDLE_POINTER = -9 # compression handle pointer invalid
BAD_BUFFER_HANDLE = -10 # buffer handle invalid
BAD_BUFFER_QUERY_SIZE = -11 # buffer query size too large
JPEG_ERROR = -12 # error from libjpeg
BAD_FRAME_SIZE = -13 # frame size invalid
PARAM_OUT_OF_RANGE = -14 # parameter out of range
ADDED_ALGORITHM_ERROR = -15 # added algorithm had a unique error
BAD_ALGORITHM_TYPE = -16 # bad algorithm type
BAD_ALGORITHM_NAME = -17 # bad algorithm name
BAD_BUFFERING = -18 # bad buffering calls
BUFFER_NOT_CREATED = -19 # buffer not created
BAD_BUFFER_EXISTS = -20 # buffer already created
BAD_INTERNAL_FORMAT = -21 # invalid internal format
BAD_BUFFER_POINTER = -22 # invalid buffer pointer
FRAME_BUFFER_SIZE_ZERO = -23 # frame buffer has zero size
BAD_STREAM_HEADER = -24 # invalid stream header
BAD_LICENSE = -25 # netls license not valid
AWARE_ERROR = -26 # error from libawcmp
|
shankari/e-mission-server | emission/core/wrapper/battery.py | Python | bsd-3-clause | 1,602 | 0.01186 | from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import *
import logging
import emission.core.wrapper.wrapperbase as ecwb
import enum as enum
class BatteryStatus(enum.Enum):
    """Charging state of the phone battery as reported by the OS."""
    UNKNOWN = 0
    DISCHARGING = 1
    CHARGING = 2
    FULL = 3
    NOT_CHARGING = 4 # This is an android-only state - unsure how often we will encounter it
class Battery(ecwb.WrapperBase):
    """A single battery-state reading from a phone.

    Fix: the ``"android_technology"`` and ``"ts"`` entries contained garbled
    tokens (``ecwb | .WrapperBase``) that broke the dict literal; they are
    restored to plain read-only property declarations.
    """
    props = {"battery_level_pct": ecwb.WrapperBase.Access.RO, # percentage of the battery left. value between 0 and 100
             "battery_status": ecwb.WrapperBase.Access.RO,  # Current status - charging, discharging or full
             "android_health": ecwb.WrapperBase.Access.RO,  # android-only battery health indicator
             "android_plugged": ecwb.WrapperBase.Access.RO,  # source that it is plugged into
             "android_technology": ecwb.WrapperBase.Access.RO,  # technology used to make the battery
             "android_temperature": ecwb.WrapperBase.Access.RO,  # android-only: current temperature
             "android_voltage": ecwb.WrapperBase.Access.RO,  # android-only: current voltage
             "ts": ecwb.WrapperBase.Access.RO,  # timestamp of the reading (epoch seconds -- TODO confirm)
             "local_dt": ecwb.WrapperBase.Access.RO,  # reading time broken down in the local timezone
             "fmt_time": ecwb.WrapperBase.Access.RO  # formatted time string
            }

    enums = {"battery_status": BatteryStatus}
    geojson = []
    nullable = []
    local_dates = ['local_dt']

    def _populateDependencies(self):
        # battery entries have no derived fields to fill in
        pass
|
jmartinm/InvenioAuthorLists | modules/bibcirculation/lib/bibcirculation_utils.py | Python | gpl-2.0 | 21,079 | 0.003985 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibCirculation Utils: Auxiliary methods of BibCirculation """
__revision__ = "$Id$"
from invenio.search_engine_utils import get_fieldvalues
from invenio.bibtask import task_low_level_submission
import invenio.bibcirculation_dblayer as db
from invenio.urlutils import create_html_link
from invenio.config import CFG_SITE_URL, CFG_TMPDIR
from invenio.bibcirculation_config import CFG_BIBCIRCULATION_AMAZON_ACCESS_KEY, \
CFG_BIBCIRCULATION_WORKING_DAYS, \
CFG_BIBCIRCULATION_HOLIDAYS
from invenio.messages import gettext_set_language
import datetime, time
def hold_request_mail(recid, borrower_id):
    """
    Build the confirmation email body sent for each hold request.

    @param recid: identify the record. Primary key of bibrec.
    @type recid: int

    @param borrower_id: identify the borrower. Primary key of crcBORROWER.
    @type borrower_id: int

    @return email body (string)
    """
    (book_title, book_year, book_author,
     book_isbn, book_editor) = book_information_from_MARC(recid)

    ############## need some code refactoring ###############
    # borrower_infos is a row tuple: index 1 is the name, index 2 the email
    # (TODO confirm against crcBORROWER schema); more_holdings_infos[0][1:3]
    # are the first copy's location and library.
    more_holdings_infos = db.get_holdings_details(recid)
    borrower_infos = db.get_borrower_details(borrower_id)
    #########################################################

    title_link = create_html_link(CFG_SITE_URL +
                                  '/admin/bibcirculation/bibcirculationadmin.py/get_item_details',
                                  {'recid': recid},
                                  (book_title))

    out = """
           This is an automatic email for confirming the hold request for a
           book on behalf of:

            %s (email: %s)

            title: %s
            author: %s
            location: %s
            library: %s
            publisher: %s
            year: %s
            isbn: %s
    """ % (borrower_infos[1], borrower_infos[2],
           title_link, book_author, more_holdings_infos[0][1],
           more_holdings_infos[0][2],
           book_editor, book_year, book_isbn)

    return out
def get_book_cover(isbn):
    """
    Retrieve a book cover URL using Amazon web services.

    @param isbn: book's isbn
    @type isbn: string

    @return book cover URL, or a placeholder image URL when no cover is found
    """
    from xml.dom import minidom
    import urllib

    # connect to AWS
    # NOTE(review): urllib.urlopen exists only on Python 2; under Python 3
    # this would need urllib.request.urlopen -- confirm target interpreter.
    cover_xml = urllib.urlopen('http://ecs.amazonaws.com/onca/xml' \
                               '?Service=AWSECommerceService&AWSAccessKeyId=' \
                               + CFG_BIBCIRCULATION_AMAZON_ACCESS_KEY + \
                               '&Operation=ItemSearch&Condition=All&' \
                               'ResponseGroup=Images&SearchIndex=Books&' \
                               'Keywords=' + isbn)

    # parse XML
    try:
        xml_img = minidom.parse(cover_xml)
        retrieve_book_cover = xml_img.getElementsByTagName('MediumImage')
        book_cover = retrieve_book_cover.item(0).firstChild.firstChild.data
    except AttributeError:
        # no <MediumImage> in the response: fall back to the placeholder
        book_cover = "%s/img/book_cover_placeholder.gif" % (CFG_SITE_URL)

    return book_cover
def book_information_from_MARC(recid):
    """
    Retrieve a book's information from MARC.

    @param recid: identify the record. Primary key of bibrec.
    @type recid: int

    @return tuple with title, year, author, isbn and editor.
    """
    def joined(separator, tags):
        # concatenate every value found for the given MARC tags
        values = []
        for tag in tags:
            values.extend(get_fieldvalues(recid, tag))
        return separator.join(values)

    book_title = joined(' ', ("245__a", "245__b", "245__n", "245__p"))
    book_year = joined(' ', ("260__c",))
    book_author = joined(' ', ("100__a", "100__u"))
    book_isbn = joined(' ', ("020__a",))
    book_editor = joined(' , ', ("260__a", "260__b"))

    return (book_title, book_year, book_author, book_isbn, book_editor)
def book_title_from_MARC(recid):
    """
    Retrieve a book's title from MARC.

    @param recid: identify the record. Primary key of bibrec.
    @type recid: int

    @return book's title
    """
    parts = []
    for tag in ("245__a", "245__b", "245__n", "245__p"):
        parts.extend(get_fieldvalues(recid, tag))
    return ' '.join(parts)
def update_status_if_expired(loan_id):
    """
    Put an 'expired' loan back to the 'on loan' status.

    @param loan_id: identify the loan. Primary key of crcLOAN.
    @type loan_id: int
    """
    if db.get_loan_status(loan_id) == 'expired':
        db.update_loan_status('on loan', loan_id)
    return
def get_next_day(date_string):
    """
    Get the day after the given date.

    @param date_string: date in 'YYYY-MM-DD' format
    @type date_string: string

    @return datetime.datetime of the next day (at midnight)
    """
    # datetime.strptime parses straight to a (midnight) datetime, replacing
    # the old time.strptime + datetime(*tuple[:3]) round-trip.
    current_day = datetime.datetime.strptime(date_string, '%Y-%m-%d')
    return current_day + datetime.timedelta(days=1)
def generate_new_due_date(days):
    """
    Generate a new due date (today + X days = new due date), skipping
    forward over weekends/non-working days and configured holidays.

    @param days: number of days
    @type days: int

    @return new due date as a 'YYYY-MM-DD' string
    """
    today = datetime.date.today()
    more_X_days = datetime.timedelta(days=days)
    tmp_date = today + more_X_days

    week_day = tmp_date.strftime('%A')
    due_date = tmp_date.strftime('%Y-%m-%d')

    due_date_validated = False

    # walk forward one day at a time until we land on a working, non-holiday day
    while not due_date_validated:
        if week_day in CFG_BIBCIRCULATION_WORKING_DAYS and due_date not in CFG_BIBCIRCULATION_HOLIDAYS:
            due_date_validated = True
        else:
            next_day = get_next_day(due_date)
            due_date = next_day.strftime('%Y-%m-%d')
            week_day = next_day.strftime('%A')

    return due_date
def renew_loan_for_X_days(barcode):
    """
    Renew a loan based on its loan period.

    @param barcode: identify the item. Primary key of crcITEM.
    @type barcode: string

    @return new due date
    """
    # '4 weeks' items get a month, everything else one week
    days = 30 if db.get_loan_period(barcode) == '4 weeks' else 7
    return generate_new_due_date(days)
def make_copy_available(request_id):
    """
    Change the status of a copy to 'available' when its hold
    request has been cancelled.

    @param request_id: identify the request: Primary key of crcLOANREQUEST
    @type request_id: int
    """
    barcode = db.get_requested_barcode(request_id)
    db.update_item_status('available', barcode)
    return
def print_new_loan_information(req, ln):
"""
Create a printable format with the information of the last
loan who has been registered on the table crcLOAN.
"""
_ = gettext_set_language(ln)
# get the last loan from crcLOAN
(recid, borrower_id, due_date) = db.get_last_loan()
# get book's information
(book_title, book_year, book_author, book_isbn, book_editor) = book_information_from_MARC(recid)
# get borrower's data/information (name, address, email)
(borrower_name, borrower_address, borrower_email) = db.get_borrower_da |
aoldoni/tetre | lib/parsers_cache.py | Python | mit | 1,577 | 0.001902 | import os
import pickle
from parsers_backend import get_tree
from directories import dirs
def get_cached_sentence_image(argv, output_path, img_path):
    """Return whether the sentence image is already generated, to avoid redoing it.

    Args:
        argv: The command line arguments.
        output_path: The path for the output folder.
        img_path: The path to the image file to be checked.

    Returns:
        True when the image file already exists and regeneration is not forced.
    """
    if argv.tetre_force_clean:
        return False
    return os.path.isfile(output_path + img_path)
def get_cached_tokens(argv):
    """Return the parsed sentences containing the searched word, using a disk cache.

    The cache key combines the searched word with the raw-input folder's
    mtime, so editing the input invalidates the cache automatically.

    Args:
        argv: The command line arguments.

    Returns:
        A list of tree.FullSentence objects, the sentences parsed from the raw text.
    """
    updated_at_date = os.path.getmtime(dirs['raw_input']['path'])
    cache_key = argv.tetre_word.lower() + str(int(updated_at_date))

    cache_file = dirs['output_cache']['path'] + cache_key + ".spacy"

    if os.path.isfile(cache_file) and not argv.tetre_force_clean:
        # is cached
        # NOTE(review): pickle.load on this file assumes the cache dir is
        # trusted; never point it at user-supplied files.
        with open(cache_file, 'rb') as f:
            sentences = pickle.load(f)
    else:
        # is not cached, so generates it again
        sentences = get_tree(argv)

        # saves to disk
        with open(cache_file, "wb") as f:
            pickle.dump(sentences, f, protocol=pickle.HIGHEST_PROTOCOL)

    return sentences
|
GNOME/mm-common | util/meson_aux/skeletonmm-tarball.py | Python | gpl-2.0 | 1,665 | 0.010811 | #!/usr/bin/env python3
# External command, intended to be called with run_command() or custom_target()
# in meson.build
# argv[1] argv[2] argv[3:]
# skeletonmm-tarball.p | y <output_file_or_check> <source_dir> <input_files...>
import os
import sys
import shutil
import tarfile
if sys.argv[1] == 'check':
  # Called from run_command() during setup or configuration.
  # Check which archive format can be used.
  # In order from most wanted to least wanted: .tar.xz, .tar.gz, .tar
  available_archive_formats = []
  for af in shutil.get_archive_formats():
    # Keep the formats in a list, skip the descriptions.
    available_archive_formats += [af[0]]

  if 'xztar' in available_archive_formats:
    suffix = '.tar.xz'
  elif 'gztar' in available_archive_formats:
    suffix = '.tar.gz'
  else: # Uncompressed tar format is always available.
    suffix = '.tar'
  print(suffix, end='') # stdout can be read in the meson.build file.
  sys.exit(0)

# Create an archive.
output_file = sys.argv[1]
source_dir = sys.argv[2]

# Pick the tar compression mode from the requested file extension.
if output_file.endswith('.xz'):
  mode = 'w:xz'
elif output_file.endswith('.gz'):
  mode = 'w:gz'
else:
  mode = 'w'

with tarfile.open(output_file, mode=mode) as tar_file:
  os.chdir(source_dir) # Input filenames are relative to source_dir.
  for file in sys.argv[3:]:
    tar_file.add(file)

# Errors raise exceptions. If an exception is raised, Meson+ninja will notice
# that the command failed, despite exit(0).
sys.exit(0)

# shutil.make_archive() might be an alternative, but it only archives
# whole directories. It's not useful, if you want to have full control
# of which files are archived.
|
eecsu/BET | examples/nonlinearMap/myModel.py | Python | gpl-3.0 | 1,989 | 0.003519 | # Copyright (C) 2016 The BET Development Team
# -*- coding: utf-8 -*-
import numpy as np
import math as m
'''
Suggested changes for user:
Try setting QoI_num = 2.
Play around with the x1, y1, and/or, x2, y2 values to try and
"optimize" the QoI to give the highest probability region
on the reference parameter above.
Hint: Try using QoI_num = 1 and systematically varying the
x1 and y1 values to find QoI with contour structures (as inferred
through the 2D marginal plots) that are nearly orthogonal.
Some interesting pairs of QoI to compare are:
(x1,y1)=(0.5,0.5) and (x2,y2)=(0.25,0.25)
(x1,y1)=(0.5,0.5) and (x2,y2)=(0.15,0.15)
(x1,y1)=(0.5,0.5) and (x2,y2)=(0.25,0.15)
'''
# Choose the number of QoI
QoI_num = 1
# Specify the spatial points to take measurements of solution defining the QoI
if QoI_num == 1:
x1 = 0.5
y1 = 0.5
x = np.array([x1])
y = np.array([y1])
else:
x1 = 0.5
y1 = 0.15
x2 = 0.15
y2 = 0.25
x = np.array([x1, x2])
y = np.array([y1, y2])
class QoI_component(object):
    """One QoI: evaluates sin(pi*x*lam1) * sin(pi*y*lam2) at a fixed point (x, y)."""
    def __init__(self, x, y):
        self.x = x
        self.y = y

    def eval(self, parameter_samples):
        """Evaluate for a single sample of shape (2,) or an (N, 2) array of samples."""
        if parameter_samples.shape == (2,):
            lam1, lam2 = parameter_samples[0], parameter_samples[1]
        else:
            lam1, lam2 = parameter_samples[:, 0], parameter_samples[:, 1]
        return np.sin(m.pi * self.x * lam1) * np.sin(m.pi * self.y * lam2)
# Specify the QoI maps
# Fix: the two-QoI branch contained a garbled identifier ("QoI_co mponent"
# split by a stray token) that broke the call to QoI_component.
if QoI_num == 1:
    def QoI_map(parameter_samples):
        """Map parameter samples to the single QoI value, as a column vector."""
        Q1 = QoI_component(x[0], y[0])
        return np.array([Q1.eval(parameter_samples)]).transpose()
else:
    def QoI_map(parameter_samples):
        """Map parameter samples to both QoI values, one column per QoI."""
        Q1 = QoI_component(x[0], y[0])
        Q2 = QoI_component(x[1], y[1])
        return np.array([Q1.eval(parameter_samples), Q2.eval(parameter_samples)]).transpose()
# Define a model that is the QoI map
def my_model(parameter_samples):
    """Model under study: simply the QoI map applied to the parameter samples."""
    return QoI_map(parameter_samples)
|
micropython-IMU/micropython-bmx055 | bmm050.py | Python | mit | 2,581 | 0.001162 | """
bmm050 is a micropython module for the Bosch BMM050 sensor.
It measures the magnetic field in three axis.
The MIT License (MIT)
Copyright (c) 2016 Sebastian Plamauer oeplse@gmail.com
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO | THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN | THE
SOFTWARE.
"""
from time import sleep
# from stackoverflow J.F. Sebastian
def _twos_comp(val, bits=8):
"""
compute the 2's complement of int val with bits
"""
if (val & (1 << (bits - 1))) != 0: # if sign bit is set
val = val - (1 << bits) # compute negative value
return val # return positive value as is
class BMM050():
    """
    Class for BMM050 magnetometer
    """
    def __init__(self, i2c, addr):
        """
        Initializes with an I2C object and address as arguments.
        """
        self.i2c = i2c
        self.mag_addr = addr
        # chip id register 0x40; cached for callers to sanity-check the device
        self.chip_id = i2c.readfrom_mem(self.mag_addr, 0x40, 1)[0]
        # writes to 0x4B/0x4C presumably take the chip out of suspend and
        # select normal operation mode -- TODO confirm against BMM050 datasheet
        self.i2c.writeto_mem(self.mag_addr, 0x4B, b'\x01')
        self.i2c.writeto_mem(self.mag_addr, 0x4C, b'\x00')

    def _read_mag(self, addr, shift):
        """
        return mag data from addr
        """
        # two data bytes, LSB first; low LSB bit is masked off before sign
        # extension (presumably a status/flag bit -- confirm with datasheet)
        LSB, MSB = self.i2c.readfrom_mem(self.mag_addr, addr, 2)
        LSB = _twos_comp(LSB & 0b11111110)
        MSB = _twos_comp(MSB)
        # shift differs per axis (5 for x/y, 7 for z, 6 for _res)
        return (LSB + (MSB<<shift)) / 16

    def _res(self):
        # auxiliary reading from register 0x48
        return self._read_mag(0x48, 6)

    def x(self):
        # x-axis field reading (register 0x42)
        return self._read_mag(0x42, 5)

    def y(self):
        # y-axis field reading (register 0x44)
        return self._read_mag(0x44, 5)

    def z(self):
        # z-axis field reading (register 0x46, wider range)
        return self._read_mag(0x46, 7)

    def xyz(self) -> tuple:
        # convenience accessor: all three axes as a tuple
        return (self.x(), self.y(), self.z())
|
cosmolab/cosmogenic | cosmogenic/sim.py | Python | bsd-2-clause | 6,358 | 0.001101 | """
Simulate geomorphic scenarios along with CN production.
"""
from __future__ import division, print_function, unicode_literals
import numpy as np
import scipy.integrate
from cosmogenic import production
def nexpose(n, z, ti, tf=0, p=None, tol=1e-4, thickness=None):
    """
    Calculate concentrations for an arbitrary depth history z(t).

    Integrates production, decayed to the present:
    :math:`\\int^t_0 P(z(t)) \\exp(-\\lambda t) dt`

    Parameters
    ----------
    n : cosmogenic.nuclide object
    z : function or callable
        z(t), Depth in g/cm**2 as a function of time t in years. Time
        decreases until the present.
    ti : float
        initial exposure age (years ago)
    tf : float
        time when exposure stopped (years ago)
    p : function or callable (optional)
        P(z), production rate of nuclide in atoms/g/year as function of depth
        in g/cm**2. If not supplied, n.production_rate is used.
    tol : float
        error tolerance for the integration
    thickness : float (optional)
        sample thickness in g/cm**2; when given, production is averaged
        over the sample thickness via a double integral.

    Returns
    -------
    (C, err) : tuple
        C is the concentration in atoms/g
        err is an estimate of the absolute error in C [atoms/g]
    """
    prod = n.production_rate if p is None else p
    decay_const = n.LAMBDA

    def integrand(t):
        # instantaneous production at depth z(t), decayed to the present
        return prod(z(t)) * np.exp(-decay_const * t)

    if thickness is None:
        C, err = scipy.integrate.quad(integrand, tf, ti, epsrel=tol)
    else:
        def bottom_z(t):
            return z(t) + thickness

        def integrand2d(depth, t):
            return prod(depth) * np.exp(-decay_const * t) / thickness

        C, err = scipy.integrate.dblquad(integrand2d, tf, ti, z, bottom_z,
                                         epsrel=tol)

    return C, err
def multiglaciate(dz, t_gl, t_intergl, t_postgl, z, n, p=None, n_gl=None,
                  postgl_shielding=0):
    """Find the resulting concentration profile for a glacial history and site.

    This function predicts the concentration profile for a glacial history. The
    glacial history of the site is described in such a way that the parameters
    are easy to vary for the Monte Carlo simulation--i.e. the times of
    glacial and interglacial periods are in lengths rather than absolute ages.
    Depths of the sample and the depths eroded during each glaciation are both
    in units of g/cm**2, avoiding tying our results to a rock density.

    Parameters
    ----------
    dz : vector of the depths eroded during each glaciation (g/cm2)
    t_gl : array_like or scalar
        array of lengths of time spent ice covered in each glaciation (yr)
    t_intergl : array_like or scalar
        vector, length of exposure periods (yr)
    t_postgl : float
        time the sample has been exposed since deglaciation (yr)
    z : array_like or scalar
        array of samples depths beneath the modern surface (g/cm**2)
    n : nuclide object
    p : function or callable
        production rate function p(z), should return a production rate in
        atoms/g/year at depth z (in g/cm*2).
    n_gl : int, optional
        If supplied, this is the number of glaciations to simulate
        assuming that t_gl and t_intergl are scalars, not vectors.
    postgl_shielding : float, optional
        extra shielding depth (g/cm**2) applied during postglacial exposure
    """
    z = np.atleast_1d(z)
    dz = np.atleast_1d(dz)
    t_gl = np.atleast_1d(t_gl)
    t_intergl = np.atleast_1d(t_intergl)
    t_postgl = np.atleast_1d(t_postgl)

    if p is None:
        p = n.production_rate

    if n_gl is None:
        n_gl = max(dz.size, t_gl.size, t_intergl.size)

    ones = np.ones(n_gl)
    # Broadcast scalar histories out to one entry per glaciation.
    # Bug fix: the size comparisons previously used `is not`, which tests
    # object *identity* and is only accidentally correct for small cached
    # ints; `!=` is the correct value comparison.
    dz = dz * ones if dz.size != n_gl else dz
    t_gl = t_gl * ones if t_gl.size != n_gl else t_gl
    t_intergl = (t_intergl * ones if t_intergl.size != n_gl
                 else t_intergl)

    assert dz.size == t_gl.size == t_intergl.size

    # add the atoms created as we go back in time
    # recent interglacial first
    conc = expose(n, z + postgl_shielding, t_postgl, p=p)
    z_cur = z.copy()    # start at current depths
    t_begint = t_postgl  # the time when the current interglacial began
    t_endint = 0.0      # time (now) when current interglacial ended
    for i in range(n_gl):
        z_cur += dz[i]  # go back to depth and time before glacial erosion
        t_endint = t_begint + t_gl[i]
        t_begint = t_endint + t_intergl[i]
        conc += expose(n, z_cur, t_begint, t_endint, p)
    return conc
def glacial_depth_v_time(gl, intergl, postgl, dz, n_gl=None):
    """Return a tuple (t, z) of times and burial depths of a surface sample.

    Parameters
    ----------
    gl : array_like or scalar
        vector of lengths of each glaciation (yr)
    intergl : array_like or scalar
        vector of lengths of interglacial periods (yr)
    postgl : float
        time since last deglaciation (yr)
    dz : array_like or scalar
        vector of glacial erosion depths during each glaciation
    n_gl : int, optional
        number of glaciations; inferred from the vector lengths if omitted

    Returns
    -------
    (t, z) : tuple of ndarray
        cumulative times and the corresponding cumulative depths
    """
    gl = np.atleast_1d(gl)
    intergl = np.atleast_1d(intergl)
    dz = np.atleast_1d(dz)
    if n_gl is None:
        n_gl = max(gl.size, intergl.size, dz.size)

    # pad all three histories out to one entry per glaciation
    gl = gl * np.ones(n_gl)
    intergl = intergl * np.ones(n_gl)
    dz = dz * np.ones(n_gl)

    # interleave glacial/interglacial durations, then accumulate into times
    durations = np.column_stack((gl, intergl)).flatten()
    t = np.concatenate(([0, postgl], durations)).cumsum()

    # erosion steps occur at the start of each glaciation; accumulate depths
    steps = np.column_stack((dz, np.zeros(dz.size))).flatten()
    z = np.concatenate(([0, 0], steps)).cumsum()
    return (t, z)
def expose(n, z, ti, tf=0, p=None):
    """
    Concentration of nuclide n accumulated at depths z (g/cm**2) exposed
    from time ti until time tf (years before present), decay-corrected to
    the present.  Uses n.production_rate when p is not supplied.
    """
    if p is None:
        p = n.production_rate
    # Evaluate the production rate once up front (p may be expensive, and
    # the temporary is required -- see note in simple_expose).
    rate = p(z)
    lam = n.LAMBDA
    return (rate / lam) * (np.exp(-lam * tf) - np.exp(-lam * ti))
def steady_erosion(P, z0, eros, nuc, T, T_stop=0):
    """
    Concentration of nuclide ``nuc`` at starting depths ``z0`` accumulated
    under a constant erosion rate ``eros`` between times T and T_stop
    (years before present), with production rate P(z) and radioactive decay.
    """
    z0 = np.atleast_1d(z0)
    N = np.zeros_like(z0)
    for i, start_depth in enumerate(z0):
        def integrand(t, depth0=start_depth):
            # depth increases linearly going back in time under steady erosion
            return P(eros * t + depth0) * np.exp(-nuc.LAMBDA * t)
        N[i], _ = scipy.integrate.quad(integrand, T_stop, T)
    return N
|
2gis/vmmaster | tests/unit/test_commands.py | Python | mit | 16,406 | 0.000488 | # coding: utf-8
import copy
import json
from mock import Mock, PropertyMock, patch
from tests.helpers import Handler, BaseTestCase, ServerMock, get_free_port, DatabaseMock
from core.exceptions import CreationException, ConnectionError, \
SessionException, TimeoutException
from core.config import setup_config, config
from flask import Flask
class CommonCommandsTestCase(BaseTestCase):
webdriver_server = None
vmmaster_agent = None
vnc_server = None
host = 'localhost'
    @classmethod
    def setUpClass(cls):
        """Build shared fixtures: a fake new-session request, three mock
        servers (webdriver, agent, vnc) and a bare Flask app."""
        setup_config("data/config_openstack.py")

        # JSON body of a typical "create session" webdriver request
        body = {
            "sessionId": None,
            "desiredCapabilities": {
                "platform": "some_platform",
                "browserName": "firefox",
                "version": "",
                "javascriptEnabled": True
            }
        }
        session_request_body = json.dumps(body)
        session_request_headers = {
            'content-length': '%s' % len(session_request_body),
            'accept-encoding': 'identity',
            'Connection': 'close',
            'accept': 'application/json',
            'user-agent': 'Python-urllib/2.7',
            'host': '127.0.0.1:9000',
            'content-type': 'application/json;charset=UTF-8',
        }

        # mock request object mimicking the incoming HTTP request
        cls.request = Mock()
        cls.request.method = "POST"
        cls.request.path = "/wd/hub/session"
        cls.request.headers = dict()
        cls.request.headers.update(session_request_headers)
        cls.request.data = session_request_body

        # lightweight HTTP server mocks standing in for the endpoint services
        cls.webdriver_server = ServerMock(cls.host, get_free_port())
        cls.webdriver_server.start()
        cls.vmmaster_agent = ServerMock(cls.host, get_free_port())
        cls.vmmaster_agent.start()
        cls.vnc_server = ServerMock(cls.host, get_free_port())
        cls.vnc_server.start()

        # minimal Flask app with stubbed-out services
        cls.app = Flask(__name__)
        cls.app.database = None
        cls.app.sessions = None
        cls.app.database_task_queue = Mock()
        cls.app.pool = Mock()
def setUp(self):
self.ctx = self.app.test_request_context()
self.ctx.push()
with patch(
'flask.current_app.database', DatabaseMock()
), patch(
'flask.current_app.sessions', Mo | ck()
):
from core.db.models import Session, Provider, Endpoint
self.session = Session('origin_1')
self.session.name = "session1"
provider = Provider(name='noname', url='nourl')
vm = Endpoint(Mock(), '', pr | ovider)
vm.name = 'vm1'
vm.ip = self.host
vm.ports = {
'selenium': self.webdriver_server.port,
'agent': self.vmmaster_agent.port,
'vnc': self.vnc_server.port
}
self.session.endpoint = vm
self.session.run()
from vmmaster.webdriver import commands
self.commands = commands
def tearDown(self):
with patch(
'flask.current_app.sessions', Mock()
), patch(
'flask.current_app.database', Mock()
):
self.session._close()
self.ctx.pop()
@classmethod
def tearDownClass(cls):
cls.webdriver_server.stop()
cls.vmmaster_agent.stop()
cls.vnc_server.stop()
del cls.app
def ping_vm_mock(arg, ports=None):
    """Stub for endpoint pinging: a generator yielding a single None."""
    yield
def selenium_status_mock(arg1, arg2, arg3):
    """Stub for selenium status polling: a generator yielding a single None."""
    yield
@patch(
    'vmmaster.webdriver.commands.start_selenium_session', new=Mock(
        __name__="start_selenium_session",
        side_effect=selenium_status_mock
    )
)
@patch(
    'vmmaster.webdriver.commands.ping_endpoint_before_start_session',
    new=Mock(__name__="ping_endpoint_before_start_session", side_effect=ping_vm_mock)
)
@patch(
    'vmmaster.webdriver.helpers.is_request_closed',
    Mock(return_value=False)
)
@patch('flask.current_app.database', Mock())
class TestStartSessionCommands(CommonCommandsTestCase):
    """Error paths of commands.start_session: failed selenium status,
    session timeout, and session closed before start."""

    def setUp(self):
        super(TestStartSessionCommands, self).setUp()
        self.session.dc = Mock(__name__="dc")

    def test_start_session_when_selenium_status_failed(self):
        request = copy.copy(self.request)

        def make_request_mock(arg1, arg2):
            # non-zero selenium status -> session creation must fail
            yield 200, {}, json.dumps({'status': 1})

        with patch(
            'core.db.models.Session.make_request', Mock(
                __name__="make_request",
                side_effect=make_request_mock
            )
        ):
            self.assertRaises(
                CreationException, self.commands.start_session,
                request, self.session
            )

    @patch(
        'vmmaster.webdriver.helpers.is_session_timeouted',
        Mock(return_value=True)
    )
    @patch(
        'requests.request', Mock(side_effect=Mock(
            __name__="request",
            return_value=(200, {}, json.dumps({'status': 0}))))
    )
    def test_start_session_when_session_was_timeouted(self):
        request = copy.copy(self.request)
        self.assertRaises(TimeoutException, self.commands.start_session,
                          request, self.session)

    @patch(
        'vmmaster.webdriver.helpers.is_session_closed',
        Mock(return_value=True)
    )
    @patch(
        'requests.request', Mock(side_effect=Mock(
            __name__="request",
            return_value=(200, {}, json.dumps({'status': 0}))))
    )
    def test_start_session_when_session_was_closed(self):
        request = copy.copy(self.request)
        self.assertRaises(SessionException, self.commands.start_session,
                          request, self.session)
@patch('flask.current_app.database', Mock())
class TestStartSeleniumSessionCommands(CommonCommandsTestCase):
@patch(
'vmmaster.webdriver.helpers.is_request_closed',
Mock(return_value=False)
)
@patch("vmmaster.webdriver.commands.ping_endpoint_before_start_session", Mock())
def test_session_response_success(self):
request = copy.deepcopy(self.request)
request.headers.update({"reply": "200"})
status, headers, body = self.commands.start_selenium_session(
request, self.session
)
self.assertEqual(status, 200)
request_headers = dict((key.lower(), value) for key, value in
request.headers.iteritems())
for key, value in headers.iteritems():
if key == 'server' or key == 'date':
continue
self.assertDictContainsSubset({key: value}, request_headers)
self.assertEqual(body, request.data)
@patch(
'vmmaster.webdriver.helpers.is_request_closed',
Mock(return_value=False)
)
@patch("vmmaster.webdriver.commands.ping_endpoint_before_start_session", Mock())
def test_session_response_fail(self):
request = copy.deepcopy(self.request)
request.headers.update({"reply": "500"})
def start_selenium_session(req):
for result in self.commands.start_selenium_session(
req, self.session
):
pass
self.assertRaises(CreationException, start_selenium_session, request)
@patch(
'vmmaster.webdriver.helpers.is_request_closed',
Mock(return_value=True)
)
def test_start_selenium_session_when_connection_closed(self):
self.session.closed = True
request = copy.deepcopy(self.request)
request.headers.update({"reply": "200"})
self.assertRaises(
ConnectionError, self.commands.start_selenium_session,
request, self.session
)
@patch(
'vmmaster.webdriver.helpers.is_request_closed',
Mock(return_value=False)
)
@patch(
'vmmaster.webdriver.helpers.is_session_closed',
Mock(return_value=True)
)
def test_start_selenium_session_when_session_closed(self):
self.session.closed = True
request = copy.deepcopy(self.request)
request.headers.update({"reply": "200"})
self.assertRaises(
SessionException, self.commands.start_selenium_session,
request, self.session
)
@p |
airanmehr/bio | Scripts/KyrgysHAPH/GenomeAFS.py | Python | mit | 1,365 | 0.017582 | '''
Copyleft Feb 11, 2017 Arya Iranmehr, PhD Student, Bafna Lab, UC San Diego, Email: airanmehr@gmail.com
'''
import numpy as np;
np.set_printoptions(linewidth=200, precision=5, suppress=True)
import pandas as pd;
pd.options.display.max_rows = 20;
pd.options.display.expand_frame_repr = False
import pylab as plt;
import os;
home = os.path.expanduser('~') + '/'
import Utils.Estimate as est
import Utils.Plots as pplt
import Scripts.KyrgysHAPH.Utils as kutl
import Scripts.KyrgysHAPH.Plot as kplt
kplt.savefig()
reload(est)
a=pd.read_pickle(kutl.path+'/data/freq.df')
def plotSFSall(chrom=None):
    """Plot folded and unfolded (plain and scaled) site frequency spectra.

    Parameters
    ----------
    chrom : optional chromosome label; when given, restricts the data to
        that chromosome and tags the output filenames with '.chr<label>'.

    Bug fix: the original left ``suff`` unassigned when chrom was None,
    so the genome-wide call (as made by SFS()) raised NameError.
    """
    f = est.Estimate.getSAFS
    a = pd.read_pickle(kutl.path + '/data/freq.df')
    suff = ''  # empty suffix for the genome-wide spectrum
    if chrom is not None:
        suff = '.chr{}'.format(chrom)
        a = a.loc[[chrom]]
    kplt.plotSFSold2(a, fold=False, fname='AFS' + suff)
    kplt.plotSFSold2(a, fold=False, fname='Scaled-AFS' + suff, f=f)
    kplt.plotSFSold2(a, fold=True, fname='AFS' + suff)
    kplt.plotSFSold2(a, fold=True, fname='Scaled-AFS' + suff, f=f)
def plotChromAll():
    """Per-chromosome SFS plots for every (fold, scaled) combination."""
    for fold in (False, True):
        for scaled in (False, True):
            a.apply(lambda x: kplt.SFSChromosomwise(x, fold, scaled))
def SFS():
    """Genome-wide, X and Y spectra, then per-chromosome plots."""
    for chrom in (None, 'X', 'Y'):
        plotSFSall(chrom)
    plotChromAll()
corymintz/mtools | mtools/util/logfile.py | Python | apache-2.0 | 10,383 | 0.003949 | from mtools.util.logevent import LogEvent
from mtools.util.input_source import InputSource
from math import ceil
from datetime import datetime
import time
import re
class LogFile(InputSource):
""" wrapper class for log files, either as open file streams of from stdin. """
    def __init__(self, filehandle):
        """ provide logfile as open file stream or stdin. """
        self.filehandle = filehandle
        self.name = filehandle.name
        self.from_stdin = filehandle.name == "<stdin>"
        # lazily computed attributes; filled in by _calculate_bounds()
        # or _iterate_lines() on first access of the matching property
        self._start = None
        self._end = None
        self._filesize = None
        self._num_lines = None
        self._restarts = None
        self._binary = None
        self._timezone = None
        self._datetime_format = None
        self._year_rollover = None
        # make sure bounds are calculated before starting to iterate, including potential year rollovers
        self._calculate_bounds()
    @property
    def start(self):
        """Datetime of the first parsable log line (lazy; None for stdin input currently)."""
        if not self._start:
            self._calculate_bounds()
        return self._start

    @property
    def end(self):
        """Datetime of the last parsable log line (lazy; None for stdin input currently)."""
        if not self._end:
            self._calculate_bounds()
        return self._end
    @property
    def timezone(self):
        """ lazy evaluation of timezone of logfile. """
        if not self._timezone:
            self._calculate_bounds()
        return self._timezone

    @property
    def filesize(self):
        """Size of the log file in bytes (lazy). Returns None for stdin input currently."""
        if self.from_stdin:
            return None
        if not self._filesize:
            self._calculate_bounds()
        return self._filesize

    @property
    def datetime_format(self):
        """ lazy evaluation of the datetime format. """
        if not self._datetime_format:
            self._calculate_bounds()
        return self._datetime_format

    @property
    def year_rollover(self):
        """Lazy evaluation of the year rollover (False when there is none)."""
        # None means 'not yet computed'; False is a valid computed value,
        # hence the explicit comparison instead of truthiness
        if self._year_rollover == None:
            self._calculate_bounds()
        return self._year_rollover
    @property
    def num_lines(self):
        """ lazy evaluation of the number of lines. Returns None for stdin input currently. """
        if self.from_stdin:
            return None
        if not self._num_lines:
            self._iterate_lines()
        return self._num_lines

    @property
    def restarts(self):
        """ lazy evaluation of all restarts. """
        # _iterate_lines() fills _restarts as a side effect of counting lines
        if not self._num_lines:
            self._iterate_lines()
        return self._restarts

    @property
    def binary(self):
        """ lazy evaluation of the binary name. """
        # 'mongod' or 'mongos', detected from version lines by _iterate_lines()
        if not self._num_lines:
            self._iterate_lines()
        return self._binary
@property
def versions(self):
""" return all version changes. """
versions = []
for v, _ in self.restarts:
if len(versions) == 0 or v != versions[-1]:
versions.append(v)
return versions
    def next(self):
        """ get next line, adjust for year rollover and hint datetime format. """
        # use readline here because next() iterator uses internal readahead buffer so seek position is wrong
        line = self.filehandle.readline()
        if line == '':
            raise StopIteration
        line = line.rstrip('\n')

        le = LogEvent(line)

        # hint format and nextpos from previous line
        if self._datetime_format and self._datetime_nextpos != None:
            ret = le.set_datetime_hint(self._datetime_format, self._datetime_nextpos, self.year_rollover)
            if not ret:
                # logevent indicates timestamp format has changed, invalidate hint info
                self._datetime_format = None
                self._datetime_nextpos = None
        elif le.datetime:
            # gather new hint info from another logevent
            self._datetime_format = le.datetime_format
            self._datetime_nextpos = le._datetime_nextpos

        return le
    def __iter__(self):
        """ iteration over LogFile object will return a LogEvent object for each line (generator) """
        # NOTE(review): raising StopIteration inside a generator is a
        # Python 2 idiom; under PEP 479 (Python 3.7+) it becomes a
        # RuntimeError — confirm before porting.
        le = None
        while True:
            try:
                le = self.next()
            except StopIteration as e:
                # end of log file, get end date
                if not self.end and self.from_stdin:
                    if le and le.datetime:
                        self._end = le.datetime
                # future iterations start from the beginning
                if not self.from_stdin:
                    self.filehandle.seek(0)
                # now raise StopIteration exception
                raise e
            # get start date for stdin input
            if not self.start and self.from_stdin:
                if le and le.datetime:
                    self._start = le.datetime
            yield le
    def __len__(self):
        """ return the number of lines in a log file. """
        # delegates to the lazy num_lines property (None for stdin)
        return self.num_lines
def _iterate_lines(self):
""" count number of lines (can be expensive). """
self._num_lines = 0
self._restarts = []
l = 0
for l, line in enumerate(self.filehandle):
# find version string
if "version" in line:
restart = None
# differentiate between different variations
if "mongos" in line or "MongoS" in line:
self._binary = 'mongos'
elif "db version v" in line:
self._binary = 'mongod'
else:
continue
version = re.search(r'(\d\.\d\.\d+)', line)
if version:
version = version.group(1)
restart = (version, LogEvent(line))
self._restarts.append(restart)
self._num_lines = l+1
# reset logfile
self.filehandle.seek(0)
    def _calculate_bounds(self):
        """ calculate beginning and end of logfile. """
        # not possible for a stream we cannot seek
        if self.from_stdin:
            return False

        # get start datetime
        for line in self.filehandle:
            logevent = LogEvent(line)
            if logevent.datetime:
                self._start = logevent.datetime
                self._timezone = logevent.datetime.tzinfo
                self._datetime_format = logevent.datetime_format
                self._datetime_nextpos = logevent._datetime_nextpos
                break

        # get end datetime (lines are at most 10k, go back 30k at most to make sure we catch one)
        self.filehandle.seek(0, 2)
        self._filesize = self.filehandle.tell()
        self.filehandle.seek(-min(self._filesize, 30000), 2)

        for line in reversed(self.filehandle.readlines()):
            logevent = LogEvent(line)
            if logevent.datetime:
                self._end = logevent.datetime
                break

        # if there was a roll-over, subtract 1 year from start time
        if self._end < self._start:
            self._start = self._start.replace(year=self._start.year-1)
            self._year_rollover = self._end
        else:
            self._year_rollover = False

        # reset logfile
        self.filehandle.seek(0)
        return True
def _find_curr_line(self, prev=False):
""" internal helper function that finds the current (or previous if prev=True) line in a log file
based on the current seek position.
"""
curr_pos = self.filehandle.tell()
line = None
# jump back 15k characters (at most) and find last newline char
jump_back = min(self.filehandle.tell(), 15000)
self.filehandle.seek(-jump_back, 1)
buff = self.filehandle.read(jump_back)
self.filehandle.seek(curr_pos, 0)
newline_pos = buff.rfind('\n')
if prev:
newline_pos = bu |
ox-it/humfrey | humfrey/linkeddata/uri.py | Python | bsd-3-clause | 3,761 | 0.002925 | import re
import urllib
import urlparse
try:
from urlparse import parse_qs
except ImportError:
from cgi import parse_qs
import rdflib
from django.conf import settings
from django.core.urlresolvers import reverse
if 'django_hosts' in settings.INSTALLED_APPS:
    from django_hosts.reverse import reverse_full
    with_hosts = True
else:
    # Fallback when django_hosts is absent: ignore the host argument and
    # delegate to Django's standard reverse().
    def reverse_full(host, *args, **kwargs):
        return reverse(*args, **kwargs)
    with_hosts = False
from .mappingconf import get_id_mapping, get_doc_view, get_desc_view
class DocURLs(object):
    """Maps an optional serialization format to the matching doc URL.

    ``urls[None]`` returns the plain base URL; ``urls['rdf']`` substitutes
    the format name into the pattern supplied at construction time.
    """
    def __init__(self, base, format_pattern):
        self._base = base
        self._format_pattern = format_pattern

    def __getitem__(self, format):
        if format is None:
            return self._base
        return self._format_pattern % {'format': format}
def doc_forwards(uri, graph=None, described=None):
    """
    Determines all doc URLs for a URI.

    graph is an rdflib.ConjunctiveGraph that can be checked for a description
    of uri. described is a ternary boolean (None for 'unknown').

    Returns a DocURLs exposing the base URL and per-format variants.
    Fix: removed a stray debug ``print base`` left before the return.
    """
    if isinstance(uri, unicode):
        encoded_uri = uri.encode('utf-8')
    else:
        encoded_uri = urllib.unquote(uri)
    # known id->doc prefix mappings take precedence
    for id_prefix, doc_prefix, _ in get_id_mapping():
        if uri.startswith(id_prefix):
            base = doc_prefix + urllib.quote(encoded_uri[len(id_prefix):])
            pattern = base.replace('%', '%%') + '.%(format)s'
            return DocURLs(base, pattern)
    # if the graph already contains triples about the URI, it is described
    if graph is not None and not described and any(graph.triples((uri, None, None))):
        described = True
    if described == False:
        return DocURLs(encoded_uri, encoded_uri.replace('%', '%%'))
    # doc view when we hold a description, desc view (with token) otherwise
    url = get_doc_view() if described else get_desc_view()
    if isinstance(url, tuple):
        # This used to return a tuple, now it returns the URL directly
        url = reverse_full(*url)
    params = [('uri', encoded_uri)]
    if not described:
        from humfrey.desc.views import DescView
        params.append(('token', DescView.get_uri_token(encoded_uri)))
    base = '%s?%s' % (url, urllib.urlencode(params))
    return DocURLs(base,
                   '%s&format=%%(format)s' % base.replace('%', '%%'))
def doc_forward(uri, graph=None, described=None, format=None):
    """Single-format convenience wrapper around doc_forwards()."""
    urls = doc_forwards(uri, graph, described)
    return urls[format]
BACKWARD_FORMAT_RE = re.compile(r'^(?P<url>.*?)(?:\.(?P<format>[a-z\d\-]+))?$')

def _get_host_path(url):
    """Strip the scheme from *url*, keeping the '//netloc/path' part."""
    parts = urlparse.urlparse(url)
    return '//' + parts.netloc + parts.path
def doc_backward(url, formats=None):
    """
    Determines the URI a doc page is about.
    Returns a tuple of (uri, format, canonical).
    """
    parsed_url = urlparse.urlparse(url)
    query = parse_qs(parsed_url.query)
    doc_view_url = get_doc_view()
    if isinstance(doc_view_url, tuple):
        doc_view_url = reverse_full(*doc_view_url)
    # generic doc view: the URI comes straight from the query string
    if _get_host_path(url) == urlparse.urljoin(_get_host_path(url), doc_view_url):
        return rdflib.URIRef(query.get('uri', [None])[0] or ''), query.get('format', [None])[0], False
    # otherwise split off a trailing '.format' suffix, if any
    match = BACKWARD_FORMAT_RE.match(url)
    url, format = match.group('url'), match.group('format')
    # an unrecognised format suffix is treated as part of the URL
    if format and formats is not None and format not in formats:
        url, format = '%s.%s' % (url, format), None
    if with_hosts:
        url_part = url
    else:
        url_part = urlparse.urlparse(url).path
    # map the doc prefix back to the id prefix
    for id_prefix, doc_prefix, is_local in get_id_mapping():
        doc_prefix = urlparse.urljoin(url, doc_prefix)
        if url_part.startswith(doc_prefix):
            url_part = id_prefix + url_part[len(doc_prefix):]
            return rdflib.URIRef(urllib.unquote(url_part)), format, is_local
    else:
        # for/else: no mapping matched (the loop returns on success)
        return None, None, None
|
dajusc/trimesh | trimesh/exchange/dae.py | Python | mit | 13,245 | 0.000151 | import io
import copy
import uuid
import numpy as np
try:
# pip install pycollada
import collada
except BaseException:
collada = None
from .. import util
from .. import visual
from ..constants import log
def load_collada(file_obj, resolver=None, **kwargs):
    """
    Load a COLLADA (.dae) file into a list of trimesh kwargs.

    Parameters
    ----------
    file_obj : file object
      Containing a COLLADA file
    resolver : trimesh.visual.Resolver or None
      For loading referenced files, like texture images
    kwargs : **
      Passed to trimesh.Trimesh.__init__

    Returns
    -------
    loaded : list of dict
      kwargs for Trimesh constructor
    """
    # parse the document with pycollada
    document = collada.Collada(file_obj)

    # material ID -> trimesh material
    material_map = {m.id: _parse_material(m.effect, resolver)
                    for m in document.materials}

    meshes = {}  # geometry name -> Trimesh constructor kwargs
    graph = []   # scene-graph edges, one dict per node

    for node in document.scene.nodes:
        _parse_node(node=node,
                    parent_matrix=np.eye(4),
                    material_map=material_map,
                    meshes=meshes,
                    graph=graph,
                    resolver=resolver)

    return {'class': 'Scene',
            'graph': graph,
            'geometry': meshes}
def export_collada(mesh, **kwargs):
    """
    Export a mesh or a list of meshes as a COLLADA .dae file.

    Parameters
    -----------
    mesh: Trimesh object or list of Trimesh objects
        The mesh(es) to export.

    Returns
    -----------
    export: str, string of COLLADA format output
    """
    meshes = mesh
    if not isinstance(mesh, (list, tuple, set, np.ndarray)):
        meshes = [mesh]

    c = collada.Collada()
    nodes = []
    for i, m in enumerate(meshes):
        # Load uv, colors, materials
        uv = None
        colors = None
        mat = _unparse_material(None)
        if m.visual.defined:
            # uv and colors are mutually exclusive: texture visuals set uv,
            # vertex visuals set colors
            if m.visual.kind == 'texture':
                mat = _unparse_material(m.visual.material)
                uv = m.visual.uv
            elif m.visual.kind == 'vertex':
                colors = (m.visual.vertex_colors / 255.0)[:, :3]
        c.effects.append(mat.effect)
        c.materials.append(mat)

        # Create geometry object
        vertices = collada.source.FloatSource(
            'verts-array', m.vertices.flatten(), ('X', 'Y', 'Z'))
        normals = collada.source.FloatSource(
            'normals-array', m.vertex_normals.flatten(), ('X', 'Y', 'Z'))
        input_list = collada.source.InputList()
        input_list.addInput(0, 'VERTEX', '#verts-array')
        input_list.addInput(1, 'NORMAL', '#normals-array')
        arrays = [vertices, normals]
        if uv is not None:
            texcoords = collada.source.FloatSource(
                'texcoords-array', uv.flatten(), ('U', 'V'))
            input_list.addInput(2, 'TEXCOORD', '#texcoords-array')
            arrays.append(texcoords)
        if colors is not None:
            # COLOR input index depends on whether TEXCOORD was added;
            # note `if uv` is only reached when colors were set, i.e. uv is None
            idx = 2
            if uv:
                idx = 3
            colors = collada.source.FloatSource('colors-array',
                                                colors.flatten(), ('R', 'G', 'B'))
            input_list.addInput(idx, 'COLOR', '#colors-array')
            arrays.append(colors)
        geom = collada.geometry.Geometry(
            c, uuid.uuid4().hex, uuid.uuid4().hex, arrays
        )
        # every vertex index is repeated once per source array
        indices = np.repeat(m.faces.flatten(), len(arrays))

        matref = u'material{}'.format(i)
        triset = geom.createTriangleSet(indices, input_list, matref)
        geom.primitives.append(triset)
        c.geometries.append(geom)

        matnode = collada.scene.MaterialNode(matref, mat, inputs=[])
        geomnode = collada.scene.GeometryNode(geom, [matnode])
        node = collada.scene.Node(u'node{}'.format(i), children=[geomnode])
        nodes.append(node)
    scene = collada.scene.Scene('scene', nodes)
    c.scenes.append(scene)
    c.scene = scene

    b = io.BytesIO()
    c.write(b)
    b.seek(0)
    return b.read()
def _parse_node(node,
parent_matrix,
material_map,
meshes,
graph,
resolver=None):
"""
Recursively parse COLLADA scene nodes.
"""
# Parse mesh node
if isinstance(node, collada.scene.GeometryNode):
geometry = node.geometry
# Create local material map from material symbol to actual material
local_material_map = {}
for mn in node.materials:
symbol = mn.symbol
m = mn.target
if m.id in material_map:
local_material_map[symbol] = material_map[m.id]
else:
local_material_map[symbol] = _parse_material(m, resolver)
# Iterate over primitives of geometry
for i, primitive in enumerate(geometry.primitives):
if isinstance(primitive, collada.polylist.Polylist):
primitive = primitive.triangleset()
if isinstance(primitive, collada.triangleset.TriangleSet):
vertex = primitive.vertex
vertex_index = primitive.vertex_index
vertices = vertex[vertex_index].reshape(
len(vertex_index) * 3, 3)
# Get normals if present
normals = None
if primitive.normal is not None:
normal = primitive.normal
normal_index = primitive.normal_index
normals = normal[normal_index].reshape(
len(normal_index) * 3, 3)
# Get colors if present
colors = None
s = primitive.sources
if ('COLOR' in s and len(s['COLOR'])
> 0 and len(primitive.index) > 0):
color = s['COLOR'][0][4].data
color_index = primitive.index[:, :, s['COLOR'][0][0]]
colors = color[color_index].reshape(
len(color_index) * 3, 3)
faces = np.arange(
vertices.shape[0]).reshape(
vertices.shape[0] // 3, 3)
| # Get UV coordinates if possible
vis = None
if primitive.material in local_material_map:
mater | ial = copy.copy(
local_material_map[primitive.material])
uv = None
if len(primitive.texcoordset) > 0:
texcoord = primitive.texcoordset[0]
texcoord_index = primitive.texcoord_indexset[0]
uv = texcoord[texcoord_index].reshape(
(len(texcoord_index) * 3, 2))
vis = visual.texture.TextureVisuals(
uv=uv, material=material)
primid = u'{}.{}'.format(geometry.id, i)
meshes[primid] = {
'vertices': vertices,
'faces': faces,
'vertex_normals': normals,
'vertex_colors': colors,
'visual': vis}
graph.append({'frame_to': primid,
'matrix': parent_matrix,
'geometry': primid})
# recurse down tree for nodes with children
elif isinstance(node, collada.scene.Node):
if node.children is not None:
for child in node.children:
# create the new matrix
matrix = np.dot(parent_matrix, node.matrix)
# parse the child node
_parse_node(
node=child,
parent_matrix=matrix,
material_map=material_map,
meshes=meshes,
graph=graph,
resolver=resolver)
elif isinstance(node, collada.scene.CameraNode):
# TODO: convert collada cameras to trimesh cameras
pass
el |
iankronquist/numinous-nimbus | cs290/howto/site/pelicanconf.py | Python | mit | 883 | 0.001133 | #!/usr/bin/env python
# -*- coding: utf-8 -*- | #
from __future__ import unicode_literals

# Pelican static-site configuration for the CS290 How-To Guide.
AUTHOR = u'Ian Kronquist'
SITENAME = u'CS290 How-To Guide'
SITEURL = ''

PATH = 'content'
TIMEZONE = 'America/Los_Angeles'
DEFAULT_LANG = u'en'

# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None

# Blogroll
LINKS = (('Pelican', 'http://getpelican.com/'),
         ('Python.org', 'http://python.org/'),
         ('Jinja2', 'http://jinja.pocoo.org/'),
         ('You can modify those links in your config file', '#'),)

# Social widget
SOCIAL = (('You can add links in your config file', '#'),
          ('Another social link', '#'),)

DEFAULT_PAGINATION = False

# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
mlperf/training_results_v0.6 | Fujitsu/benchmarks/resnet/implementations/mxnet/3rdparty/tvm/topi/tests/python/test_topi_bitserial_conv2d.py | Python | apache-2.0 | 4,850 | 0.008247 | import numpy as np
import tvm
import topi
import topi.testing
from topi.util import get_const_tuple
from tvm.contrib.pickle_memoize import memoize
def generate_quantized_np(shape, bits, out_dtype):
    """Random integer tensor with values in [0, 2**bits), cast to out_dtype."""
    upper = 1 << bits
    return np.random.randint(0, upper, size=shape).astype(out_dtype)
def verify_bitserial_conv2d_nchw(batch, in_size, in_channel, num_filter, kernel, stride, padding,
                                 activation_bits, weight_bits, dorefa):
    """Build, run and check a bitserial conv2d (NCHW layout) on the CPU
    against a plain NumPy convolution reference."""
    in_height = in_width = in_size
    input_type = 'uint32'
    out_dtype = 'int32'

    with tvm.target.create('llvm'):
        A = tvm.placeholder((batch, in_channel, in_height, in_width), dtype=input_type, name='A')
        W = tvm.placeholder((num_filter, in_channel, kernel, kernel), dtype=input_type, name='W')
        B = topi.nn.bitserial_conv2d(A, W, stride, padding, activation_bits, weight_bits,
                                     out_dtype=out_dtype, layout="NCHW", dorefa=dorefa)
        s = topi.generic.schedule_bitserial_conv2d_nchw([B])

    a_shape = get_const_tuple(A.shape)
    w_shape = get_const_tuple(W.shape)

    @memoize("topi.tests.test_topi_bitseral_conv2d_nchw")
    def get_ref_data():
        a_np = generate_quantized_np(get_const_tuple(a_shape), activation_bits, input_type)
        w_np = generate_quantized_np(get_const_tuple(w_shape), weight_bits, input_type)
        if dorefa:
            # DoReFa-style weights: map {0, 1} -> {-1, +1} before convolving
            w_ = np.copy(w_np).astype(out_dtype)
            for x in np.nditer(w_, op_flags=['readwrite']):
                x[...] = 1 if x == 1 else -1
            b_np = topi.testing.conv2d_nchw_python(a_np.astype(out_dtype), w_, stride, padding)
        else:
            b_np = topi.testing.conv2d_nchw_python(a_np, w_np, stride, padding)
        return a_np, w_np, b_np
    a_np, w_np, b_np = get_ref_data()

    ctx = tvm.cpu(0)
    a = tvm.nd.array(a_np, ctx)
    w = tvm.nd.array(w_np, ctx)
    b = tvm.nd.array(np.zeros(get_const_tuple(B.shape), dtype=B.dtype), ctx)
    func = tvm.build(s, [A, W, B], "llvm")
    func(a, w, b)
    np.testing.assert_allclose(b.asnumpy(), b_np, rtol=1e-5)
def verify_bitserial_conv2d_nhwc(batch, in_size, in_channel, num_filter, kernel, stride, padding,
                                 activation_bits, weight_bits, dorefa):
    """Build, run and check a bitserial conv2d (NHWC layout) on the CPU
    against a plain NumPy convolution reference."""
    in_height = in_width = in_size
    input_type='uint32'
    out_dtype='int32'

    with tvm.target.create('llvm'):
        A = tvm.placeholder((batch, in_height, in_width, in_channel), dtype=input_type, name='A')
        W = tvm.placeholder((kernel, kernel, in_channel, num_filter), dtype=input_type, name='W')
        B = topi.nn.bitserial_conv2d(A, W, stride, padding, activation_bits, weight_bits, out_dtype=out_dtype,
                                     layout="NHWC", dorefa=dorefa)
        s = topi.generic.schedule_bitserial_conv2d_nhwc([B])

    a_shape = get_const_tuple(A.shape)
    w_shape = get_const_tuple(W.shape)

    @memoize("topi.tests.test_topi_bitseral_conv2d_nhwc")
    def get_ref_data():
        a_np = generate_quantized_np(get_const_tuple(a_shape), activation_bits, input_type)
        w_np = generate_quantized_np(get_const_tuple(w_shape), weight_bits, input_type)
        if dorefa:
            # DoReFa-style weights: map {0, 1} -> {-1, +1} before convolving
            w_ = np.copy(w_np).astype(out_dtype)
            for x in np.nditer(w_, op_flags=['readwrite']):
                x[...] = 1 if x == 1 else -1
            b_np = topi.testing.conv2d_nhwc_python(a_np, w_, stride, padding).astype(out_dtype)
        else:
            b_np = topi.testing.conv2d_nhwc_python(a_np, w_np, stride, padding).astype(out_dtype)
        return a_np, w_np, b_np
    a_np, w_np, b_np = get_ref_data()

    ctx = tvm.cpu(0)
    a = tvm.nd.array(a_np, ctx)
    w = tvm.nd.array(w_np, ctx)
    b = tvm.nd.array(np.zeros(get_const_tuple(B.shape), dtype=B.dtype), ctx)
    func = tvm.build(s, [A, W, B], 'llvm')
    func(a, w, b)
    np.testing.assert_allclose(b.asnumpy(), b_np, rtol=1e-5)
def test_bitserial_conv2d():
    """Exercise both layouts across activation/weight bit-width combinations."""
    in_size = 56
    ic, oc = 64, 64
    k = 3
    stride = 1
    pad = 1
    # (activation_bits, weight_bits, dorefa) — same order as before
    cases = [(1, 1, True), (2, 1, True), (1, 1, False), (2, 1, False), (2, 2, False)]
    for abits, wbits, dorefa in cases:
        verify_bitserial_conv2d_nchw(1, in_size, ic, oc, k, stride, pad, abits, wbits, dorefa)
    for abits, wbits, dorefa in cases:
        verify_bitserial_conv2d_nhwc(1, in_size, ic, oc, k, stride, pad, abits, wbits, dorefa)

if __name__ == "__main__":
    test_bitserial_conv2d()
|
totalvoice/totalvoice-python | totalvoice/cliente/api/conta.py | Python | mit | 6,794 | 0.003725 | # coding=utf-8
from __future__ import absolute_import
from .helper import utils
from .helper.routes import Routes
from totalvoice.cliente.api.totalvoice import Totalvoice
import json, requests
class Conta(Totalvoice):
def __init__(self, cliente):
super(Conta, self).__init__(cliente)
def criar_conta(self, nome, login, senha, cpf_cnpj=None, preco_fixo=None, preco_cel=None, preco_ramal=None, email_financeiro=None, nome_fantasia=None, valor_aviso_saldo_baixo=None):
"""
:Descrição:
Função para editar a sua conta.
:Utilização:
editar_conta()
:Parâmetros:
- nome:
Nome da conta.
- login:
Login da conta.
- senha:
Senha da conta;
- cpf_cnpj:
CPF ou CNPJ da conta.
- preco_fixo:
Preço de chamadas para fixo da conta.
- preco_cel:
Preço de chamadas para celulares da conta.
- preco_ramal:
Preço para ramais da conta.
- email_financeiro:
E-mail responsável pelo financeiro da conta.
- nome_fantasia
Nome fantasia da conta
- valor_aviso_saldo_baixo
É necessário ser um valor inteiro, ex: 100 .Quando o saldo de créditos atingir ou ficar abaixo do valor determinado, você receberá um aviso no email do email_financeiro(caso este não tenha sido cadastrado você receberá no e-mail de login).
"""
host = self.cliente.host + Routes.CONTA
data = self.__build_conta(nome, login, senha, cpf_cnpj, preco_fixo, preco_cel, preco_ramal, email_financeiro, nome_fantasia, valor_aviso_saldo_baixo) |
response = requests.post(host, headers=utils.build_header(self.cliente.access_token), data=data)
return response.content
def deletar(self, id):
"""
:Descrição:
Função para deletar uma conta.
:Utilização:
deletar(id)
:Parâmetros:
- id:
ID da conta ativa.
"""
host = self.build_host(self.cliente.host, Routes.CONTA, [i | d])
response = requests.delete(host, headers=utils.build_header(self.cliente.access_token))
return response.content
def get_by_id(self, id):
"""
:Descrição:
Função para buscar as informações de uma conta.
:Utilização:
get_by_id(id)
:Parâmetros:
- id:
ID da conta ativa.
"""
host = self.cliente.host + Routes.CONTA + "/" + id
return self.get_request(host)
def editar_conta(self, nome, login, senha, cpf_cnpj=None, preco_fixo=None, preco_cel=None, preco_ramal=None, email_financeiro=None, nome_fantasia=None):
"""
:Descrição:
Função para editar a sua conta.
:Utilização:
editar_conta()
:Parâmetros:
- nome:
Nome da conta.
- login:
Login da conta.
- senha:
Senha da conta;
- cpf_cnpj:
CPF ou CNPJ da conta.
- preco_fixo:
Preço de chamadas para fixo da conta.
- preco_cel:
Preço de chamadas para celulares da conta.
- preco_ramal:
Preço para ramais da conta.
- email_financeiro:
E-mail responsável pelo financeiro da conta.
- nome_fantasia
Nome fantasia da conta
"""
host = self.build_host(self.cliente.host, Routes.CONTA)
data = self.__build_conta(nome, login, senha, cpf_cnpj, preco_fixo, preco_cel, preco_ramal, email_financeiro, nome_fantasia)
response = requests.put(host, headers=utils.build_header(self.cliente.access_token), data=data)
return response.content
def get_relatorio(self):
"""
:Descrição:
Função para pegar o relatório de conta.
:Utilização:
get_relatorio()
"""
host = self.build_host(self.cliente.host, Routes.CONTA, ["relatorio"])
return self.get_request(host)
def recarga_bonus(self, id, valor):
"""
:Descrição:
Função para realizar recarga de bônus em uma conta filha
:Utilização:
recarga_bonus()
:Parâmetros:
- id:
ID da conta filha.
- valor:
Valor a ser creditado como bônus.
"""
host = self.cliente.host + Routes.CONTA + "/" + id + "/bonus"
data = json.dumps({"valor": valor})
response = requests.post(host, headers=utils.build_header(self.cliente.access_token), data=data)
return response.content
def __build_conta(self, nome, login, senha, cpf_cnpj, preco_fixo, preco_cel, preco_ramal, email_financeiro, nome_fantasia, valor_aviso_saldo_baixo):
data = {}
data.update({"nome": nome})
data.update({"login": login})
data.update({"senha": senha})
data.update({"cpf_cnpj": cpf_cnpj})
data.update({"preco_fixo": preco_fixo})
data.update({"preco_cel": preco_cel})
data.update({"preco_ramal": preco_ramal})
data.update({"email_financeiro": email_financeiro})
data.update({"nome_fantasia": nome_fantasia})
data.update({"valor_aviso_saldo_baixo":valor_aviso_saldo_baixo})
return json.dumps(data)
def get_webhook_default(self):
"""
:Descrição:
Função para obter a lista webhook default da conta.
:Utilização:
get_webhook()
"""
host = self.build_host(self.cliente.host, Routes.WEBHOOK_DEFAULT)
return self.get_request(host)
    def delete_webhook_default(self, nome_webhook):
        """Delete a default webhook.

        :Usage:
            delete_webhook_default(nome_webhook)
        :param nome_webhook: name of the webhook to delete.
        :returns: raw body of the DELETE response.
        """
        host = self.build_host(self.cliente.host, Routes.WEBHOOK_DEFAULT, [nome_webhook])
        response = requests.delete(host, headers=utils.build_header(self.cliente.access_token))
        return response.content
def edit_webhook_default(self, nome_webhook, url):
"""
:Descrição:
Função para deletar um webhook default.
:Utilização:
editar_webhook(nome_webhook, url)
:Parâmetros:
- nome_webhook:
Nome do webhook.
- url:
Url do webhook
"""
host = self.build_host(self.cliente.host, Routes.WEBHOOK_DEFAULT, [nome_webhook])
data = {}
data.update({"url" : url})
response = requests.put(host, headers=utils.build_header(self.cliente.access_token), data=json.dumps(data))
return response.content
|
Yadnyawalkya/integration_tests | cfme/tests/services/test_config_provider_servicecatalogs.py | Python | gpl-2.0 | 4,833 | 0.003517 | # -*- coding: utf-8 -*-
import pytest
from cfme import test_requirements
from cfme.services.myservice import MyService
from cfme.services.service_catalogs import ServiceCatalogs
from cfme.utils import testgen
from cfme.utils.blockers import GH
from cfme.utils.log import logger
# Markers applied to every test in this module: parametrized over the three
# Ansible Tower job types, skipped upstream.
# NOTE(review): uncollectif references job_type == 'workflow', which is not
# among the parametrized job types above — confirm whether it is dead logic.
pytestmark = [
    test_requirements.service,
    pytest.mark.tier(2),
    pytest.mark.parametrize('job_type', ['template', 'template_limit', 'template_survey'],
                            ids=['template_job', 'template_limit_job', 'template_survey_job'],
                            scope='module'),
    pytest.mark.ignore_stream('upstream'),
    pytest.mark.uncollectif(lambda appliance,
                            job_type: appliance.version < '5.10' and job_type == 'workflow')]
def pytest_generate_tests(metafunc):
    """Parametrize tests over configured config managers.

    Config managers whose YAML data has no provisioning section are dropped
    from the parametrization.
    """
    # Filter out providers without provisioning data or hosts defined
    argnames, argvalues, idlist = testgen.config_managers(metafunc)
    new_idlist = []
    new_argvalues = []
    for i, argvalue_tuple in enumerate(argvalues):
        args = dict(list(zip(argnames, argvalue_tuple)))
        if not args['config_manager_obj'].yaml_data['provisioning']:
            continue
        new_idlist.append(idlist[i])
        new_argvalues.append(argvalues[i])
    testgen.parametrize(metafunc, argnames, new_argvalues, ids=new_idlist, scope='module')
@pytest.fixture(scope="module")
def config_manager(config_manager_obj):
    """ Fixture that provides a random config manager and sets it up"""
    # Ansible Tower providers are created with validation; other manager
    # types use the default create() behaviour.
    if config_manager_obj.type == "Ansible Tower":
        config_manager_obj.create(validate=True)
    else:
        config_manager_obj.create()
    yield config_manager_obj
    # Teardown: remove the provider after all tests in the module ran.
    config_manager_obj.delete()
@pytest.fixture(scope="function")
def catalog_item(appliance, request, config_manager, ansible_tower_dialog, catalog, job_type):
    """Create an Ansible Tower catalog item for the parametrized job_type.

    The config template is looked up in the manager's provisioning YAML data.
    """
    config_manager_obj = config_manager
    provider_name = config_manager_obj.yaml_data.get('name')
    template = config_manager_obj.yaml_data['provisioning_data'][job_type]
    catalog_item = appliance.collections.catalog_items.create(
        appliance.collections.catalog_items.ANSIBLE_TOWER,
        name=ansible_tower_dialog.label,
        description="my catalog",
        display_in=True,
        catalog=catalog,
        dialog=ansible_tower_dialog,
        provider='{} Automation Manager'.format(provider_name),
        config_template=template)
    # Clean the catalog item up even when the test fails.
    request.addfinalizer(catalog_item.delete)
    return catalog_item
def test_order_tower_catalog_item(appliance, config_manager, catalog_item, request, job_type):
    """Tests ordering of catalog items for Ansible Template and Workflow jobs
    Metadata:
        test_flag: provision

    Polarion:
        assignee: nachandr
        initialEstimate: 1/4h
        casecomponent: Services
        caseimportance: high
    """
    # The 'template_limit' job type needs an inventory host passed through
    # the service dialog's 'limit' field.
    if job_type == 'template_limit':
        host = config_manager.yaml_data['provisioning_data']['inventory_host']
        dialog_values = {'limit': host}
        service_catalogs = ServiceCatalogs(appliance, catalog_item.catalog, catalog_item.name,
                                           dialog_values=dialog_values)
    else:
        service_catalogs = ServiceCatalogs(appliance, catalog_item.catalog, catalog_item.name)

    service_catalogs.order()
    logger.info('Waiting for cfme provision request for service %s', catalog_item.name)
    cells = {'Description': catalog_item.name}
    order_request = appliance.collections.requests.instantiate(cells=cells, partial_check=True)
    order_request.wait_for_request(method='ui')
    msg = 'Request failed with the message {}'.format(order_request.row.last_message.text)
    assert order_request.is_succeeded(method='ui'), msg
    # Restore the default list view changed while checking the request.
    appliance.user.my_settings.default_views.set_default_view('Configuration Management Providers',
                                                              'List View')
@pytest.mark.meta(blockers=[GH('ManageIQ/integration_tests:8610')])
def test_retire_ansible_service(appliance, catalog_item, request, job_type):
    """Tests retiring of catalog items for Ansible Template and Workflow jobs
    Metadata:
        test_flag: provision

    Polarion:
        assignee: nachandr
        casecomponent: Services
        caseimportance: medium
        initialEstimate: 1/4h
    """
    # Order the service first, then retire it once the request succeeds.
    service_catalogs = ServiceCatalogs(appliance, catalog_item.catalog, catalog_item.name)
    service_catalogs.order()
    logger.info('Waiting for cfme provision request for service %s', catalog_item.name)
    cells = {'Description': catalog_item.name}
    order_request = appliance.collections.requests.instantiate(cells=cells, partial_check=True)
    order_request.wait_for_request(method='ui')
    msg = "Request failed with the message {}".format(order_request.row.last_message.text)
    assert order_request.is_succeeded(method='ui'), msg
    myservice = MyService(appliance, catalog_item.name)
    myservice.retire()
|
d3banjan/polyamide | webdev/lib/python2.7/site-packages/pip/commands/wheel.py | Python | bsd-2-clause | 9,184 | 0 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import logging
import os
import warnings
from pip.basecommand import Command
from pip.index import PackageFinder
from pip.exceptions import CommandError, PreviousBuildDirError
from pip.req import InstallRequirement, RequirementSet, parse_requirements
from pip.utils import normalize_path
from pip.utils.build import BuildDirectory
from pip.utils.deprecation import RemovedInPip7Warning, RemovedInPip8Warning
from pip.wheel import WheelBuilder
from pip import cmdoptions
DEFAULT_WHEEL_DIR = os.path.join(normalize_path(os.curdir), 'wheelhouse')
logger = logging.getLogger(__name__)
class WheelCommand(Command):
"""
Build Wheel archives for your requirements and dependencies.
Wheel is a built-package format, and offers the advantage of not
recompiling your software during every install. For more details, see the
wheel docs: http://wheel.readthedocs.org/en/latest.
Requirements: setuptools>=0.8, and wheel.
'pip wheel' uses the bdist_wheel setuptools extension from the wheel
package to build individual wheels.
"""
name = 'wheel'
usage = """
%prog [options] <requirement specifier> ...
%prog [options] -r <requirements file> ...
%prog [options] [-e] <vcs project url> ...
%prog [options] [-e] <local project path> ...
%prog [options] <archive url/path> ..."""
summary = 'Build wheels from your requirements.'
    def __init__(self, *args, **kw):
        """Register the wheel-specific command line options."""
        super(WheelCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        # Where built wheels are written.
        cmd_opts.add_option(
            '-w', '--wheel-dir',
            dest='wheel_dir',
            metavar='dir',
            default=DEFAULT_WHEEL_DIR,
            help=("Build wheels into <dir>, where the default is "
                  "'<cwd>/wheelhouse'."),
        )
        cmd_opts.add_option(cmdoptions.use_wheel.make())
        cmd_opts.add_option(cmdoptions.no_use_wheel.make())
        # Extra arguments forwarded to 'setup.py bdist_wheel'.
        cmd_opts.add_option(
            '--build-option',
            dest='build_options',
            metavar='options',
            action='append',
            help="Extra arguments to be supplied to 'setup.py bdist_wheel'.")
        # Shared options reused from the common cmdoptions module.
        cmd_opts.add_option(cmdoptions.editable.make())
        cmd_opts.add_option(cmdoptions.requirements.make())
        cmd_opts.add_option(cmdoptions.download_cache.make())
        cmd_opts.add_option(cmdoptions.src.make())
        cmd_opts.add_option(cmdoptions.no_deps.make())
        cmd_opts.add_option(cmdoptions.build_dir.make())
        cmd_opts.add_option(
            '--global-option',
            dest='global_options',
            action='append',
            metavar='options',
            help="Extra global options to be supplied to the setup.py "
                 "call before the 'bdist_wheel' command.")
        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )
        cmd_opts.add_option(cmdoptions.no_clean.make())

        # Index options (index-url etc.) come first, then the command options.
        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)
def run(self, options, args):
# confirm requirements
try:
import wheel.bdist_wheel
# Hack to make flake8 not complain about an unused import
wheel.bdist_wheel
except ImportError:
raise CommandError(
"'pip wheel' requires the 'wheel' package. To fix this, run: "
"pip install wheel"
)
try:
import pkg_resources
except ImportError:
raise CommandError(
"'pip wheel' requires setuptools >= 0.8 for dist-info support."
" To fix this, run: pip install --upgrade setuptools"
)
else:
if not hasattr(pkg_resources, 'DistInfoDistribution'):
raise CommandError(
"'pip wheel' requires setuptools >= 0.8 for dist-info "
"support. To fix this, run: pip install --upgrade "
"setuptools"
)
index_urls = [options.index_url] + options.extra_index_urls
if options.no_index:
logger.info('Ignoring indexes: %s', ','.join(index_urls))
index_urls = []
if options.use_mirrors:
warnings.warn(
"--use-mirrors has been deprecated and will be removed in the "
"future. Explicit uses of --index-url and/or --extra-index-url"
" is suggested.",
RemovedInPip7Warning,
)
if options.mirrors:
warnings.warn(
"--mirrors has been deprecated and will be removed in the "
"future. Explicit uses of --index-url and/or --extra-index-url"
" is suggested.",
RemovedInPip7Warning,
)
index_urls += options.mirrors
if options.download_cache:
warnings.warn(
"--download-cache has been deprecated and will be removed in "
"the future. Pip now automatically uses and configures its "
"cache.",
RemovedInPip8Warning,
)
if options.build_dir:
options.build_dir = os.path.abspath(options.build_dir)
with self._build_session(options) as session:
finder = PackageFinder(
find_links=options.find_links,
index_urls=index_urls,
use_wheel=options.use_wheel,
allow_external=options.allow_external,
allow_unverified=options.allow_unverified,
allow_all_external=options.allow_all_external,
allow_all_prereleases=options.pre,
trusted_hosts=options.trusted_hosts,
process_dependency_links=options.process_dependency_links,
session=session,
)
build_delete = (not (options.no_clean or options.build_dir))
with BuildDirectory(options.build_dir,
delete=build_delete) as build_dir:
requirement_set = RequirementSet(
build_dir=build_dir,
src_dir=options.src_dir,
download_dir=None,
ignore_dependencies=options.ignore_dependencies,
ignore_installed=True,
isolated=options.isolated_mode,
session=session,
wheel_download_dir=options.wheel_dir
)
# make the wheelhouse
if no | t os.path.exists(options.wheel_dir):
os.makedirs(options.wheel_dir)
# parse args and/or requirements f | iles
for name in args:
requirement_set.add_requirement(
InstallRequirement.from_line(
name, None, isolated=options.isolated_mode,
)
)
for name in options.editables:
requirement_set.add_requirement(
InstallRequirement.from_editable(
name,
default_vcs=options.default_vcs,
isolated=options.isolated_mode,
)
)
for filename in options.requirements:
for req in parse_requirements(
filename,
finder=finder,
options=options,
session=session):
requirement_set.add_requirement(req)
# fail if no requirements
if not requirement_set.has_requirements:
logger.error |
terrycojones/dark-matter | test/test_fastq.py | Python | mit | 4,526 | 0 | from six.moves import builtins
from dark.reads import AARead, DNARead, RNARead
from dark.fastq import FastqReads
from dark.utils import StringIO
from unittest import TestCase, skip
try:
    from unittest.mock import patch, mock_open
except ImportError:
    # Python 2 fallback: mock_open must be imported as well, otherwise the
    # tests below raise NameError when they call mock_open().
    from mock import patch, mock_open
class TestFastqReads(TestCase):
    """
    Tests for the L{dark.fastq.FastqReads} class.
    """
    def testEmpty(self):
        """
        An empty FASTQ file results in an empty iterator.
        """
        with patch.object(builtins, 'open', mock_open()):
            reads = FastqReads('filename.fastq')
            self.assertEqual([], list(reads))

    def testOneRead(self):
        """
        A FASTQ file with one read must be read properly.
        """
        # FASTQ record: id line, sequence, separator, quality string.
        data = '\n'.join(['@id1', 'ACGT', '+', '!!!!'])
        with patch.object(builtins, 'open', mock_open(read_data=data)):
            reads = list(FastqReads('filename.fastq'))
            self.assertEqual([DNARead('id1', 'ACGT', '!!!!')], reads)

    def testTwoReads(self):
        """
        A FASTQ file with two reads must be read properly and its
        sequences must be returned in the correct order.
        """
        data = '\n'.join(['@id1', 'ACGT', '+', '!!!!',
                          '@id2', 'TGCA', '+', '????'])
        with patch.object(builtins, 'open', mock_open(read_data=data)):
            reads = list(FastqReads('filename.fastq'))
            self.assertEqual(2, len(reads))
            self.assertEqual([DNARead('id1', 'ACGT', '!!!!'),
                              DNARead('id2', 'TGCA', '????')], reads)

    def testTypeDefaultsToDNA(self):
        """
        A FASTQ file whose type is not specified must result in reads that
        are instances of DNARead.
        """
        data = '\n'.join(['@id1', 'ACGT', '+', '!!!!'])
        with patch.object(builtins, 'open', mock_open(read_data=data)):
            reads = list(FastqReads('filename.fastq'))
            self.assertTrue(isinstance(reads[0], DNARead))

    def testTypeAA(self):
        """
        A FASTQ file whose read class is AARead must result in reads that
        are instances of AARead.
        """
        data = '\n'.join(['@id1', 'ACGT', '+', '!!!!'])
        with patch.object(builtins, 'open', mock_open(read_data=data)):
            reads = list(FastqReads('filename.fastq', AARead))
            self.assertTrue(isinstance(reads[0], AARead))

    def testTypeDNA(self):
        """
        A FASTQ file whose read class is DNARead must result in reads that
        are instances of DNARead.
        """
        data = '\n'.join(['@id1', 'ACGT', '+', '!!!!'])
        with patch.object(builtins, 'open', mock_open(read_data=data)):
            reads = list(FastqReads('filename.fastq', DNARead))
            self.assertTrue(isinstance(reads[0], DNARead))

    def testTypeRNA(self):
        """
        A FASTQ file whose read class is RNARead must result in reads that
        are instances of RNARead.
        """
        data = '\n'.join(['@id1', 'ACGT', '+', '!!!!'])
        with patch.object(builtins, 'open', mock_open(read_data=data)):
            reads = list(FastqReads('filename.fastq', RNARead))
            self.assertTrue(isinstance(reads[0], RNARead))

    @skip('Some tests are broken and skipped under latest BioPython')
    def testTwoFiles(self):
        """
        It must be possible to read from two FASTQ files.
        """
        # Helper that returns a different in-memory FASTQ stream for each
        # successive open() call and verifies the filename order.
        class SideEffect(object):
            def __init__(self, test):
                self.test = test
                self.count = 0

            def sideEffect(self, filename):
                if self.count == 0:
                    self.test.assertEqual('file1.fastq', filename)
                    self.count += 1
                    return StringIO('@id1\nACTG\n+\n!!!!\n')
                elif self.count == 1:
                    self.test.assertEqual('file2.fastq', filename)
                    self.count += 1
                    return StringIO('@id2\nCAGT\n+\n!!!!\n')
                else:
                    self.test.fail('We are only supposed to be called twice!')

        sideEffect = SideEffect(self)
        with patch.object(builtins, 'open') as mockMethod:
            mockMethod.side_effect = sideEffect.sideEffect
            reads = FastqReads(['file1.fastq', 'file2.fastq'])
            self.assertEqual(
                [
                    DNARead('id1', 'ACTG', '!!!!'),
                    DNARead('id2', 'CAGT', '!!!!'),
                ],
                list(reads))
|
renesugar/arrow | python/pyarrow/tests/test_array.py | Python | apache-2.0 | 58,523 | 0 | # -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import datetime
import decimal
import hypothesis as h
import hypothesis.strategies as st
import itertools
import pickle
import pytest
import struct
import sys
import numpy as np
try:
import pickle5
except ImportError:
pickle5 = None
import pyarrow as pa
import pyarrow.tests.strategies as past
from pyarrow import compat
def test_total_bytes_allocated():
    # A fresh test process should have no outstanding Arrow allocations.
    assert pa.total_allocated_bytes() == 0
def test_getitem_NULL():
    # Indexing a null slot returns the pa.NULL singleton (identity, not just
    # equality).
    arr = pa.array([1, None, 2])
    assert arr[1] is pa.NULL
def test_constructor_raises():
    # This could happen by wrong capitalization.
    # ARROW-2638: prevent calling extension class constructors directly
    with pytest.raises(TypeError):
        pa.Array([1, 2])
def test_list_format():
    # format() pretty-prints list arrays with nested 2-space indentation and
    # 'null' for missing values.
    arr = pa.array([[1], None, [2, 3, None]])
    result = arr.format()
    expected = """\
[
  [
    1
  ],
  null,
  [
    2,
    3,
    null
  ]
]"""
    assert result == expected
def test_string_format():
    # String values are quoted; nulls render as bare 'null'.
    arr = pa.array([u'', None, u'foo'])
    result = arr.format()
    expected = """\
[
  "",
  null,
  "foo"
]"""
    assert result == expected
def test_long_array_format():
    # With a small window, only the head and tail are shown around '...'.
    arr = pa.array(range(100))
    result = arr.format(window=2)
    expected = """\
[
  0,
  1,
  ...
  98,
  99
]"""
    assert result == expected
def test_binary_format():
    # Binary values are rendered as uppercase hex; empty bytes as a blank
    # entry.
    arr = pa.array([b'\x00', b'', None, b'\x01foo', b'\x80\xff'])
    result = arr.format()
    expected = """\
[
  00,
  ,
  null,
  01666F6F,
  80FF
]"""
    assert result == expected
def test_to_numpy_zero_copy():
    arr = pa.array(range(10))
    old_refcount = sys.getrefcount(arr)

    # Mutating the numpy view must be visible through the Arrow array,
    # proving the conversion did not copy.
    np_arr = arr.to_numpy()
    np_arr[0] = 1
    assert arr[0] == 1

    assert sys.getrefcount(arr) == old_refcount

    arr = None
    import gc
    gc.collect()

    # Ensure base is still valid
    assert np_arr.base is not None

    expected = np.arange(10)
    expected[0] = 1
    np.testing.assert_array_equal(np_arr, expected)
def test_to_numpy_unsupported_types():
    # ARROW-2871: Some primitive types are not yet supported in to_numpy
    bool_arr = pa.array([True, False, True])

    with pytest.raises(NotImplementedError):
        bool_arr.to_numpy()

    null_arr = pa.array([None, None, None])

    with pytest.raises(NotImplementedError):
        null_arr.to_numpy()
@pytest.mark.pandas
def test_to_pandas_zero_copy():
    import gc

    arr = pa.array(range(10))

    # Converting to pandas must not leak references to the source array.
    for i in range(10):
        series = arr.to_pandas()
        assert sys.getrefcount(series) == 2
        series = None  # noqa

    assert sys.getrefcount(arr) == 2

    for i in range(10):
        arr = pa.array(range(10))
        series = arr.to_pandas()
        arr = None
        gc.collect()

        # Ensure base is still valid

        # Because of py.test's assert inspection magic, if you put getrefcount
        # on the line being examined, it will be 1 higher than you expect
        base_refcount = sys.getrefcount(series.values.base)
        assert base_refcount == 2
        series.sum()
@pytest.mark.nopandas
@pytest.mark.pandas
def test_asarray():
    # ensure this is tested both when pandas is present or not (ARROW-6564)

    arr = pa.array(range(4))

    # The iterator interface gives back an array of Int64Value's
    np_arr = np.asarray([_ for _ in arr])
    assert np_arr.tolist() == [0, 1, 2, 3]
    assert np_arr.dtype == np.dtype('O')
    assert type(np_arr[0]) == pa.lib.Int64Value

    # Calling with the arrow array gives back an array with 'int64' dtype
    np_arr = np.asarray(arr)
    assert np_arr.tolist() == [0, 1, 2, 3]
    assert np_arr.dtype == np.dtype('int64')

    # An optional type can be specified when calling np.asarray
    np_arr = np.asarray(arr, dtype='str')
    assert np_arr.tolist() == ['0', '1', '2', '3']

    # If PyArrow array has null values, numpy type will be changed as needed
    # to support nulls.
    arr = pa.array([0, 1, 2, None])
    assert arr.type == pa.int64()
    np_arr = np.asarray(arr)
    elements = np_arr.tolist()
    assert elements[:3] == [0., 1., 2.]
    assert np.isnan(elements[3])
    assert np_arr.dtype == np.dtype('float64')

    # DictionaryType data will be converted to dense numpy array
    arr = pa.DictionaryArray.from_arrays(
        pa.array([0, 1, 2, 0, 1]), pa.array(['a', 'b', 'c']))
    np_arr = np.asarray(arr)
    assert np_arr.dtype == np.dtype('object')
    assert np_arr.tolist() == ['a', 'b', 'c', 'a', 'b']
def test_array_getitem():
    arr = pa.array(range(10, 15))
    lst = arr.to_pylist()

    # Valid positive and negative indices mirror Python list semantics.
    for idx in range(-len(arr), len(arr)):
        assert arr[idx].as_py() == lst[idx]
    # Out-of-range indices (both directions) raise IndexError.
    for idx in range(-2 * len(arr), -len(arr)):
        with pytest.raises(IndexError):
            arr[idx]
    for idx in range(len(arr), 2 * len(arr)):
        with pytest.raises(IndexError):
            arr[idx]
def test_array_slice():
    arr = pa.array(range(10))

    sliced = arr.slice(2)
    expected = pa.array(range(2, 10))
    assert sliced.equals(expected)

    sliced2 = arr.slice(2, 4)
    expected2 = pa.array(range(2, 6))
    assert sliced2.equals(expected2)

    # 0 offset
    assert arr.slice(0).equals(arr)

    # Slice past end of array
    assert len(arr.slice(len(arr))) == 0

    with pytest.raises(IndexError):
        arr.slice(-1)

    # Test slice notation
    assert arr[2:].equals(arr.slice(2))
    assert arr[2:5].equals(arr.slice(2, 3))
    assert arr[-5:].equals(arr.slice(len(arr) - 5))

    # Steps other than 1 (including reversal) are unsupported.
    with pytest.raises(IndexError):
        arr[::-1]

    with pytest.raises(IndexError):
        arr[::2]

    # Exhaustively compare slice notation against Python list slicing.
    n = len(arr)
    for start in range(-n * 2, n * 2):
        for stop in range(-n * 2, n * 2):
            assert arr[start:stop].to_pylist() == arr.to_pylist()[start:stop]
def test_array_diff():
    # ARROW-6252
    arr1 = pa.array(['foo'], type=pa.utf8())
    arr2 = pa.array(['foo', 'bar', None], type=pa.utf8())
    arr3 = pa.array([1, 2, 3])
    arr4 = pa.array([[], [1], None], type=pa.list_(pa.int64()))

    # Identical arrays produce an empty diff.
    assert arr1.diff(arr1) == ''
    assert arr1.diff(arr2) == '''
@@ -1, +1 @@
+"bar"
+null
'''
    # Type mismatches are reported instead of a row diff.
    # (A literal duplicate of the next assertion was removed.)
    assert arr1.diff(arr3) == '# Array types differed: string vs int64'
    assert arr1.diff(arr4) == ('# Array types differed: string vs '
                               'list<item: int64>')
def test_array_iter():
    arr = pa.array(range(10))

    # Iterating yields scalar values comparable to the source ints.
    for i, j in zip(range(10), arr):
        assert i == j

    assert isinstance(arr, compat.Iterable)
def test_struct_array_slice():
    # ARROW-2311: slicing nested arrays needs special care
    ty = pa.struct([pa.field('a', pa.int8()),
                    pa.field('b', pa.float32())])
    arr = pa.array([(1, 2.5), (3, 4.5), (5, 6.5)], type=ty)
    assert arr[1:].to_pylist() == [{'a': 3, 'b': 4.5},
                                   {'a': 5, 'b': 6.5}]
def test_array_factory_invalid_type():
    # Arbitrary Python objects with no Arrow mapping are rejected.
    class MyObject:
        pass

    arr = np.array([MyObject()])

    with pytest.raises(ValueError):
        pa.array(arr)
def test_array_ref_to_ndarray_base():
    arr = np.array([1, 2, 3])

    # Creating an Arrow array from a numpy array keeps one extra reference
    # to the numpy buffer alive.
    refcount = sys.getrefcount(arr)
    arr2 = pa.array(arr)  # noqa
    assert sys.getrefcount(arr) == (refcount + 1)
def test_array_eq_raises():
# ARROW-2150: we are raising when comparing arrays until we define the
# behavior to either be elementwise comparisons or data equality
arr1 = pa.array([1, 2, 3], ty |
# Instance type whitelists offered when configuring the stacks.
EC2_INSTANCE_TYPES = [
    't2.micro',
    't2.small',
    't2.medium',
    't2.large',
    'r4.large'
]

RDS_INSTANCE_TYPES = [
    'db.t2.micro',
    'db.t2.small',
    'db.t2.medium',
    'db.t2.large'
]

ELASTICACHE_INSTANCE_TYPES = [
    'cache.m1.small'
]

# Network CIDR blocks.
ALLOW_ALL_CIDR = '0.0.0.0/0'
VPC_CIDR = '10.0.0.0/16'

# Service port numbers used in security group rules.
GRAPHITE = 2003
GRAPHITE_WEB = 8080
HTTP = 80
HTTPS = 443
KIBANA = 5601
POSTGRESQL = 5432
REDIS = 6379
RELP = 20514
SSH = 22
STATSITE = 8125

# S3 static website hosting constants for us-east-1.
AMAZON_S3_HOSTED_ZONE_ID = 'Z3AQBSTGFYJSTF'
AMAZON_S3_WEBSITE_DOMAIN = 's3-website-us-east-1.amazonaws.com'
|
katyast/Se-Python-17-Stoliarova | pages/create_film_page.py | Python | apache-2.0 | 347 | 0.002882 | from pages.internal_page import InternalPage
fr | om pages.blocks.film_form import FilmForm
from selenium.webdriver.support.select import Select
class CreateFilmPage(InternalPage):
    """Page object for the 'create film' page; exposes the film form block."""
    def __init__(self, driver, base_url):
        super(CreateFilmPage, self).__init__(driver, base_url)
        # Composite block wrapping the film creation form fields.
        self.film_form = FilmForm(self.driver, self.base_url)
dimagi/commcare-hq | corehq/apps/linked_domain/migrations/0011_auto_20200728_2316.py | Python | bsd-3-clause | 804 | 0.001244 | # Generated by Django 2.2.13 on 2020-07-28 23:16
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: updates the choice list of
    # DomainLinkHistory.model (adds/renames linked-data model choices).

    dependencies = [
        ('linked_domain', '0010_auto_20200622_0156'),
    ]

    operations = [
        migrations.AlterField(
            model_name='domainlinkhistory',
            name='model',
            field=models.CharField(choices=[('app', 'Application'), ('custom_user_data', 'Custom User Data Fields'), ('custom_product_data', 'Custom Product Data Fields'), ('custom_location_data', 'Custom Location Data Fields'), ('roles', 'User Roles'), ('toggles', 'Feature Flags and Previews'), ('fixture', 'Lookup Table'), ('case_search_data', 'Case Search Settings'), ('report', 'Report'), ('data_dictionary', 'Data Dictionary')], max_length=128),
        ),
    ]
|
dilipbobby/DataScience | Python3/Level-1/allprimes.py | Python | apache-2.0 | 457 | 0.04814 |
# Prints all prime numbers in a given inclusive range.
__author__ = "Dilipbobby"


def is_prime(num):
    """Return True if num is a prime number."""
    if num <= 1:
        return False
    # Trial division up to sqrt(num) is enough to find any factor.
    for i in range(2, int(num ** 0.5) + 1):
        if num % i == 0:
            return False
    return True


def primes_in_range(lower, upper):
    """Return a list of all primes in [lower, upper] (inclusive)."""
    return [n for n in range(lower, upper + 1) if is_prime(n)]


if __name__ == "__main__":
    # Take the input from the user:
    lower = int(input("Enter lower range: "))
    upper = int(input("Enter upper range: "))
    for prime in primes_in_range(lower, upper):
        print(prime)
e2jk/nautilus-image-manipulator | nautilus_image_manipulator/NautilusImageManipulatorDialog.py | Python | gpl-3.0 | 26,709 | 0.002621 | # -*- coding: utf-8 -*-
### BEGIN LICENSE
# Copyright (C) 2010-2013 Emilien Klein <emilien _AT_ klein _DOT_ st>
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
### END LICENSE
from gi.repository import Gtk
from gi.repository import Gdk
from gi.repository import GObject
import os
import urllib2
import logging
from nautilus_image_manipulator.helpers import get_builder
from nautilus_image_manipulator.ImageManipulations import ImageManipulations
from ProfileSettings import Profile, Config
from upload.BaseUploadSite import UnknownUploadDestinationException
from upload.BaseUploadSite import InvalidEndURLsException
from upload.BaseUploadSite import FinalURLsNotFoundException
import gettext
from gettext import gettext as _
gettext.textdomain('nautilus-image-manipulator')
class NautilusImageManipulatorDialog(Gtk.Dialog):
__gtype_name__ = "NautilusImageManipulatorDialog"
# To construct a new instance of this method, the following notable
# methods are called in this order:
# __new__(cls)
# __init__(self)
# finish_initializing(self, builder)
# __init__(self)
#
# For this reason, it's recommended you leave __init__ empty and put
# your inialization code in finish_intializing
    def __new__(cls):
        """Special static method that's automatically called by Python when
        constructing a new instance of this class.

        Returns a fully instantiated NautilusImageManipulatorDialog object.
        """
        # The dialog is built from the Glade/GtkBuilder UI definition rather
        # than through __init__ (see finish_initializing).
        builder = get_builder('NautilusImageManipulatorDialog')
        new_object = builder.get_object('nautilus_image_manipulator_dialog')
        new_object.finish_initializing(builder)
        return new_object
    def finish_initializing(self, builder):
        """Called when we're finished initializing.

        finish_initalizing should be called after parsing the ui definition
        and creating a NautilusImageManipulatorDialog object with it in order to
        finish initializing the start of the new NautilusImageManipulatorDialog
        instance.

        Put your initialization code in here and leave __init__ undefined.
        """
        # Get a reference to the builder's get_object and set up the signals.
        self.o = builder.get_object
        builder.connect_signals(self)

        # Populate the list of sites to upload to
        # NOTE(review): model.get_iter(i) is called with a plain int index —
        # confirm this is valid for this GtkTreeModel implementation.
        model = self.o("upload_combo").get_model()
        self.upload_sites = [model.get_value(model.get_iter(i), 0) for \
                             i, k in enumerate(model)]

        # Load the saved configuration
        self.loadConfig()

        self.processingCanceled = False

        # Give the Resize button the focus to respond to Enter
        self.o("resize_button").grab_focus()
    def set_files(self, files):
        """Remember the files the dialog will operate on."""
        self.files = files
    def resize_button_clicked(self, widget, data=None):
        """The user has elected to resize the images

        Called before the dialog returns Gtk.RESONSE_OK from run().
        """
        idSelectedProfile = self.o("profiles_combo").get_active()
        # If the custom profile was selected, update it with the currently
        # selected parameters (the custom profile is the last entry).
        if idSelectedProfile == (len(self.conf.profiles) - 1):
            p = self.create_new_profile_from_custom_settings()
            p.name = _("Custom settings")
            self.conf.profiles[idSelectedProfile] = p
        self.p = self.conf.profiles[idSelectedProfile]
        logging.info("The following profile has been selected:\n%s" % self.p)

        # Disable the parameter UI elements and display the progress bar
        self.o("details_box").set_sensitive(False)
        self.o("resize_button").set_sensitive(False)
        self.o("deleteprofile_button").set_visible(False)
        self.o("newprofile_button").set_visible(False)
        self.o("progressbar").set_text("%s 0%%" % ("Resizing images..."))
        self.o("progressbar").show()
        while Gtk.events_pending():
            Gtk.main_iteration() # Used to refresh the UI

        # Resize the images
        # NOTE(review): task.next is the Python 2 generator protocol; under
        # Python 3 this would need next(task) — confirm target runtime.
        im = ImageManipulations(self, self.files, self.p)
        im.connect("resizing_done", self.on_resizing_done)
        task = im.resize_images()
        GObject.idle_add(task.next)

        # Remember the settings for next time
        self.saveConfig()
    def destination_entry_changed_cb(self, widget, data=None):
        """Validate the destination text entries as the user types.

        Shows/hides the matching error label and disables the Resize button
        while the current entry's content is invalid.
        """
        # NOTE(review): if this callback is ever connected to a widget other
        # than the three handled below, isError/errorLabel are unbound and
        # the trailing code raises UnboundLocalError — confirm connections.
        if widget == self.o("subfolder_entry"):
            isError = (0 == len(widget.get_text()))
            errorLabel = self.o("subfolder_entry_error_label")
        elif widget == self.o("append_entry"):
            # The append entry has two dedicated error labels, so the shared
            # errorLabel mechanism is bypassed.
            errorLabel = None
            if 0 == len(widget.get_text()):
                isError = True
                self.o("append_entry_empty_error_label").set_visible(True)
                self.o("append_entry_invalid_error_label").set_visible(False)
            elif os.path.sep == widget.get_text()[-1]:
                isError = True
                self.o("append_entry_invalid_error_label").set_text(_("The string to append cannot end in %s") % os.path.sep)
                self.o("append_entry_empty_error_label").set_visible(False)
                self.o("append_entry_invalid_error_label").set_visible(True)
            else:
                # TODO: Check that the value is valid to be appended to the filename
                isError = False
                self.o("append_entry_empty_error_label").set_visible(False)
                self.o("append_entry_invalid_error_label").set_visible(False)
        elif widget == self.o("zipname_entry"):
            isError = (0 == len(widget.get_text()))
            errorLabel = self.o("zipname_entry_error_label")
        # Adapt the visibility of the appropriate error message
        if errorLabel:
            errorLabel.set_visible(isError)
        # Don't allow resizing if text is empty
        self.o("resize_button").set_sensitive(not isError)
    def on_resizing_done(self, im):
        """Triggered when all the images have been resized"""
        # Only pack and send the images if the process was not canceled and if there is at least one image to pack
        if self.p.destination == 'upload' and not self.processingCanceled and len(im.newFiles) > 0:
            # The user wants to upload to a website
            if len(im.newFiles) > 1:
                # There are more than one image, zip the files together and upload the zipfile
                im.connect("packing_done", self.on_packing_done)
                task = im.pack_images()
                GObject.idle_add(task.next)
            else:
                # There is only one image, send that image alone (don't zip the file)
                self.upload_file(im, im.newFiles[0])
        else:
            # The user doesn't want to send the images, we're done!
            self.destroy()
    def on_packing_done(self, im, zipfile):
        """Triggered when all the images have been packed together."""
        # Forward the zip archive to the configured upload destination.
        self.upload_file(im, zipfile)
def upload_file(self, im, fileToUpload):
    """Uploads a file to a website.

    Dynamically imports the site-specific upload handler module matching
    the URL selected in the preferences (self.p.url), then hands the file
    off to it.  NOTE: uses the Python 2 `exec` statement.
    """
    # Import the module that takes care of uploading to the selected website,
    # e.g. "from upload.z_example_com import UploadSite" for "example.com".
    import_string = "from upload.z_%s import UploadSite" % \
        self.p.url.replace(".", "_").replace("/", "")
    logging.debug("import_string: %s" % import_string)
    # Make sure the import does not fail
    try:
        exec import_string
    except ImportError:
        # NOTE(review): "%(extra_info)s" is left unexpanded here — presumably
        # error_on_uploading substitutes it; verify against that method.
        self.error_on_uploading(_("The selected upload site %(site_name)s is not valid.") % {"site_name": '"%s"' % self.p.url} + "\n\n%(extra_info)s", fileToUpload, True)
        return
|
ruchee/vimrc | vimfiles/bundle/vim-python/submodules/toml/toml/__init__.py | Python | mit | 723 | 0 | """Python module which parses and emits TOML.
Released under the MIT license.
"""
from toml import encoder
from toml import decoder

__version__ = "0.10.2"
_spec_ = "0.5.0"

# Re-export the public decoding API at package level.
load = decoder.load
loads = decoder.loads
TomlDecoder = decoder.TomlDecoder
TomlDecodeError = decoder.TomlDecodeError
TomlPreserveCommentDecoder = decoder.TomlPreserveCommentDecoder

# Re-export the public encoding API at package level.
dump = encoder.dump
dumps = encoder.dumps
TomlEncoder = encoder.TomlEncoder
TomlArraySeparatorEncoder = encoder.TomlArraySeparatorEncoder
TomlPreserveInlineDictEncoder = encoder.TomlPreserveInlineDictEncoder
TomlNumpyEncoder = encoder.TomlNumpyEncoder
TomlPreserveCommentEncoder = encoder.TomlPreserveCommentEncoder
TomlPathlibEncoder = encoder.TomlPathlibEncoder
|
mrcl/HakketyYaks | app/grant_hunter_vars.py | Python | mit | 2,621 | 0.058375 |
def calc_approve(pool, area, age, group, amount, percent):
    """Estimate whether a grant application is likely to be approved.

    Each argument is a key into the corresponding module-level likelihood
    table (list_pool, list_area, list_age, list_group, list_amount,
    list_percent); every table maps a category to an (approve, decline)
    pair.  The priors (0.65 / 0.35) are scaled by each factor and the
    larger product wins.  Returns a human-readable verdict string.
    """
    debug = False
    print("calculating results")
    approve = 0.65
    decline = 0.35
    # Multiply in the per-factor likelihoods (replaces the repeated
    # copy-pasted multiply chain; also fixes stray '|' extraction artifacts).
    factors = (
        (list_pool, pool),
        (list_area, area),
        (list_age, age),
        (list_group, group),
        (list_amount, amount),
        (list_percent, percent),
    )
    for table, key in factors:
        approve *= table[key][0]
        decline *= table[key][1]
    result = "Your grant application is likely to be "
    if approve > decline:
        result += "approved. :D"
    else:
        result += "declined. =("
    if debug:
        result += " DEBUG: (" + str(approve) + " : " + str(decline) + ")"
    return result
# Likelihood tables used by calc_approve.  Each maps a category key to an
# (approve, decline) pair of weights derived from historical grant data.

# Per funding-pool weights.
list_pool={
    'Creative Communities Local Funding Scheme':(0.126,0.186),
    'Social And Recreation Fund':(0.114,0.164),
    'Betty Campbell Accommodation Assistance':(0.066,0.024),
    'Wellington Venue Subsidy':(0.063,0.028),
    'Built Heritage Incentive Fund':(0.060,0.029),
    'C H Izard Bequest':(0.020,0.068),
    'Our Living City Fund':(0.038,0.033),
    'Community Events Sponsorship':(0.014,0.026),
    'Wellington Regional Amenities Fund':(0.005,0.018),
    'General Grants':(0.189,0.111),
    'Arts And Culture Fund':(0.147,0.208)
}
# Per suburb/area weights.
list_area={
    'TeAro':(0.067,0.089),
    'Newtown':(0.063,0.090),
    'Tawa':(0.045,0.044),
    'Miramar':(0.040,0.036),
    'AroValley-Highbury':(0.033,0.042),
    'Kilbirnie':(0.028,0.039),
    'IslandBay-OwhiroBay':(0.031,0.031),
    'Karori':(0.030,0.033),
    'Kelburn':(0.013,0.033),
    'All':(0.192,0.135),
    'WellingtonCBD':(0.444,0.566)
}
# Per target-age-band weights.
list_age={
    'Seniors':(0.011,0.019),
    'Youth':(0.100,0.168),
    'Children':(0.057,0.079),
    'All':(0.831,0.734)
}
# Per target-group weights.
list_group={
    'Youth/Students/School':(0.041,0.035),
    'Children':(0.007,0.034),
    'Women':(0.014,0.011),
    'Families':(0.005,0.037),
    'Residents':(0.013,0.001),
    'Maori/Pacific':(0.030,0.026),
    'Arts':(0.008,0.001),
    'Parents':(0.001,0.017),
    'New Zealand':(0.006,0.001),
    'Disabilities':(0.01,0.026),
    'Refugees':(0.011,0.003),
    'Community/People':(0.756,0.653)
}
# Per requested-amount-bracket weights.
list_amount={
    '>100000':(0.145,0.080),
    '<=1000000':(0.113,0.084),
    '<=500000':(0.306,0.322),
    '<=10000':(0.143,0.185),
    '<=5000':(0.264,0.322),
}
# Per percent-of-project-cost-bracket weights.
list_percent={
    '<=10':(0.109,0.111),
    '<=20':(0.124,0.103),
    '<=30':(0.124,0.082),
    '<=40':(0.088,0.080),
    '<=50':(0.079,0.071),
    '<=60':(0.058,0.055),
    '<=70':(0.055,0.043),
    '<=80':(0.050,0.069),
    '<=90':(0.040,0.051),
    '<=100':(0.242,0.328)
}
|
indico/indico | indico/modules/events/reminders/util.py | Python | mit | 1,052 | 0.003802 | # This file is part of Indico.
# Copyright (C) 2002 - 2022 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from indico.modules.events.models.events import EventType
from indico.web.flask.templating import get_template_module
def make_reminder_email(event, with_agenda, with_description, note):
    """Return the template module for the reminder email.

    :param event: The event
    :param with_agenda: If the event's agenda should be included
    :param with_description: If the event's description should be included
    :param note: A custom message to include in the email
    """
    # Lectures have no timetable, so never include an agenda for them.
    if event.type_ == EventType.lecture:
        with_agenda = False
    # Top-level timetable entries only (children are rendered by the template).
    agenda = event.timetable_entries.filter_by(parent_id=None).all() if with_agenda else None
    return get_template_module('events/reminders/emails/event_reminder.txt', event=event,
                               url=event.short_external_url, note=note, with_agenda=with_agenda,
                               with_description=with_description, agenda=agenda)
|
JakeWimberley/Weathredds | tracker/urls.py | Python | gpl-3.0 | 2,582 | 0.003486 | """
Copyright 2016 Jacob C. Wimberley.
This file is part of Weathredds.
Weathredds is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Weathredds is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Weathredds. If not, see <http://www.gnu.org/licenses/>.
"""
from django.conf.urls import url, include
from django.contrib.auth import views as auth_views
from . import views
from .views import ChangeEvent, ChangeThread
# URL routes for the tracker app.  (Fixes stray '|' extraction artifacts
# that broke two of the async routes.)
urlpatterns = [
    url(r'^$', views.home, name='home'),
    url(r'weathredds/$', views.home, name='home'),
    url(r'home/$', views.home, name='_home'),
    # Authentication.
    url(r'accounts/', include('django.contrib.auth.urls')),
    url(r'login/$', auth_views.login, {'template_name': 'registration/login.html'}, name='login'),
    url(r'logout/$', auth_views.logout_then_login, name='logout'),
    #url(r'^discussions/(\d{8}_\d{4})/(\d{8}_\d{4})/$', views.discussionRange),
    #url(r'^discussions/$', views.allDiscussions),
    # Events and threads.
    url(r'extendThread/(\d+)$', views.extendThread, name='extendThread'),
    url(r'newEvent/$', views.newEvent, name='newEvent'),
    url(r'newThread/$', views.newThread, name='newThread'),
    url(r'newThreadInEvent/(\d+)$', views.newThread, name='newThreadInEvent'),
    url(r'event/(\d+)$', views.singleEvent, name='singleEvent'),
    url(r'thread/(\d+)$', views.singleThread, name='singleThread'),
    url(r'changeEvent/(?P<pk>\d+)$', ChangeEvent.as_view(), name='changeEvent'),
    url(r'changeThread/(?P<pk>\d+)$', ChangeThread.as_view(), name='changeThread'),
    url(r'tag/([^,\\\']+)$', views.singleTag, name='singleTag'),
    url(r'find/$', views.find, name='find'),
    # AJAX endpoints.
    url(r'async/togglePin$', views.asyncTogglePin, name='togglePin'),
    url(r'async/toggleTag$', views.asyncToggleTag, name='toggleTag'),
    url(r'async/toggleFrozen$', views.asyncToggleFrozen, name='toggleFrozen'),
    url(r'async/threadsForPeriod$', views.asyncThreadsForPeriod, name='threadsForPeriod'),
    url(r'async/eventsAtTime$', views.asyncEventsAtTime, name='eventsAtTime'),
    url(r'async/associateEventsWithThread$', views.asyncAssociateEventsWithThread, name='associateEventsWithThread'),
]
|
sinabahram/GrowTheTribe | GrowTheTribe/apps/talks/forms.py | Python | gpl-3.0 | 489 | 0.002045 | from django.forms import ModelForm
from django.forms.models import inlineformset_factory
from .models import Talk, Appearance, Resource
class TalkForm(ModelForm):
    """ModelForm for Talk."""
    class Meta:
        model = Talk
        # NOTE(review): no `fields`/`exclude` declared (same below); Django
        # >= 1.8 raises ImproperlyConfigured for this — confirm target version.


class AppearanceForm(ModelForm):
    """ModelForm for Appearance."""
    class Meta:
        model = Appearance


class ResourceForm(ModelForm):
    """ModelForm for Resource."""
    class Meta:
        model = Resource


# Inline formsets for editing a Talk's resources and appearances in place.
ResourceFormSet = inlineformset_factory(Talk, Resource, extra=1)
AppearanceFormSet = inlineformset_factory(Talk, Appearance, extra=1)
|
fabiomontefuscolo/reciclapy | objects.py | Python | gpl-2.0 | 2,200 | 0.004545 | # -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import pygame, random
VEL = 25
class Object(pygame.sprite.Sprite):
    """Base sprite for a recyclable item that can be shot upward."""

    def __init__(self):
        super(Object, self).__init__()
        # True while the item is flying upward (set by shoot()).
        self.shooted = False
        # Placeholder; presumably assigned by the game logic — TODO confirm.
        self.position = None

    def shoot(self):
        """Start moving this item upward on subsequent update() calls."""
        self.shooted = True

    def update(self):
        """Move the sprite VEL pixels up per frame while it is shot."""
        if self.shooted:
            self.rect.center = (self.rect.center[0], self.rect.center[1]-VEL)

    def kill(self):
        """Remove the sprite from all groups and reset its shot state."""
        super(Object, self).kill()
        self.shooted = False
class PlasticObject(Object):
    """A plastic item (bottle sprite)."""
    def __init__(self):
        super(PlasticObject, self).__init__()
        # random.choice over a one-element list: kept for easy extension to more images.
        self.image_src = pygame.image.load(random.choice(['images/bottle.png']))
        self.image = self.image_src
        self.rect = self.image.get_rect()
class MetalObject(Object):
    """A metal item (tin-can sprite)."""
    def __init__(self):
        super(MetalObject, self).__init__()
        # random.choice over a one-element list: kept for easy extension to more images.
        self.image_src = pygame.image.load(random.choice(['images/tin.png']))
        self.image = self.image_src
        self.rect = self.image.get_rect()
class PaperObject(Object):
    """A paper item (newspaper sprite)."""
    def __init__(self):
        super(PaperObject, self).__init__()
        # random.choice over a one-element list: kept for easy extension to more images.
        self.image_src = pygame.image.load(random.choice(['images/news.png']))
        self.image = self.image_src
        self.rect = self.image.get_rect()
class GlassObject(Object):
    """A glass item (cup sprite)."""
    def __init__(self):
        super(GlassObject, self).__init__()
        # random.choice over a one-element list: kept for easy extension to more images.
        self.image_src = pygame.image.load(random.choice(['images/cup.png']))
        self.image = self.image_src
        self.rect = self.image.get_rect()
|
twuilliam/pyMuse | pymuse/viz.py | Python | mit | 6,204 | 0.003385 | __author__ = 'benjamindeleener'
import matplotlib.pyplot as plt
import matplotlib.ticker as mticker
from datetime import datetime, timedelta
from numpy import linspace
def timeTicks(x, pos):
    """Matplotlib tick formatter: render *x* milliseconds as H:MM:SS[.ffffff].

    `pos` is required by the FuncFormatter callback signature but unused.
    """
    return str(timedelta(milliseconds=x))
class MuseViewer(object):
    """Base class holding the timing/refresh state shared by Muse viewers."""

    def __init__(self, acquisition_freq, signal_boundaries=None):
        """Record acquisition frequency and optional [low, high] y-axis bounds."""
        self.refresh_freq = 0.15  # minimum seconds between redraws
        self.acquisition_freq = acquisition_freq
        self.init_time = datetime.now()
        self.last_refresh = datetime.now()
        if signal_boundaries is None:
            self.low, self.high = 0, 1
        else:
            self.low, self.high = signal_boundaries[0], signal_boundaries[1]
class MuseViewerSignal(MuseViewer):
    """Live four-panel plot of the Muse EEG channels (raw or FFT)."""

    def __init__(self, signal, acquisition_freq, signal_boundaries=None):
        super(MuseViewerSignal, self).__init__(acquisition_freq, signal_boundaries)
        self.signal = signal
        self.figure, (self.ax1, self.ax2, self.ax3, self.ax4) = plt.subplots(4, 1, sharex=True, figsize=(15, 10))
        self.ax1.set_title('Left ear')
        self.ax2.set_title('Left forehead')
        self.ax3.set_title('Right forehead')
        self.ax4.set_title('Right ear')
        if self.signal.do_fft:
            # NOTE(review): self.x_data is never assigned in this class or in
            # MuseViewer, so this FFT branch would raise AttributeError as
            # written; also len(...)/2 needs // on Python 3 — confirm intent.
            half = len(self.x_data) / 2
            self.ax1_plot, = self.ax1.plot(self.x_data[0:half], self.signal.l_ear_fft[0:half])
            self.ax2_plot, = self.ax2.plot(self.x_data[0:half], self.signal.l_forehead_fft[0:half])
            self.ax3_plot, = self.ax3.plot(self.x_data[0:half], self.signal.r_forehead_fft[0:half])
            self.ax4_plot, = self.ax4.plot(self.x_data[0:half], self.signal.r_ear_fft[0:half])
            for axis in (self.ax1, self.ax2, self.ax3, self.ax4):
                axis.set_ylim([0, 10000])
        else:
            self.ax1_plot, = self.ax1.plot(self.signal.time, self.signal.l_ear)
            self.ax2_plot, = self.ax2.plot(self.signal.time, self.signal.l_forehead)
            self.ax3_plot, = self.ax3.plot(self.signal.time, self.signal.r_forehead)
            self.ax4_plot, = self.ax4.plot(self.signal.time, self.signal.r_ear)
            for axis in (self.ax1, self.ax2, self.ax3, self.ax4):
                axis.set_ylim([self.low, self.high])
        # Show elapsed time on the shared x axis.
        formatter = mticker.FuncFormatter(timeTicks)
        for axis in (self.ax1, self.ax2, self.ax3, self.ax4):
            axis.xaxis.set_major_formatter(formatter)
        plt.ion()

    def show(self):
        """Display the figure without blocking and draw the first frame."""
        plt.show(block=False)
        self.refresh()

    def refresh(self):
        """Redraw the four channel plots, throttled to one per refresh_freq seconds."""
        time_now = datetime.now()
        if (time_now - self.last_refresh).total_seconds() <= self.refresh_freq:
            return
        self.last_refresh = time_now
        if self.signal.do_fft:
            half = len(self.x_data) / 2
            self.ax1_plot.set_ydata(self.signal.l_ear_fft[0:half])
            self.ax2_plot.set_ydata(self.signal.l_forehead_fft[0:half])
            self.ax3_plot.set_ydata(self.signal.r_forehead_fft[0:half])
            self.ax4_plot.set_ydata(self.signal.r_ear_fft[0:half])
        else:
            self.ax1_plot.set_ydata(self.signal.l_ear)
            self.ax2_plot.set_ydata(self.signal.l_forehead)
            self.ax3_plot.set_ydata(self.signal.r_forehead)
            self.ax4_plot.set_ydata(self.signal.r_ear)
        # Rescale the x axis to the current time window.
        times = list(linspace(self.signal.time[0], self.signal.time[-1], self.signal.length))
        for plot in (self.ax1_plot, self.ax2_plot, self.ax3_plot, self.ax4_plot):
            plot.set_xdata(times)
        plt.xlim(self.signal.time[0], self.signal.time[-1])
        self.figure.canvas.draw()
        self.figure.canvas.flush_events()
class MuseViewerConcentrationMellow(object):
    """Live two-panel plot of the Muse 'concentration' and 'mellow' metrics."""

    def __init__(self, signal_concentration, signal_mellow, signal_boundaries=None):
        # Minimum number of seconds between two redraws.
        self.refresh_freq = 0.05
        self.init_time = 0.0
        self.last_refresh = datetime.now()
        self.signal_concentration = signal_concentration
        self.signal_mellow = signal_mellow
        if signal_boundaries is not None:
            self.low, self.high = signal_boundaries[0], signal_boundaries[1]
        else:
            self.low, self.high = 0, 1
        # Sample indices used as initial x values for each curve.
        self.x_data_concentration = range(0, self.signal_concentration.length, 1)
        self.x_data_mellow = range(0, self.signal_mellow.length, 1)
        self.figure, (self.ax1, self.ax2) = plt.subplots(2, 1, sharex=True, figsize=(15, 10))
        self.ax1.set_title('Concentration')
        self.ax2.set_title('Mellow')
        self.ax1_plot, = self.ax1.plot(self.x_data_concentration, self.signal_concentration.concentration)
        self.ax2_plot, = self.ax2.plot(self.x_data_mellow, self.signal_mellow.mellow)
        self.ax1.set_ylim([self.low, self.high])
        self.ax2.set_ylim([self.low, self.high])
        # Show elapsed time on the shared x axis.
        formatter = mticker.FuncFormatter(timeTicks)
        self.ax1.xaxis.set_major_formatter(formatter)
        self.ax2.xaxis.set_major_formatter(formatter)
        plt.ion()

    def show(self):
        """Display the figure without blocking and draw the first frame."""
        plt.show(block=False)
        self.refresh()

    def refresh(self):
        """Redraw both plots, throttled to one redraw per refresh_freq seconds."""
        time_now = datetime.now()
        if (time_now - self.last_refresh).total_seconds() > self.refresh_freq:
            self.last_refresh = time_now
            pass
        else:
            return
        self.ax1_plot.set_ydata(self.signal_concentration.concentration)
        self.ax2_plot.set_ydata(self.signal_mellow.mellow)
        # Rescale the x axis to the concentration signal's time window.
        times = list(linspace(self.signal_concentration.time[0], self.signal_concentration.time[-1], self.signal_concentration.length))
        self.ax1_plot.set_xdata(times)
        self.ax2_plot.set_xdata(times)
        plt.xlim(self.signal_concentration.time[0], self.signal_concentration.time[-1])
        self.figure.canvas.draw()
        self.figure.canvas.flush_events()
|
griimick/feature-mlsite | app/twitter/views.py | Python | mit | 1,537 | 0.006506 | from flask import Blueprint, request, render_template
from ..load import processing_results, api
import string
import tweepy
twitter_mod = Blueprint('twitter', __name__, template_folder='templates', static_folder='static')
# Printable ASCII characters, minus the space (spaces are kept in the
# filtered text) plus the ellipsis marker.
ascii_chars = set(string.printable)
ascii_chars.remove(' ')
ascii_chars.add('...')


def takeout_non_ascii(s):
    """Return the characters of *s* that are not printable ASCII (spaces kept)."""
    return [ch for ch in s if ch not in ascii_chars]
@twitter_mod.route('/twitter', methods=['GET', 'POST'])
def twitter():
    """Render the Twitter sentiment page; on POST, analyse recent Hindi tweets.

    (Fixes stray '|' extraction artifacts, including one inside the
    empty-topic message string.)
    """
    if request.method == 'POST':
        try:
            topic = request.form['topic'].strip()
            if topic == '':
                return render_template('projects/twitter.html', message='Please enter a valid topic')
            text = []
            # Collect up to 50 Hindi tweets, keeping only their non-ASCII
            # (Devanagari) content; drop results shorter than 3 characters.
            for tweet in tweepy.Cursor(api.search, topic, lang='hi').items(50):
                temp = ''.join(takeout_non_ascii(tweet.text))
                if not len(temp) in range(3):
                    text.append(temp)
            if len(text) == 0:
                return render_template('projects/twitter.html', message='No tweets under this topic. Please enter another topic.')
            data, emotion_sents, score, line_sentiment, text, length = processing_results(text)
            return render_template('projects/twitter.html', data=[data, emotion_sents, score, zip(text, line_sentiment), length])
        except Exception:
            # Best-effort page: API/network failures fall back to a generic message.
            return render_template('projects/twitter.html', message='Something went wrong. Please try again.')
    else:
        return render_template('projects/twitter.html')
|
vsajip/django | tests/regressiontests/queries/tests.py | Python | bsd-3-clause | 87,329 | 0.002714 | from __future__ import absolute_import,unicode_literals
import datetime
from operator import attrgetter
import pickle
import sys
from django.conf import settings
from django.core.exceptions import FieldError
from django.db import DatabaseError, connection, connections, DEFAULT_DB_ALIAS
from django.db.models import Count
from django.db.models.query import Q, ITER_CHUNK_SIZE, EmptyQuerySet
from django.db.models.sql.where import WhereNode, EverythingNode, NothingNode
from django.db.models.sql.datastructures import EmptyResultSet
from django.test import TestCase, skipUnlessDBFeature
from django.utils import unittest
from django.utils.datastructures import SortedDict
from django.utils import six
from .models import (Annotation, Article, Author, Celebrity, Child, Cover,
Detail, DumbCategory, ExtraInfo, Fan, Item, LeafA, LoopX, LoopZ,
ManagedModel, Member, NamedCategory, Note, Number, Plaything, PointerA,
Ranking, Related, Report, ReservedName, Tag, TvChef, Valid, X, Food, Eaten,
Node, ObjectA, ObjectB, ObjectC, CategoryItem, SimpleCategory,
SpecialCategory, OneToOneCategory, NullableName, ProxyCategory,
SingleObject, RelatedObject)
class BaseQuerysetTest(TestCase):
    """TestCase with a helper for comparing querysets by raw values."""
    def assertValueQuerysetEqual(self, qs, values):
        # Identity transform: compare the queryset's items as-is instead of repr().
        return self.assertQuerysetEqual(qs, values, transform=lambda x: x)
class Queries1Tests(BaseQuerysetTest):
    def setUp(self):
        """Create the shared tag/note/author/item/report fixture used by the tests."""
        # Tag tree: t1 -> (t2, t3), t3 -> (t4, t5).
        generic = NamedCategory.objects.create(name="Generic")
        self.t1 = Tag.objects.create(name='t1', category=generic)
        self.t2 = Tag.objects.create(name='t2', parent=self.t1, category=generic)
        self.t3 = Tag.objects.create(name='t3', parent=self.t1)
        t4 = Tag.objects.create(name='t4', parent=self.t3)
        self.t5 = Tag.objects.create(name='t5', parent=self.t3)
        self.n1 = Note.objects.create(note='n1', misc='foo', id=1)
        n2 = Note.objects.create(note='n2', misc='bar', id=2)
        self.n3 = Note.objects.create(note='n3', misc='foo', id=3)
        ann1 = Annotation.objects.create(name='a1', tag=self.t1)
        ann1.notes.add(self.n1)
        ann2 = Annotation.objects.create(name='a2', tag=t4)
        ann2.notes.add(n2, self.n3)
        # Create these out of order so that sorting by 'id' will be different to sorting
        # by 'info'. Helps detect some problems later.
        self.e2 = ExtraInfo.objects.create(info='e2', note=n2)
        e1 = ExtraInfo.objects.create(info='e1', note=self.n1)
        self.a1 = Author.objects.create(name='a1', num=1001, extra=e1)
        self.a2 = Author.objects.create(name='a2', num=2002, extra=e1)
        a3 = Author.objects.create(name='a3', num=3003, extra=self.e2)
        self.a4 = Author.objects.create(name='a4', num=4004, extra=self.e2)
        self.time1 = datetime.datetime(2007, 12, 19, 22, 25, 0)
        self.time2 = datetime.datetime(2007, 12, 19, 21, 0, 0)
        time3 = datetime.datetime(2007, 12, 20, 22, 25, 0)
        time4 = datetime.datetime(2007, 12, 20, 21, 0, 0)
        self.i1 = Item.objects.create(name='one', created=self.time1, modified=self.time1, creator=self.a1, note=self.n3)
        self.i1.tags = [self.t1, self.t2]
        self.i2 = Item.objects.create(name='two', created=self.time2, creator=self.a2, note=n2)
        self.i2.tags = [self.t1, self.t3]
        self.i3 = Item.objects.create(name='three', created=time3, creator=self.a2, note=self.n3)
        i4 = Item.objects.create(name='four', created=time4, creator=self.a4, note=self.n3)
        i4.tags = [t4]
        self.r1 = Report.objects.create(name='r1', creator=self.a1)
        Report.objects.create(name='r2', creator=a3)
        Report.objects.create(name='r3')
        # Ordering by 'rank' gives us rank2, rank1, rank3. Ordering by the Meta.ordering
        # will be rank3, rank2, rank1.
        self.rank1 = Ranking.objects.create(rank=2, author=self.a2)
        Cover.objects.create(title="first", item=i4)
        Cover.objects.create(title="second", item=self.i2)
    def test_ticket1050(self):
        """Regression for #1050: isnull lookups across a m2m relation."""
        self.assertQuerysetEqual(
            Item.objects.filter(tags__isnull=True),
            ['<Item: three>']
        )
        self.assertQuerysetEqual(
            Item.objects.filter(tags__id__isnull=True),
            ['<Item: three>']
        )
    def test_ticket1801(self):
        """Regression for #1801: filtering Authors by related Item and combining with &."""
        self.assertQuerysetEqual(
            Author.objects.filter(item=self.i2),
            ['<Author: a2>']
        )
        self.assertQuerysetEqual(
            Author.objects.filter(item=self.i3),
            ['<Author: a2>']
        )
        self.assertQuerysetEqual(
            Author.objects.filter(item=self.i2) & Author.objects.filter(item=self.i3),
            ['<Author: a2>']
        )
    def test_ticket2306(self):
        """Regression for #2306: join types and Q-object combinations over m2m."""
        # Checking that no join types are "left outer" joins.
        query = Item.objects.filter(tags=self.t2).query
        self.assertTrue(query.LOUTER not in [x[2] for x in query.alias_map.values()])
        self.assertQuerysetEqual(
            Item.objects.filter(Q(tags=self.t1)).order_by('name'),
            ['<Item: one>', '<Item: two>']
        )
        self.assertQuerysetEqual(
            Item.objects.filter(Q(tags=self.t1)).filter(Q(tags=self.t2)),
            ['<Item: one>']
        )
        self.assertQuerysetEqual(
            Item.objects.filter(Q(tags=self.t1)).filter(Q(creator__name='fred')|Q(tags=self.t2)),
            ['<Item: one>']
        )
        # Each filter call is processed "at once" against a single table, so this is
        # different from the previous example as it tries to find tags that are two
        # things at once (rather than two tags).
        self.assertQuerysetEqual(
            Item.objects.filter(Q(tags=self.t1) & Q(tags=self.t2)),
            []
        )
        self.assertQuerysetEqual(
            Item.objects.filter(Q(tags=self.t1), Q(creator__name='fred')|Q(tags=self.t2)),
            []
        )
        qs = Author.objects.filter(ranking__rank=2, ranking__id=self.rank1.id)
        self.assertQuerysetEqual(list(qs), ['<Author: a2>'])
        # NOTE(review): the trailing `2` below is passed as the assertion *msg*.
        self.assertEqual(2, qs.query.count_active_tables(), 2)
        qs = Author.objects.filter(ranking__rank=2).filter(ranking__id=self.rank1.id)
        self.assertEqual(qs.query.count_active_tables(), 3)
def test_ticket4464(self):
self.assertQuerysetEqual(
Item.objects.filter(tags=self.t1).filter(tags=self.t2 | ),
['<Item: one>']
)
self.assertQue | rysetEqual(
Item.objects.filter(tags__in=[self.t1, self.t2]).distinct().order_by('name'),
['<Item: one>', '<Item: two>']
)
self.assertQuerysetEqual(
Item.objects.filter(tags__in=[self.t1, self.t2]).filter(tags=self.t3),
['<Item: two>']
)
# Make sure .distinct() works with slicing (this was broken in Oracle).
self.assertQuerysetEqual(
Item.objects.filter(tags__in=[self.t1, self.t2]).order_by('name')[:3],
['<Item: one>', '<Item: one>', '<Item: two>']
)
self.assertQuerysetEqual(
Item.objects.filter(tags__in=[self.t1, self.t2]).distinct().order_by('name')[:3],
['<Item: one>', '<Item: two>']
)
    def test_tickets_2080_3592(self):
        """Regression for #2080/#3592: OR-ing querysets and equivalent Q-object forms."""
        self.assertQuerysetEqual(
            Author.objects.filter(item__name='one') | Author.objects.filter(name='a3'),
            ['<Author: a1>', '<Author: a3>']
        )
        self.assertQuerysetEqual(
            Author.objects.filter(Q(item__name='one') | Q(name='a3')),
            ['<Author: a1>', '<Author: a3>']
        )
        self.assertQuerysetEqual(
            Author.objects.filter(Q(name='a3') | Q(item__name='one')),
            ['<Author: a1>', '<Author: a3>']
        )
        self.assertQuerysetEqual(
            Author.objects.filter(Q(item__name='three') | Q(report__name='r3')),
            ['<Author: a2>']
        )
def test_ticket6074(self):
# Merging two empty result sets shouldn't leave a queryset with no constraints
# (which would match everything).
self.assertQuerysetEqual(Author.objects.filter(Q(id__in=[])), [])
self.assertQuerysetEqual(
Au |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.