repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
Tayamarn/socorro | socorro/unittest/external/postgresql/test_bugs.py | Python | mpl-2.0 | 4,325 | 0.001156 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
from socorro.external.postgresql.bugs import Bugs
from socorro.lib import MissingArgumentError
from socorro.unittest.external.postgresql.unittestbase import PostgreSQLTestCase
class IntegrationTestBugs(PostgreSQLTestCase):
"""Test socorro.external.postgresql.bugs.Bugs class. """
def setUp(self):
"""Set up this test class by populating the reports table with fake
data. """
super(IntegrationTestBugs, self).setUp()
cursor = self.connection.cursor()
# Insert data
cursor.execute("""
INSERT INTO bug_associations
(signature, bug_id)
VALUES
(
'sign1',
1
),
(
'js',
1
),
(
'mysignature',
2
),
(
'mysignature',
3
);
""")
self.connection.commit()
def tearDown(self):
"""Clean up the database, delete tables and functions. """
cursor = self.connection.cursor()
cursor.execute("""
TRUNCATE bug_associations
CASCADE
""")
self.connection.commit()
super(IntegrationTestBugs, self).tearDown()
def test_get(self):
bugs = Bugs(config=self.config)
# Test 1: a valid signature with 2 bugs
params = {
"signatures": "mysignature"
}
res = bugs.get(**params)
res_expected = {
"hits": [
{
"id": 2,
"signature": "mysignature"
},
{
"id": 3,
"signature": "mysignature"
}
],
"total": 2
}
assert res['total'] == res_expected['total']
# by convert the hits to sets we can be certain order doesn't matter
expected = set([(x['id'], x['signature']) for x in res_expected['hits']])
assert set([(x['id'], x['signature']) for x in res['hits']]) == expected
# Test 2: several signatures with bugs
params = {
"signatures": ["mysignature", "js"]
}
res = bugs.get(**params)
res_expected = {
"hits": [
{
"id": 1,
"signature": "sign1"
},
{
"id": 1,
"signature": "js"
},
{
"id": 2,
"signature": "mysignature"
},
{
"id": 3,
"signature": "mysignature"
}
],
"total": 4
}
assert res['total'] == res_expected['total']
expected = set([(x['id'], x['signature']) for x in res_expected['hits']])
assert set([(x['id'], x['signature']) for x in res['hits']]) == expected
# Test 3: a | signature without bugs
params = {
"signatures": "unknown"
}
res = bugs.get(**params)
res_expected = {
"hits": [],
"total": 0
}
assert res == res_expected
# Test 4: missing argument
with pyt | est.raises(MissingArgumentError):
bugs.get()
# Test 5: search by bug_ids argument
params = {
"bug_ids": ["1", "2"]
}
res = bugs.get(**params)
# This is what we expect but because the results aren't sorted,
# we can't expect them to come in in this order.
res_expected = {
'hits': [
{'id': 1, 'signature': 'js'},
{'id': 1, 'signature': 'sign1'},
{'id': 2, 'signature': 'mysignature'}
],
'total': 3
}
assert res['total'] == res_expected['total']
assert len(res['hits']) == len(res_expected['hits'])
for row in res_expected['hits']:
assert row in res['hits']
|
Arno-Nymous/pyload | module/plugins/accounts/EuroshareEu.py | Python | gpl-3.0 | 1,755 | 0.00114 | # -*- coding: utf-8 -*-
import re
import time
from ..internal.Account import Account
from ..internal.misc import json
class EuroshareEu(Account):
__name__ = "EuroshareEu"
__type__ = "account"
__version__ = "0.12"
__status__ = "testing"
__description__ = """Euroshare.eu account plugin"""
__license__ = "GPLv3"
__authors__ = [("zoidberg", "zoidberg@mujmail.cz"),
("GammaC0de", "nitzo2001[AT]yahoo[DOT]com")]
def grab_info(self, user, password, data):
html = self.load("http://euros | hare.eu/",
get={'lang': "en"})
m = re.search(
r'<span class="btn btn--nav green darken-3">Premium account until: (\d+/\d+/\d+ \d+:\d+:\d+)<',
html)
if m is None:
premium = False
validuntil = -1
else:
premium = True
validun | til = time.mktime(
time.strptime(
m.group(1),
"%d/%m/%Y %H:%M:%S"))
return {'validuntil': validuntil,
'trafficleft': -1, 'premium': premium}
def signin(self, user, password, data):
html = self.load("http://euroshare.eu/login.html")
if r'href="http://euroshare.eu/logout.html"' in html:
self.skip_login()
json_data = json.loads(self.load("http://euroshare.eu/ajax/_account_login.ajax.php",
post={'username': user,
'password': password,
'remember': "false",
'backlink': ""}))
if json_data.get("login_status") != "success":
self.fail_login()
|
rarbg/ZeroNet | src/util/Event.py | Python | gpl-2.0 | 1,306 | 0.037519 | # Based on http://stackoverflow.com/a/2022629
class Event(list):
def __call__(self, *args, **kwargs):
for f in self[:]:
if "once" in dir(f) and f in self:
self.remove(f)
f(*args, **kwargs)
def __repr__(self):
return "Event(%s)" % list.__repr__(self)
def once(self, func, name=None):
func.once = True
func.name = None
if name: # Dont function with same name twice
names = [f.name for f in self if "once" in dir(f)]
if name not in names:
func.name = name
self.append(func)
else:
self.a | ppend(func)
return self
def testBenchmark():
def say(pre, text):
print "%s Say: %s" % (pre, text)
import time
s = time.time()
onChanged = Event()
for i in | range(1000):
onChanged.once(lambda pre: say(pre, "once"), "once")
print "Created 1000 once in %.3fs" % (time.time()-s)
onChanged("#1")
def testUsage():
def say(pre, text):
print "%s Say: %s" % (pre, text)
onChanged = Event()
onChanged.once(lambda pre: say(pre, "once"))
onChanged.once(lambda pre: say(pre, "once"))
onChanged.once(lambda pre: say(pre, "namedonce"), "namedonce")
onChanged.once(lambda pre: say(pre, "namedonce"), "namedonce")
onChanged.append(lambda pre: say(pre, "always"))
onChanged("#1")
onChanged("#2")
onChanged("#3")
if __name__ == "__main__":
testBenchmark()
|
jdanbrown/pydatalab | google/datalab/bigquery/commands/__init__.py | Python | apache-2.0 | 679 | 0.005891 | # Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, | either express
# or implied. See the License for the specific l | anguage governing permissions and limitations under
# the License.
from __future__ import absolute_import
from . import _bigquery
__all__ = ['_bigquery']
|
odrolliv13/Hex-Photos | shop/views/store_details.py | Python | apache-2.0 | 2,324 | 0.032702 | from django import forms
from django.conf import settings
from django.http import HttpResponse, HttpResponseRedirect, Http404
from shop import models as pmod
from . import templater
from django.conf import settings
import decimal, datetime
def process_request(request):
if not request.user.is_authenticated():
return HttpResponseRedirect('/manager/login')
if request.user.is_staff == False:
return HttpResponseRedirect('/manager/')
'''Shows the list of stores'''
if request.urlparams[0] == "new":
store = pmod.Store()
store.locationName = ""
store.street = ""
store.city = ""
store.state = ""
store.zipcode = ""
store.phone = ""
store.active = True
else:
store = pmod.Store.objects.get(id=request.urlparams[0])
form = StoreForm(initial ={
'locationName': store.locationName,
'street': store.street,
'city': store.city,
'state': store.state,
'zipcode': store.zipcode,
'phone': store.phone,
})
if request.method == 'POST':
form = StoreForm(request.POST)
if form.is_valid():
store.locationName = form.cleaned_data['locationName']
store.street = form.clea | ned_data['street']
store.city = form.cleaned_data['city']
store.state = form.cleaned_data['state']
store.zipcode = form.cleaned_data['zipcode']
store.phone = form.cleaned_data['phone']
store.active = True
store.save()
return HttpResponseRedirect('/manager/stores') |
tvars = {
'form': form,
}
return templater.render_to_response(request, 'store_details.html', tvars)
class StoreForm(forms.Form):
locationName = forms.CharField(required=False, label='Location Name', widget=forms.TextInput(attrs={'class':'form-control'}))
street = forms.CharField(required=False, label='Street', widget=forms.TextInput(attrs={'class':'form-control'}))
city = forms.CharField(required=False, label='City', widget=forms.TextInput(attrs={'class':'form-control'}))
state = forms.CharField(required=False, label='State', widget=forms.TextInput(attrs={'class':'form-control'}))
zipcode = forms.CharField(required=False, label='Zipcode', widget=forms.TextInput(attrs={'class':'form-control'}))
phone = forms.CharField(required=False, label='Phone', widget=forms.TextInput(attrs={'class':'form-control'}))
#def clean_store_text(self): |
filipenf/ansible | lib/ansible/plugins/connection/ssh.py | Python | gpl-3.0 | 29,029 | 0.002859 | # (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
# Copyright 2015 Abhijit Menon-Sen <ams@2ndQuadrant.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import errno
import fcntl
import os
import pipes
import pty
import select
import subprocess
import time
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
from ansible.plugins.connection import ConnectionBase
from ansible.utils.path import unfrackpath, makedirs_safe
from ansible.utils.unicode import to_bytes, to_unicode, to_str
from ansible.compat.six import text_type, binary_type
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
SSHPASS_AVAILABLE = None
class Connection(ConnectionBase):
''' ssh based connections '''
transport = 'ssh'
has_pipelining = True
become_methods = frozenset(C.BECOME_METHODS).difference(['runas'])
def __init__(self, *args, **kwargs):
super(Connection, self).__init__(*args, **kwargs)
self.host = self._play_context.remote_addr
# The connection is created by running ssh/scp/sftp from the exec_command,
# put_file, and fetch_file methods, so we don't need to do any connection
# management here.
def _connect(self):
return self
@staticmethod
def _sshpass_available():
global SSHPASS_AVAILABLE
# We test once if sshpass is available, and remember the result. It
# would be nice to use distutils.spawn.find_executable for this, but
# distutils isn't always available; shutils.which() is Python3-only.
if SSHPASS_AVAILABLE is None:
try:
p = subprocess.Popen(["sshpass"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.communicate()
SSHPASS_AVAILABLE = True
except OSError:
SSHPASS_AVAILABLE = False
return SSHPASS_AVAILABLE
@staticmethod
def _persistence_controls(command):
'''
Takes a command array and scans it for ControlPersist and ControlPath
settings and returns two booleans indicating whether either was found.
This could be smarter, e.g. returning false if ControlPersist is 'no',
but for now we do it simple way.
'''
controlpersist = False
controlpath = False
for arg in command:
if 'controlpersist' in arg.lower():
controlpersist = True
elif 'controlpath' in arg.lower():
controlpath = True
return controlpersist, controlpath
def _add_args(self, explanation, args):
"""
Adds the given args to self._command and displays a caller-supplied
explanation of why they were added.
"""
self._command += args
display.vvvvv('SSH: ' + explanation + ': (%s)' % ')('.join(args), host=self._play_context.remote_addr)
def _build_command(self, binary, *other_args):
'''
Takes a binary (ssh, scp, sftp) and optional extra arguments and returns
a command line as an array that can be passed to subprocess.Popen.
'''
self._command = []
## First, the command name.
# If we want to use password authentication, we have to set up a pipe to
# write the password to sshpass.
if self._play_context.password:
if not self._sshpass_available():
raise AnsibleError("to use the 'ssh' connection type with passwords, you must install the sshpass program")
self.sshpass_pipe = os.pipe()
self._command += ['sshpass', '-d{0}'.format(self.sshpass_pipe[0])]
self._command += [binary]
## Next, additional arguments based on the configuration.
# sftp batch mode a | llows us to correctly catch failed transfers, but can
# be disabled if the client side doesn't support the option. However,
# sftp batch mode does no | t prompt for passwords so it must be disabled
# if not using controlpersist and using sshpass
if binary == 'sftp' and C.DEFAULT_SFTP_BATCH_MODE:
if self._play_context.password:
self._add_args('disable batch mode for sshpass', ['-o', 'BatchMode=no'])
self._command += ['-b', '-']
if self._play_context.verbosity > 3:
self._command += ['-vvv']
elif binary == 'ssh':
# Older versions of ssh (e.g. in RHEL 6) don't accept sftp -q.
self._command += ['-q']
# Next, we add [ssh_connection]ssh_args from ansible.cfg.
if self._play_context.ssh_args:
args = self._split_ssh_args(self._play_context.ssh_args)
self._add_args("ansible.cfg set ssh_args", args)
# Now we add various arguments controlled by configuration file settings
# (e.g. host_key_checking) or inventory variables (ansible_ssh_port) or
# a combination thereof.
if not C.HOST_KEY_CHECKING:
self._add_args(
"ANSIBLE_HOST_KEY_CHECKING/host_key_checking disabled",
("-o", "StrictHostKeyChecking=no")
)
if self._play_context.port is not None:
self._add_args(
"ANSIBLE_REMOTE_PORT/remote_port/ansible_port set",
("-o", "Port={0}".format(self._play_context.port))
)
key = self._play_context.private_key_file
if key:
self._add_args(
"ANSIBLE_PRIVATE_KEY_FILE/private_key_file/ansible_ssh_private_key_file set",
("-o", "IdentityFile=\"{0}\"".format(os.path.expanduser(key)))
)
if not self._play_context.password:
self._add_args(
"ansible_password/ansible_ssh_pass not set", (
"-o", "KbdInteractiveAuthentication=no",
"-o", "PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey",
"-o", "PasswordAuthentication=no"
)
)
user = self._play_context.remote_user
if user:
self._add_args(
"ANSIBLE_REMOTE_USER/remote_user/ansible_user/user/-u set",
("-o", "User={0}".format(to_bytes(self._play_context.remote_user)))
)
self._add_args(
"ANSIBLE_TIMEOUT/timeout set",
("-o", "ConnectTimeout={0}".format(self._play_context.timeout))
)
# Add in any common or binary-specific arguments from the PlayContext
# (i.e. inventory or task settings or overrides on the command line).
for opt in ['ssh_common_args', binary + '_extra_args']:
attr = getattr(self._play_context, opt, None)
if attr is not None:
args = self._split_ssh_args(attr)
self._add_args("PlayContext set %s" % opt, args)
# Check if ControlPersist is enabled and add a ControlPath if one hasn't
# already been set.
controlpersist, controlpath = self._persistence_controls(self._command)
if controlpersist:
self._persistent = True
if not controlpath:
cpdir = unfrackpath('$HOME/.ansible/cp')
# The directory must exist and be writable.
makedirs_safe(cpdir, 0o700)
if not os.access(cpdir, os.W_OK):
|
oleg-toporkov/python-bdd-selenium | core/decorators.py | Python | mit | 790 | 0.001266 | """
Created on September 18, 2015
@author: oleg-toporkov
"""
from functools import wraps
import logging
def log_exception(mess | age, logger=None):
"""
Decorator for function execution in try/except with first logging what tried to do and then raising an exception
"""
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
if logger is None:
_self = args[0] | # self
log = getattr(_self, 'logger')
else:
log = logger
assert isinstance(log, logging.Logger)
try:
return func(*args, **kwargs)
except Exception:
log.error(message.format(args[1]))
raise
return wrapper
return decorator
|
rleigh-dundee/openmicroscopy | components/tools/OmeroPy/test/gatewaytest/user.py | Python | gpl-2.0 | 6,878 | 0.002472 | #!/usr/bin/env python
"""
gateway tests - Users
Copyright 2009 Glencoe Software, Inc. All rights reserved.
Use is subject to license terms supplied in LICENSE.txt
"""
import unittest
import omero
import gatewaytest.library as lib
from omero.gateway.scripts import dbhelpers
class UserTest (lib.GTest):
def testUsers (self):
self.loginAsUser()
# Try reconnecting without disconnect
self._has_connected = False
self.doConnect()
self.loginAsAuthor()
self.loginAsAdmin()
def testSaveAs (self):
for u in (self.AUTHOR, self.ADMIN):
# Test image should be owned by author
self.loginAsAuthor()
image = self.getTestImage()
ownername = image.getOwnerOmeName()
# Now login as author or admin
self.doLogin(u)
self.gateway.SERVICE_OPTS.setOmeroGroup('-1')
image = self.getTestImage()
self.assertEqual(ownername, self.AUTHOR.name)
# Create some object
param = omero.sys.Parameters()
param.map = {'ns': omero.rtypes.rstring('weblitz.UserTest.testSaveAs')}
anns = self.gateway.getQueryService().findAllByQuery('from CommentAnnotation as a where a.ns=:ns', param)
self.assertEqual(len(anns), 0)
self.gateway.SERVICE_OPTS.setOmeroGroup()
ann = omero.gateway.CommentAnnotationWrapper(conn=self.gateway)
ann.setNs(param.map['ns'].val)
ann.setValue('foo')
ann.saveAs(image.getDetails())
# Annotations are owned by author
self.loginAsAuthor()
try:
anns = self.gateway.getQueryService().findAllByQuery('from CommentAnnotation as a where a.ns=:ns', param)
self.assertEqual(len(anns), 1)
self.assertEqual(omero.gateway.CommentAnnotationWrapper(self.gateway, anns[0]).getOwnerOmeName(), self | .AUTHOR.name)
finally:
self.gateway.getUpdateService().deleteObject(ann._obj)
anns = self.gateway.getQueryService().findAllByQuery('from CommentAnnotation as a where a.ns=:ns', param)
self.assertEqual(len(anns), 0)
def testCrossGroupSave | (self):
self.loginAsUser()
uid = self.gateway.getUserId()
self.loginAsAdmin()
self.gateway.SERVICE_OPTS.setOmeroGroup('-1')
d = self.getTestDataset()
did = d.getId()
g = d.getDetails().getGroup()
admin = self.gateway.getAdminService()
admin.addGroups(omero.model.ExperimenterI(uid, False), [g._obj])
self.gateway.SERVICE_OPTS.setOmeroGroup('-1')
# make sure the group is groupwrite enabled
perms = str(d.getDetails().getGroup().getDetails().permissions)
admin.changePermissions(g._obj, omero.model.PermissionsI('rwrw--'))
d = self.getTestDataset()
g = d.getDetails().getGroup()
self.assert_(g.getDetails().permissions.isGroupWrite())
self.loginAsUser()
# User is now a member of the group to which testDataset belongs, which has groupWrite==True
# But the default group for User is diferent
try:
self.gateway.SERVICE_OPTS.setOmeroGroup('-1')
d = self.getTestDataset()
did = d.getId()
n = d.getName()
d.setName(n+'_1')
d.save()
d = self.gateway.getObject('dataset', did)
self.assertEqual(d.getName(), n+'_1')
d.setName(n)
d.save()
d = self.gateway.getObject('dataset', did)
self.assertEqual(d.getName(), n)
finally:
self.loginAsAdmin()
admin = self.gateway.getAdminService()
# Revert group permissions and remove user from group
admin.changePermissions(g._obj, omero.model.PermissionsI(perms))
admin.removeGroups(omero.model.ExperimenterI(uid, False), [g._obj])
def testCrossGroupRead (self):
self.loginAsAuthor()
u = self.gateway.getUpdateService()
p = self.getTestProject()
self.assertEqual(str(p.getDetails().permissions)[4], '-')
d = p.getDetails()
g = d.getGroup()
self.loginAsUser()
self.gateway.SERVICE_OPTS.setOmeroGroup('-1')
self.assert_(not g.getId() in self.gateway.getEventContext().memberOfGroups)
self.assertEqual(self.gateway.getObject('project', p.getId()), None)
def testGroupOverObjPermissions (self):
""" Object accesss must be dependent only of group permissions """
ns = 'omero.test.ns'
# Author
self.loginAsAuthor()
# create group with rw----
# create project and annotation in that group
p = dbhelpers.ProjectEntry('testAnnotationPermissions', None, create_group='testAnnotationPermissions', group_perms='rw----')
try:
p = p.create(self.gateway)
except dbhelpers.BadGroupPermissionsException:
self.loginAsAdmin()
admin = self.gateway.getAdminService()
admin.changePermissions(admin.lookupGroup('testAnnotationPermissions'), omero.model.PermissionsI('rw----'))
self.loginAsAuthor()
p = p.create(self.gateway)
pid = p.getId()
g = p.getDetails().getGroup()._obj
try:
# Admin
# add User to group
self.loginAsUser()
uid = self.gateway.getUserId()
self.loginAsAdmin()
admin = self.gateway.getAdminService()
admin.addGroups(omero.model.ExperimenterI(uid, False), [g])
# User
# try to read project and annotation, which fails
self.loginAsUser()
self.gateway.SERVICE_OPTS.setOmeroGroup('-1')
self.assertEqual(self.gateway.getObject('project', pid), None)
# Admin
# Chmod project to rwrw--
self.loginAsAdmin()
admin = self.gateway.getAdminService()
admin.changePermissions(g, omero.model.PermissionsI('rwrw--'))
# Author
# check project has proper permissions
self.loginAsAuthor()
self.gateway.SERVICE_OPTS.setOmeroGroup('-1')
pa = self.gateway.getObject('project', pid)
self.assertNotEqual(pa, None)
# User
# read project and annotation
self.loginAsUser()
self.gateway.SERVICE_OPTS.setOmeroGroup('-1')
self.assertNotEqual(self.gateway.getObject('project', pid), None)
finally:
self.loginAsAuthor()
handle = self.gateway.deleteObjects('Project', [p.getId()], deleteAnns=True, deleteChildren=True)
self.waitOnCmd(self.gateway.c, handle)
if __name__ == '__main__':
unittest.main()
|
ajpina/uffema | uffema/machines/synchronous.py | Python | apache-2.0 | 2,520 | 0.000397 | #!/usr/bin/python
# -*- coding: iso-8859-15 -*-
# ==========================================================================
# Copyright (C) 2016 Dr. Alejandro Pina Ortega
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==========================================================================
"""
Base class for synchronous motors.
"""
# ========================== | ================================================
# Program: synchronous.py
# Author: ajpina
# Date: 12/23/16
# Version: 0.1.1
#
# Revision History:
# Date Version Author Description
# - 12/23/16: 0.1.1 ajpina Defines mandatory methods a | nd properties
#
# ==========================================================================
__author__ = 'ajpina'
from abc import abstractmethod
from uffema.machines import RotatingMachine
class Synchronous(RotatingMachine):
@property
@abstractmethod
def stator(self):
return 'Should never see this'
@stator.setter
@abstractmethod
def stator(self, new_stator):
return
@property
@abstractmethod
def rotor(self):
return 'Should never see this'
@rotor.setter
@abstractmethod
def rotor(self, new_rotor):
return
@property
@abstractmethod
def flux(self):
return 'Should never see this'
@flux.setter
@abstractmethod
def flux(self, new_flux):
return
@property
@abstractmethod
def mode(self):
return 'Should never see this'
@mode.setter
@abstractmethod
def mode(self, new_mode):
return
@property
@abstractmethod
def type(self):
return 'Should never see this'
@type.setter
@abstractmethod
def type(self, new_type):
return
def get_machine_type(self):
return 'Synchronous'
def __init__(self, machine_settings, machine_type):
RotatingMachine.__init__(self, machine_settings, machine_type)
self.type = self.type + 'Synchronous::'
|
bsmr-eve/Pyfa | scripts/conversion.py | Python | gpl-3.0 | 23,244 | 0.007013 | # Developed for module tiericide, this script will quickly print out a market
# conversion map based on patch notes, as well as database conversion mapping.
import argparse
import os.path
import sqlite3
import sys
# Add eos root path to sys.path so we can import ourselves
path = os.path.dirname(str(__file__, sys.getfilesystemencoding()))
sys.path.append(os.path.realpath(os.path.join(path, "..")))
# change to correct conversion
rename_phrase = " is now known as "
conversion_phrase = " is being converted to "
text = """Partial Weapon Navigation is being converted to Phased Scoped Target Painter
Indirect Scanning Dampening Unit I is being converted to Phased Muon Scoped Sensor Dampener
'Broker' Remote Sensor Dampener I is being converted to 'Executive' Remote Sensor Dampener
Initiated Ion Field ECM I is being converted to Hypnos Scoped Magnetometric ECM
FZ-3 Subversive Spatial Destabilizer ECM is being converted to BZ-5 Scoped Gravimetric ECM
'Penumbra' White Noise ECM is being converted to Umbra Scoped Radar ECM
Faint Phase Inversion ECM I is being converted to Enfeebling Scoped Ladar ECM
'Hypnos' Multispectral ECM I is being converted to Compulsive Scoped Multispectral ECM
1Z-3 Subversive ECM Eruption is being converted to Cetus Scoped Burst Jammer
'Prayer' Remote Tracking Computer is being converted to P-S Compact Remote Tracking Computer
'Tycoon' Remote Tracking Computer is being converted to 'Enterprise' Remote Tracking Computer
Monopulse Tracking Mechanism I is being converted to F-12 Enduring Tracking Computer
'Orion' Tracking CPU I is being converted to Optical Compact Tracking Computer
'Economist' Tracking Computer I is being converted to 'Marketeer' Tracking Computer
Beta-Nought Tracking Mode is being converted to 'Basic' Tracking Enhancer
Azimuth Descalloping Tracking Enhancer is being converted to 'Basic' Tracking Enhancer
F-AQ Delay-Line Scan Tracking Subroutines is being converted to 'Basic' Tracking Enhancer
Beam Parallax Tracking Program is being converted to 'Basic' Tracking Enhancer
Sigma-Nought Tracking Mode I is being converted to Fourier Compact Tracking Enhancer
Auto-Gain Control Tracking Enhancer I is being converted to Fourier Compact Tracking Enhancer
F-aQ Phase Code Tracking Subroutines is being converted to Fourier Compact Tracking Enhancer
Lateral Gyrostabilizer is being converted to 'Basic' Gyrostabilizer
F-M2 Weapon Inertial Suspensor is being converted to 'Basic' Gyrostabilizer
Hydraulic Stabilization Actuator is being converted to 'Basic' Gyrostabilizer
Stabilized Weapon Mounts is being converted to 'Basic' Gyrostabilizer
Cross-Lateral Gyrostabilizer I is being converted to Counterbalanced Compact Gyrostabilizer
F-M3 Munition Inertial Suspensor is being converted to Counterbalanced Compact Gyrostabilizer
Pneumatic Stabilization Actuator I is being converted to Counterbalanced Compact Gyrostabilizer
Monophonic Stabilization Actuator I is being converted to 'Kindred' Gyrostabilizer
Monophonic Stabilization Actuator I Blueprint is being converted to 'Kindred' Gyrostabilizer Blueprint
Heat Exhaust System is being converted to 'Basic' Heat Sink
C3S Convection Thermal Radiator is being converted to 'Basic' Heat Sink
'Boreas' Coolant System is being converted to 'Basic' Heat Sink
Stamped Heat Sink is being converted to 'Basic' Heat Sink
Thermal Exhaust System I is being converted to Extruded Compact Heat Sink
C4S Coiled Circuit Thermal Radiator is being converted to Extruded Compact Heat Sink
'Skadi' Coolant System I is being converted to Extruded Compact Heat Sink
'Mangonel' Heat Sink I is being converted to 'Trebuchet' Heat Sink I
'Mangonel' Heat Sink I Blueprint is being converted to 'Trebuchet' Heat Sink Blueprint
Insulated Stabilizer Array is being converted to 'Basic' Magnetic Field Stabilizer
Linear Flux Stabilizer is being converted to 'Basic' Magnetic Field Stabilizer
Gauss Field Balancer is being converted to 'Basic' Magnetic Field Stabilizer
Magnetic Vortex Stabilizer is being converted to 'Basic' Magnetic Field Stabilizer
Insulated Stabilizer Array I is being converted to Vortex Compact Magnetic Field Stabilizer
Linear Flux Stabilizer I is being converted to Vortex Compact Magnetic Field Stabilizer
Gauss Field Balancer I is being converted to Vortex Compact Magnetic Field Stabilizer
'Capitalist' Magnetic Field Stabilizer I is being converted to 'Monopoly' Magnetic Field Stabilizer
'Capitalist' Magnetic Field Stabilizer I Blueprint is being converted to 'Monopoly' Magnetic Field Stabilizer Blueprint
Muon Coil Bolt Array I is being converted to Crosslink Compact Ballistic Control System
Multiphasic Bolt Array I is being converted to Crosslink Compact Ballistic Control System
'Pandemonium' Ballistic Enhancement is being converted to Crosslink Compact Ballistic Control System
Ballistic 'Purge' Targeting System I is being converted to 'Full Duplex' Ballistic Control System
Ballistic 'Purge' Targeting System I Blueprint is being converted to 'Full Duplex' Ballistic Control System Blueprint
'Langour' Drive Disruptor I is being converted to X5 Enduring Stasis Webifier
Patterned Stasis Web I is being converted to Fleeting Compact Stasis Webifier
Fleeting Progressive Warp Scrambler I is being converted to Faint Epsilon Scoped Warp Scrambler
Fleeting Warp Disruptor I is being converted to Faint Scoped Warp Disruptor
GLFF Containment Field is being converted to 'Basic' Damage Control
Interior Force Field Array is being converted to 'Basic' Damage Control
F84 Local Damage System is being converted to 'Basic' Damage Control
Systematic Damage Control is being converted to 'Basic' Damage C | ontrol
'Gonzo' Damage Control I is bein | g converted to 'Radical' Damage Control
'Gonzo' Damage Control I Blueprint is being converted to 'Radical' Damage Control Blueprint
Emergency Damage Control I is being converted to IFFA Compact Damage Control
F85 Peripheral Damage System I is being converted to IFFA Compact Damage Control
Pseudoelectron Containment Field I is being converted to IFFA Compact Damage Control
Micro Ld-Acid Capacitor Battery I is being converted to 'Micro' Cap Battery
Micro Ohm Capacitor Reserve I is being converted to 'Micro' Cap Battery
Micro F-4a Ld-Sulfate Capacitor Charge Unit is being converted to 'Micro' Cap Battery
Micro Peroxide Capacitor Power Cell is being converted to 'Micro' Cap Battery
Micro Capacitor Battery II is being converted to 'Micro' Cap Battery
Small Ohm Capacitor Reserve I is being converted to Small Compact Pb-Acid Cap Battery
Small F-4a Ld-Sulfate Capacitor Charge Unit is being converted to Small Compact Pb-Acid Cap Battery
Small Peroxide Capacitor Power Cell is being converted to Small Compact Pb-Acid Cap Battery
Medium Ohm Capacitor Reserve I is being converted to Medium Compact Pb-Acid Cap Battery
Medium F-4a Ld-Sulfate Capacitor Charge Unit is being converted to Medium Compact Pb-Acid Cap Battery
Medium Peroxide Capacitor Power Cell is being converted to Medium Compact Pb-Acid Cap Battery
Large Ohm Capacitor Reserve I is being converted to Large Compact Pb-Acid Cap Battery
Large F-4a Ld-Sulfate Capacitor Charge Unit is being converted to Large Compact Pb-Acid Cap Battery
Large Peroxide Capacitor Power Cell is being converted to Large Compact Pb-Acid Cap Battery
ECCM - Radar I is being converted to Sensor Booster I
ECCM - Ladar I is being converted to Sensor Booster I
ECCM - Magnetometric I is being converted to Sensor Booster I
ECCM - Gravimetric I is being converted to Sensor Booster I
ECCM - Omni I is being converted to Sensor Booster I
ECCM - Radar I Blueprint is being converted to Sensor Booster I Blueprint
ECCM - Ladar I Blueprint is being converted to Sensor Booster I Blueprint
ECCM - Magnetometric I Blueprint is being converted to Sensor Booster I Blueprint
ECCM - Gravimetric I Blueprint is being converted to Sensor Booster I Blueprint
ECCM - Omni I Blueprint is being converted to Sensor Booster I Blueprint
Alumel Radar ECCM Sensor Array I is being converted to Alumel-Wired Enduring Sensor Booster
Alumel Ladar ECCM Sensor Array I is being converted to Alumel-Wired Enduring Sensor Booster
Alumel Gravimetric ECCM Sensor Array I is being converted to Alumel-Wired Endu |
inveniosoftware/invenio-previewer | setup.py | Python | mit | 3,916 | 0 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016-2020 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Invenio module for previewing files."""
import os
from setuptools import find_packages, setup
from setuptools.command.test import test as TestCommand # noqa
readme = open('README.rst').read()
history = open('CHANGES.rst').read()
tests_require = [
'invenio-config>=1.0.2',
'invenio-theme>=1.3.0a10',
'invenio-db[versioning]>=1.0.9',
'mock>=1.3.0',
'pytest-invenio>=1.4.0',
]
extras_require = {
'docs': [
'Sphinx>=3.3.1,<3.4.2',
],
'files': [
'invenio-files-rest>=1.0.0',
'invenio-records-files>=1.1.0',
],
'tests': tests_require,
}
extras_require['all'] = []
for reqs in extras_require.values():
extras_require['all'].extend(reqs)
setup_requires = [
'Babel>=2.8',
]
install_requires = [
'cchardet>=1.0.0',
'invenio-assets>=1.2.2',
'invenio-base>=1.2.4',
'invenio-formatter>=1.0.3',
'invenio-i18n>=1.3.0a1',
'invenio-pidstore>=1.2.2',
'invenio-records-ui>=1.1.0',
'ipython>=4.1.0',
'mistune>=0.7.2',
# NOTE: nbclient package provides execute in nbconvert >= 6.X
'nbconvert[execute]>=4.1.0,<6.0.0',
'nbformat>=4.0.1',
'tornado>=4.1,<=5.1.1', # required by nbconvert -> jupyter-client
]
packages = find_packages()
# Get the version string. Cannot be done with import!
g = {}
with open(os.path.join('invenio_previewer', 'version.py'), 'rt') as fp:
exec(fp.read(), g)
version = g['__version__']
setup(
name='invenio-previewer',
version=version,
description=__doc__,
long_description=readme + '\n\n' + history,
keywords='invenio previewer',
license='MIT',
author='CERN',
author_email='info@inveniosoftware.org',
url='https://github.com/inveniosoftware/invenio-previewer',
packages=packages,
zip_safe=False,
include_package_data=True,
platforms='any',
entry_points={
'invenio_base.apps': [
'invenio_previewer = invenio_previewer:InvenioPreviewer',
],
'invenio_i18n.translations': [
'messages = invenio_previewer',
],
'invenio_assets.webpack': {
'invenio_previewer_theme = invenio_previewer.webpack:previewer'
},
'invenio_previewer.previewers': [
'csv_dthreejs = invenio_previewer.extensions.csv_dthreejs',
'json_prismjs = invenio_previewer.extensions.json_prismjs',
'simple_image = invenio_previewer.extensions.simple_image',
'xml_prismjs = invenio_previewer.extensions.xml_prismjs',
'mistune = invenio_previewer.extensions.mistune',
'txt = invenio_previewer.extensions.txt',
'pdfjs = invenio_previewer.extensions.pdfjs',
'zip = invenio_previewer.extensions.zip',
'ipynb = invenio_previewer.extensions.ipynb',
'default = invenio_previewer.extensions.default',
],
},
extras_require=extras_require,
install_requires=install_requires,
setup_requires=setup_requires,
tests_require=tests_require,
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'Programming Language :: Python :: 3',
'Programming | Language :: Python :: 3.6',
'Programming Language :: Python : | : 3.7',
'Programming Language :: Python :: Implementation :: CPython',
'Development Status :: 5 - Production/Stable',
],
)
|
eagleamon/home-assistant | homeassistant/components/notify/ios.py | Python | apache-2.0 | 3,679 | 0 | """
iOS push notification platform for notify component.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/ecosystem/ios/notifications/
"""
import logging
from datetime import datetime, timezone
import requests
from homeassistant.components import ios
import homeassistant.util.dt as dt_util
from homeassistant.components.notify import (
ATTR_TARGET, ATTR_TITLE, ATTR_TITLE_DEFAULT, ATTR_MESSAGE,
ATTR_DATA, BaseNotificationService)
_LOGGER = logging.getLogger(__name__)
PUSH_URL = "https://ios-push.home-assistant.io/push"
DEPENDENCIES = ["ios"]
# pylint: disable=invalid-name
def log_rate_limits(target, resp, level=20):
"""Output rate limit log line at given level."""
rate_limits = resp["rateLimits"]
resetsAt = dt_util.parse_datetime(rate_limits["resetsAt"])
resetsAtTime = resetsAt - datetime.now(timezone.utc)
rate_limit_msg = ("iOS push notification rate limits for %s: "
"%d sent, %d allowed, %d errors, "
"resets in %s")
_LOGGER.log(level, rate_limit_msg,
ios.device_name_for_push_id(target),
rate_limits["successful"],
rate_limits["maximum"], rate_limits["errors"],
str(resetsAtTime).split(".")[0])
def get_service(hass, config, discovery_info=None):
"""Get the iOS notification service."""
if "notify.ios" not in hass.config.components:
# Need this to enable requirements checking in the app.
hass.config.components.append("notify.ios")
if not ios.devices_with_push():
_LOGGER.error(("The notify.ios platform was loaded but no "
"devices exist! Please check the documentation at "
"https://home-assistant.io/ecosystem/ios/notifications"
"/ for more information"))
return None
return iOSNotificationService()
class iOSNotificationService(BaseNotificationService):
"""Implement the notification service for iOS."""
def __init__(self):
"""Initialize the service."""
@property
def targets(self):
"""Return a dictionary of registered targets."""
return ios.devices_with_push()
def send_message(self, message="", **kwargs):
"""Send a message to the Lambda APNS gateway."""
data = {ATTR_MESSAGE: message}
if kwargs.get(ATTR_TITLE) is not None:
# Remove default title from notifications.
if kwargs.get(ATTR_TITLE) != ATTR_TITLE_DEFAULT:
data[ATTR_TITLE] = kwargs.get(ATTR_TITLE)
| targets = kwargs.get(ATTR_TARGET)
if not targets:
targets = ios.enabled_push_ids()
if kwargs.get(ATTR_DATA) is not None:
data[ATTR_DATA] = kwargs.get(ATTR_DATA)
for target in targets:
data[ATTR_TARGET] = target
req = requests.post(PUSH_URL, json=data, timeout=10)
if req.status_code != 201:
| fallback_error = req.json().get("errorMessage",
"Unknown error")
fallback_message = ("Internal server error, "
"please try again later: "
"{}").format(fallback_error)
message = req.json().get("message", fallback_message)
if req.status_code == 429:
_LOGGER.warning(message)
log_rate_limits(target, req.json(), 30)
else:
_LOGGER.error(message)
else:
log_rate_limits(target, req.json())
|
kayhayen/Nuitka | tests/syntax/GlobalForParameter.py | Python | apache-2.0 | 793 | 0 | # Copyright 2021, Kay Hayen, mailto:kay.hayen@gmail.com
#
# Python tests originally created or extracted from other peoples work. The
# parts were too small to be protected.
#
# Licensed under the Apache Lice | nse, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable l | aw or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def f(a):
global a
|
explosion/spaCy | spacy/tests/doc/test_retokenize_merge.py | Python | mit | 18,955 | 0.001266 | import pytest
from spacy.attrs import LEMMA
from spacy.vocab import Vocab
from spacy.tokens import Doc, Token
def test_doc_retokenize_merge(en_tokenizer):
text = "WKRO played songs by the beach boys all night"
attrs = {
"tag": "NAMED",
"lemma": "LEMMA",
"ent_type": "TYPE",
"morph": "Number=Plur",
}
doc = en_tokenizer(text)
assert len(doc) == 9
with doc.retokenize() as retokenizer:
retokenizer.merge(doc[4:7], attrs=attrs)
retokenizer.merge(doc[7:9], attrs=attrs)
assert len(doc) == 6
assert doc[4].text == "the beach boys"
assert doc[4].text_with_ws == "the beach boys "
assert doc[4].tag_ == "NAMED"
assert doc[4].lemma_ == "LEMMA"
assert str(doc[4].morph) == "Number=Plur"
assert doc[5].text == "all night"
assert doc[5].text_with_ws == "all night"
assert doc[5].tag_ == "NAMED"
assert str(doc[5].morph) == "Number=Plur"
assert doc[5].lemma_ == "LEMMA"
def test_doc_retokenize_merge_children(en_tokenizer):
"""Test that attachments work correctly after merging."""
text = "WKRO played songs by the beach boys all night"
attrs = {"tag": "NAMED", "lemma": "LEMMA", "ent_type": "TYPE"}
doc = en_tokenizer(text)
assert len(doc) == 9
with doc.retokenize() as retokenizer:
retokenizer.merge(doc[4:7], attrs=attrs)
for word in doc:
if word.i < word.head.i:
assert word in list(word.head.lefts)
elif word.i > word.head.i:
assert word in list(word.head.rights)
def test_doc_retokenize_merge_hang(en_tokenizer):
text = "through North and South Carolina"
doc = en_tokenizer(text)
with doc.retokenize() as retokenizer:
retokenizer.merge(doc[3:5], attrs={"lemma": "", "ent_type": "ORG"})
retokenizer.merge(doc[1:2], attrs={"lemma": "", "ent_type": "ORG"})
def test_doc_retokenize_retokenizer(en_tokenizer):
doc = en_tokenizer("WKRO played songs by the beach boys all night")
with doc.retokenize() as retokenizer:
retokenizer.merge(doc[4:7])
assert len(doc) == 7
assert doc[4].text == "the beach boys"
def test_doc_retokenize_retokenizer_attrs(en_tokenizer):
doc = en_tokenizer("WKRO played songs by the beach boys all night")
# test both string and integer attributes and values
attrs = {LEMMA: "boys", "ENT_TYPE": doc.vocab.strings["ORG"]}
with doc.retokenize() as retokenizer:
retokenizer.merge(doc[4:7], attrs=attrs)
assert len(doc) == 7
assert doc[4].text == "the beach boys"
assert doc[4].lemma_ == "boys"
assert doc[4].ent_type_ == "ORG"
def test_doc_retokenize_lex_attrs(en_tokenizer):
"""Test that lexical attributes can be changed (see #2390)."""
doc = en_tokenizer("WKRO played | beach boys songs")
assert not any(token.is_stop for token in doc)
with doc.retokenize() as retokenizer:
retokenizer.merge(doc[2:4], attrs={"LEMMA": "boys", "IS_STOP": True})
assert doc[2].text == "beach boys"
assert doc[2].lemma_ == "boys"
assert doc[2].is_stop
new_doc = Doc(doc.vocab, words=["beach boys"])
assert new_doc[0].is_stop
def test_doc_retokenize_spans_merge_tokens(en_tokenizer):
text = "Los Angeles start."
heads = [1, 2, 2, 2]
deps = | ["dep"] * len(heads)
tokens = en_tokenizer(text)
doc = Doc(tokens.vocab, words=[t.text for t in tokens], heads=heads, deps=deps)
assert len(doc) == 4
assert doc[0].head.text == "Angeles"
assert doc[1].head.text == "start"
with doc.retokenize() as retokenizer:
attrs = {"tag": "NNP", "lemma": "Los Angeles", "ent_type": "GPE"}
retokenizer.merge(doc[0:2], attrs=attrs)
assert len(doc) == 3
assert doc[0].text == "Los Angeles"
assert doc[0].head.text == "start"
assert doc[0].ent_type_ == "GPE"
def test_doc_retokenize_spans_merge_tokens_default_attrs(en_vocab):
words = ["The", "players", "start", "."]
lemmas = [t.lower() for t in words]
heads = [1, 2, 2, 2]
deps = ["dep"] * len(heads)
tags = ["DT", "NN", "VBZ", "."]
pos = ["DET", "NOUN", "VERB", "PUNCT"]
doc = Doc(
en_vocab, words=words, tags=tags, pos=pos, heads=heads, deps=deps, lemmas=lemmas
)
assert len(doc) == 4
assert doc[0].text == "The"
assert doc[0].tag_ == "DT"
assert doc[0].pos_ == "DET"
assert doc[0].lemma_ == "the"
with doc.retokenize() as retokenizer:
retokenizer.merge(doc[0:2])
assert len(doc) == 3
assert doc[0].text == "The players"
assert doc[0].tag_ == "NN"
assert doc[0].pos_ == "NOUN"
assert doc[0].lemma_ == "the players"
doc = Doc(
en_vocab, words=words, tags=tags, pos=pos, heads=heads, deps=deps, lemmas=lemmas
)
assert len(doc) == 4
assert doc[0].text == "The"
assert doc[0].tag_ == "DT"
assert doc[0].pos_ == "DET"
assert doc[0].lemma_ == "the"
with doc.retokenize() as retokenizer:
retokenizer.merge(doc[0:2])
retokenizer.merge(doc[2:4])
assert len(doc) == 2
assert doc[0].text == "The players"
assert doc[0].tag_ == "NN"
assert doc[0].pos_ == "NOUN"
assert doc[0].lemma_ == "the players"
assert doc[1].text == "start ."
assert doc[1].tag_ == "VBZ"
assert doc[1].pos_ == "VERB"
assert doc[1].lemma_ == "start ."
def test_doc_retokenize_spans_merge_heads(en_vocab):
words = ["I", "found", "a", "pilates", "class", "near", "work", "."]
heads = [1, 1, 4, 6, 1, 4, 5, 1]
deps = ["dep"] * len(heads)
doc = Doc(en_vocab, words=words, heads=heads, deps=deps)
assert len(doc) == 8
with doc.retokenize() as retokenizer:
attrs = {"tag": doc[4].tag_, "lemma": "pilates class", "ent_type": "O"}
retokenizer.merge(doc[3:5], attrs=attrs)
assert len(doc) == 7
assert doc[0].head.i == 1
assert doc[1].head.i == 1
assert doc[2].head.i == 3
assert doc[3].head.i == 1
assert doc[4].head.i in [1, 3]
assert doc[5].head.i == 4
def test_doc_retokenize_spans_merge_non_disjoint(en_tokenizer):
text = "Los Angeles start."
doc = en_tokenizer(text)
with pytest.raises(ValueError):
with doc.retokenize() as retokenizer:
retokenizer.merge(
doc[0:2],
attrs={"tag": "NNP", "lemma": "Los Angeles", "ent_type": "GPE"},
)
retokenizer.merge(
doc[0:1],
attrs={"tag": "NNP", "lemma": "Los Angeles", "ent_type": "GPE"},
)
def test_doc_retokenize_span_np_merges(en_tokenizer):
text = "displaCy is a parse tool built with Javascript"
heads = [1, 1, 4, 4, 1, 4, 5, 6]
deps = ["dep"] * len(heads)
tokens = en_tokenizer(text)
doc = Doc(tokens.vocab, words=[t.text for t in tokens], heads=heads, deps=deps)
assert doc[4].head.i == 1
with doc.retokenize() as retokenizer:
attrs = {"tag": "NP", "lemma": "tool", "ent_type": "O"}
retokenizer.merge(doc[2:5], attrs=attrs)
assert doc[2].head.i == 1
text = "displaCy is a lightweight and modern dependency parse tree visualization tool built with CSS3 and JavaScript."
heads = [1, 1, 10, 7, 3, 3, 7, 10, 9, 10, 1, 10, 11, 12, 13, 13, 1]
deps = ["dep"] * len(heads)
tokens = en_tokenizer(text)
doc = Doc(tokens.vocab, words=[t.text for t in tokens], heads=heads, deps=deps)
with doc.retokenize() as retokenizer:
for ent in doc.ents:
attrs = {"tag": ent.label_, "lemma": ent.lemma_, "ent_type": ent.label_}
retokenizer.merge(ent, attrs=attrs)
text = "One test with entities like New York City so the ents list is not void"
heads = [1, 1, 1, 2, 3, 6, 7, 4, 12, 11, 11, 12, 1, 12, 12]
deps = ["dep"] * len(heads)
tokens = en_tokenizer(text)
doc = Doc(tokens.vocab, words=[t.text for t in tokens], heads=heads, deps=deps)
with doc.retokenize() as retokenizer:
for ent in doc.ents:
retokenizer.merge(ent)
def test_doc_retokenize_spans_entity_merge(en_tokenizer):
# fmt: off
text = "Stewart Lee is a stand up comedian who lives in England and loves Joe Pasquale.\n"
heads = [1, 2, 2, 4, 6, 4, 2, 8, 6, 8, |
quantumlib/Cirq | cirq-google/cirq_google/transformers/analytical_decompositions/two_qubit_to_sycamore.py | Python | apache-2.0 | 18,563 | 0.004902 | # Copyright 2022 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility methods for decomposing two-qubit unitaries into Sycamore gates."""
from typing import Iterator, List, Optional
import itertools
import math
import numpy as np
import cirq
from cirq_google import ops
def _decompose_arbitrary_into_syc_tabulation(
op: cirq.Operation, tabulation: cirq.TwoQubitGateTabulation
) -> cirq.OP_TREE:
"""Synthesize | an arbitrary 2 qubit operation to a Sycamore operation using the given Tabulation.
Args:
op: Operation to decompose.
tabulation: A `cirq.TwoQubitGateTabulation` for the Sycamore gate.
Yields:
A `cirq.OP_TREE` that performs the given operation using Sycamore operations.
"""
qubit_a, qubit_b = op.qu | bits
result = tabulation.compile_two_qubit_gate(cirq.unitary(op))
local_gates = result.local_unitaries
for i, (gate_a, gate_b) in enumerate(local_gates):
yield from _phased_x_z_ops(gate_a, qubit_a)
yield from _phased_x_z_ops(gate_b, qubit_b)
if i != len(local_gates) - 1:
yield ops.SYC.on(qubit_a, qubit_b)
def two_qubit_matrix_to_sycamore_operations(
q0: cirq.Qid,
q1: cirq.Qid,
mat: np.ndarray,
*,
atol: float = 1e-8,
clean_operations: bool = True,
) -> cirq.OP_TREE:
"""Decomposes a two-qubit unitary matrix into `cirq_google.SYC` + single qubit rotations.
The analytical decomposition first Synthesizes the given operation using `cirq.CZPowGate` +
single qubit rotations and then decomposes each `cirq.CZPowGate` into `cirq_google.SYC` +
single qubit rotations using `cirq_google.known_2q_op_to_sycamore_operations`.
Note that the resulting decomposition may not be optimal, and users should first try to
decompose a given operation using `cirq_google.known_2q_op_to_sycamore_operations`.
Args:
q0: The first qubit being operated on.
q1: The other qubit being operated on.
mat: Defines the operation to apply to the pair of qubits.
atol: A limit on the amount of absolute error introduced by the
construction.
clean_operations: Merges runs of single qubit gates to a single `cirq.PhasedXZGate` in
the resulting operations list.
Returns:
A `cirq.OP_TREE` that implements the given unitary operation using only `cirq_google.SYC` +
single qubit rotations.
"""
decomposed_ops: List[cirq.OP_TREE] = []
for op in cirq.two_qubit_matrix_to_operations(
q0, q1, mat, allow_partial_czs=True, atol=atol, clean_operations=clean_operations
):
if cirq.num_qubits(op) == 2:
decomposed_cphase = known_2q_op_to_sycamore_operations(op)
assert decomposed_cphase is not None
decomposed_ops.append(decomposed_cphase)
else:
decomposed_ops.append(op)
return (
[*cirq.merge_single_qubit_gates_to_phxz(cirq.Circuit(decomposed_ops)).all_operations()]
if clean_operations
else decomposed_ops
)
def known_2q_op_to_sycamore_operations(op: cirq.Operation) -> Optional[cirq.OP_TREE]:
"""Synthesizes a known two-qubit operation using `cirq_google.SYC` + single qubit rotations.
This function dispatches to various known gate decompositions based on gate type. Currently,
the following gates are known:
1. Adjacent `cirq.SWAP` and `cirq.ZPowGate` wrapped in a circuit operation of length 2.
2. `cirq.PhasedISwapPowGate` with exponent = 1 or phase_exponent = 0.25.
3. `cirq.SWAP`, `cirq.ISWAP`.
4. `cirq.CNotPowGate`, `cirq.CZPowGate`, `cirq.ZZPowGate`.
Args:
op: Operation to decompose.
Returns:
- A `cirq.OP_TREE` that implements the given known operation using only `cirq_google.SYC` +
single qubit rotations OR
- None if `op` is not a known operation.
"""
if not (cirq.has_unitary(op) and cirq.num_qubits(op) == 2):
return None
q0, q1 = op.qubits
if isinstance(op.untagged, cirq.CircuitOperation):
flattened_gates = [o.gate for o in cirq.decompose_once(op.untagged)]
if len(flattened_gates) != 2:
return None
for g1, g2 in itertools.permutations(flattened_gates):
if g1 == cirq.SWAP and isinstance(g2, cirq.ZZPowGate):
return _swap_rzz(g2.exponent * np.pi / 2, q0, q1)
gate = op.gate
if isinstance(gate, cirq.PhasedISwapPowGate):
if math.isclose(gate.exponent, 1):
return _decompose_phased_iswap_into_syc(gate.phase_exponent, q0, q1)
if math.isclose(gate.phase_exponent, 0.25):
return _decompose_phased_iswap_into_syc_precomputed(gate.exponent * np.pi / 2, q0, q1)
return None
if isinstance(gate, cirq.CNotPowGate):
return [
cirq.Y(q1) ** -0.5,
_decompose_cphase_into_syc(gate.exponent * np.pi, q0, q1),
cirq.Y(q1) ** 0.5,
]
if isinstance(gate, cirq.CZPowGate):
return (
_decompose_cz_into_syc(q0, q1)
if math.isclose(gate.exponent, 1)
else _decompose_cphase_into_syc(gate.exponent * np.pi, q0, q1)
)
if isinstance(gate, cirq.SwapPowGate) and math.isclose(gate.exponent, 1):
return _decompose_swap_into_syc(q0, q1)
if isinstance(gate, cirq.ISwapPowGate) and math.isclose(gate.exponent, 1):
return _decompose_iswap_into_syc(q0, q1)
if isinstance(gate, cirq.ZZPowGate):
return _rzz(gate.exponent * np.pi / 2, q0, q1)
return None
def _decompose_phased_iswap_into_syc(
phase_exponent: float, a: cirq.Qid, b: cirq.Qid
) -> cirq.OP_TREE:
"""Decomposes `cirq.PhasedISwapPowGate` with an exponent of 1 into Sycamore gates.
This should only be called if the gate has an exponent of 1. Otherwise,
`_decompose_phased_iswap_into_syc_precomputed` should be used instead. The advantage of using
this function is that the resulting circuit will be smaller.
Args:
phase_exponent: The exponent on the Z gates.
a: First qubit to operate on.
b: Second qubit to operate on.
Yields:
A `cirq.OP_TREE` implementing the `cirq.PhasedISwapPowGate` gate using Sycamore gates.
"""
yield cirq.Z(a) ** phase_exponent,
yield cirq.Z(b) ** -phase_exponent,
yield _decompose_iswap_into_syc(a, b),
yield cirq.Z(a) ** -phase_exponent,
yield cirq.Z(b) ** phase_exponent,
def _decompose_phased_iswap_into_syc_precomputed(
theta: float, a: cirq.Qid, b: cirq.Qid
) -> cirq.OP_TREE:
"""Decomposes `cirq.PhasedISwapPowGate` into Sycamore gates using precomputed coefficients.
This should only be called if the Gate has a phase_exponent of .25. If the gate has an
exponent of 1, _decompose_phased_iswap_into_syc should be used instead. Converting PhasedISwap
gates to Sycamore is not supported if neither of these constraints are satisfied.
This synthesize a PhasedISwap in terms of four sycamore gates. This compilation converts the
gate into a circuit involving two CZ gates, which themselves are each represented as two
Sycamore gates and single-qubit rotations
Args:
theta: Rotation parameter for the phased ISWAP.
a: First qubit to operate on.
b: Second qubit to operate on.
Yields:
A `cirq.OP_TREE` implementing the `cirq.PhasedISwapPowGate` gate using Sycamore gates.
"""
yield cirq.PhasedXPowGate(phase_exponent=0.41175161497166024, exponent=0.5653807577895922).on(a)
yield cirq.PhasedXPowGate(phase_exponent=1.0, exponent=0.5).on(b),
yield (cirq.Z ** 0.709989231488347 |
gratipay/gratipay.com | tests/py/test_username_json.py | Python | mit | 3,211 | 0.003116 | # coding: utf8
from __future__ import print_function, unicode_literals
import json
from gratipay.testing import Harness
class Tests(Harness):
def change_username(self, new_username, auth_as='alice'):
if auth_as:
self.make_participant(auth_as, claimed_time='now')
r = self.client.POST('/~alice/username.json', {'username': new_username},
auth_as=auth_as, raise_immediately=False)
return r.code, json.loads(r.body)
def test_participant_can_change_their_username(self):
code, body = self.change_username("bob")
assert code == 200
assert body['username'] == "bob"
def test_anonymous_gets_401(self):
code, body = self.change_username("bob", auth_as=None)
assert code == 401
def test_empty(self):
code, body = self.change_username(' ')
assert code == 400
assert body['error_message_long'] == "You need to provide a username!"
def test_invalid(self):
code, body = self.change_username("§".encode('utf8'))
assert code == 400
assert body['error_message_long'] == "The username '§' contains invalid characters."
def test_restricted_username_without_extension(self):
code, body = self.change_username("assets")
assert code == 400
assert body['error_message_long'] == "The username 'assets' is restricted."
def test_restricted_username_with_extension(self):
code, body = self.change_username("1.0-payout")
assert code == 400
assert body['error_message_long'] == "The username '1.0-payout' is restricted."
def test_robots_txt_is_not_an_available_username(self):
code, body = self.change_username("robots.txt")
assert code == 400
assert body['error_message_long'] == "The username 'robots.txt' is restricted."
def test_but_robots_txt_is_still_available_when_theres_a_robots_username(self):
self.make_participant('robots', claimed_time='now')
# manually follow redirects from /robots -> /~robots/
response = self.client.GxT("/robots")
assert response.code == 302
assert response.headers['Location'] == '/robots/'
response = self.client.GxT("/robots/")
assert response.code == 302
assert response.headers['Location'] == '/~robots/'
response = self.client.GET("/~robots/")
assert response.code == 200
assert '<h1>~robots</h1>' in response.body
# /robots.txt
response = self.client.GET("/robots.txt")
assert response.code == 200
assert response.body == 'User-a | gent: *\nDisallow: /*.json\nDisallow: /on/*\n'
def test_unavailable(self):
self.make_participant("bob")
code, body = self.change_username("bob")
assert code == 400
assert body['error_message_long'] == "The username 'bob' is already taken."
def test_too_l | ong(self):
username = "I am way too long, and you know it, and the American people know it."
code, body = self.change_username(username)
assert code == 400
assert body['error_message_long'] == "The username '%s' is too long." % username
|
icedtrees/unsw-lecture-times | scraper/scraper.py | Python | mit | 2,622 | 0.00267 | """ Original python command-line scraper for UNSW lecture times. Unused in the webapp. """
import re
import sys
from collections import Counter
import requests
SEMESTER = "15s2"
BASE_URL = "http://www.cse.unsw.edu.au/~teachadmin/lecture_times/" + SEMESTER
DAYS = ("Monday", "Tuesday", "Wednesday", "Thursday", "Friday")
def get_courses(course_category):
index = requests.get('{}/{}'.format(BASE_URL, course_category))
subpages = re.findall(r'A HREF="(.*?\.html)"', index.text)
return ['{}/{}'.format(course_category, subpage) for subpage in subpages]
def get_lectures(user_cou | rses):
courses = []
for arg in user_courses:
if len(arg) == 4:
courses += get_courses(arg)
elif len(arg) == 8:
courses.append('{}/{}.html'.format(arg[:4], arg[4:]))
else:
print("Invalid argument: {}".format(arg))
table_texts = {}
for course in courses:
print(course)
response = requests.get('{}/{}'.f | ormat(BASE_URL, course))
if response.status_code == 200:
table_match = re.search(r'<table border=1.*?>(.*?)</table>', response.text, re.DOTALL)
table_texts[course] = table_match.group(1)
else:
print("Could not find course {}".format(course))
tables = {}
for course, tableText in table_texts.items():
tables[course] = {}
row_texts = re.findall(r'<tr>(.*?)(?=(?:$|<tr>))', tableText, re.DOTALL)
for rowText in row_texts:
time = re.search(r'<th>(.*?)<', rowText).group(1)
cells = re.findall(r'<td.*?small>(.*?)(?=(?:$|<td))', rowText)
for day, cell in zip(DAYS, cells):
cell_count = re.findall(r'([A-Z0-9]*?)\((.*?)\)', cell)
cell_count = [(name, int(count)) for name, count in cell_count]
tables[course][(day, time)] = cell_count
return tables
def total_count(lectures):
count = Counter()
for subject in lectures.values():
subject_count = {dayTime: sum([count[1] for count in subjectList]) for dayTime, subjectList in subject.items()}
count.update(subject_count)
return dict(count)
def main():
if len(sys.argv) <= 1:
exit("Usage: python lectures.py COMP1917 COMP1921 COMP1927 ...")
lectures = get_lectures(sys.argv[1:])
total = total_count(lectures)
ordered_count = sorted(total.items(), key=lambda item: (DAYS.index(item[0][0]), int(item[0][1].split(':')[0])))
print("\n".join(["{} {}: {}".format(count[0][0], count[0][1], count[1]) for count in ordered_count]))
if __name__ == '__main__':
main()
|
huyphan/pyyawhois | test/record/parser/test_response_whois_nic_sn_status_registered.py | Python | mit | 3,849 | 0.003378 |
# This file is autogenerated. Do not edit it manually.
# If you want change the content of this file, edit
#
# spec/fixtures/responses/whois.nic.sn/status_registered
#
# and regenerate the tests with the following script
#
# $ scripts/generate_tests.py
#
from nose.tools import *
from dateutil.parser import parse as time_parse
import yawhois
class TestWhoisNicSnStatusRegistered(object):
def setUp(self):
fixture_path = "spec/fixtures/responses/whois.nic.sn/status_registered.txt"
host = "whois.nic.sn"
part = yawhois.record.Part(open(fixture_path, "r").read(), host)
self.record = yawhois.record.Record(None, [part])
def test_status(self):
eq_(self.record.status, 'registered')
def test_available(self):
eq_(self.record.available, False)
def test_domain(self):
eq_(self.record.domain, "google.sn")
def test_nameservers(self):
eq_(self.record.nameservers.__class__.__name__, 'list')
eq_(len(self.record.nameservers), 4)
eq_(self.record.nameservers[0].__class__.__name__, 'Nameserver')
eq_(self.record.nameservers[0].name, "ns1.google.com")
eq_(self.record.nameservers[1].__class__.__name__, 'Nameserver')
eq_(self.record.nameservers[1].name, "ns2.google.com")
eq_(self.record.nameservers[2].__class__.__name__, 'Nameserver')
eq_(self.record.nameservers[2].name, "ns3.google.com")
eq_(self.record.nameservers[3].__class__.__name__, 'Nameserver')
eq_(self.record.nameservers[3].name, "ns4.google.com")
def test_admin_contacts(self):
eq_(self.record.admin_contacts.__class__.__name__, 'list')
eq_(len(self.record.admin_contacts), 1)
eq_(self.record.admin_contacts[0].__class__.__name__, 'Contact')
eq_(self.record.admin_contacts[0].type, yawhois.record.Contact.TYPE_ADMINISTRATIVE)
eq_(self.record.admin_contacts[0].id, "C5-SN")
eq_(self.record.admin_contacts[0].name, "C5-SN")
def test_registered(self):
eq_(self.record.registered, True)
def test_created_on(self):
eq_(self.record.created_on.__class__.__name__, 'da | tetime')
eq_(self.record.created_on, time_parse('2008-05-08 17:59:38.43'))
def test_registrar(self):
eq_(self.record.registrar.__class__.__name__, 'Registrar')
eq_(self.record.registrar.id, "registry")
eq_(self.record.registrar.name, | "registry")
def test_registrant_contacts(self):
eq_(self.record.registrant_contacts.__class__.__name__, 'list')
eq_(len(self.record.registrant_contacts), 1)
eq_(self.record.registrant_contacts[0].__class__.__name__, 'Contact')
eq_(self.record.registrant_contacts[0].type, yawhois.record.Contact.TYPE_REGISTRANT)
eq_(self.record.registrant_contacts[0].id, "C4-SN")
eq_(self.record.registrant_contacts[0].name, "C4-SN")
def test_technical_contacts(self):
eq_(self.record.technical_contacts.__class__.__name__, 'list')
eq_(len(self.record.technical_contacts), 1)
eq_(self.record.technical_contacts[0].__class__.__name__, 'Contact')
eq_(self.record.technical_contacts[0].type, yawhois.record.Contact.TYPE_TECHNICAL)
eq_(self.record.technical_contacts[0].id, "C6-SN")
eq_(self.record.technical_contacts[0].name, "C6-SN")
def test_updated_on(self):
assert_raises(yawhois.exceptions.AttributeNotSupported, self.record.updated_on)
def test_domain_id(self):
assert_raises(yawhois.exceptions.AttributeNotSupported, self.record.domain_id)
def test_expires_on(self):
assert_raises(yawhois.exceptions.AttributeNotSupported, self.record.expires_on)
def test_disclaimer(self):
assert_raises(yawhois.exceptions.AttributeNotSupported, self.record.disclaimer)
|
DedMemez/ODS-August-2017 | coghq/CashbotMintGearRoom_Action00.py | Python | apache-2.0 | 7,793 | 0.001412 | # Fuck you Disyer. Stealing my fucking paypal. GET FUCKED: toontown.coghq.CashbotMintGearRoom_Action00
from panda3d.core import Point3, Vec3
from toontown.coghq.SpecImports import *
GlobalEntities = {1000: {'type': 'levelMgr',
'name': 'LevelMgr',
'comment': '',
'parentEntId': 0,
'cogLevel': 0,
'farPlaneDistance': 1500,
'modelFilename': 'phase_10/models/cashbotHQ/ZONE07a',
'wantDoors': 1},
0: {'type': 'zone',
'name': 'UberZone',
'comment': '',
'parentEntId': 0,
'scale': 1,
'description': '',
'visibility': []},
10007: {'type': 'attribModifier',
'name': 'goonStrength',
'comment': '',
'parentEntId': 0,
'attribName': 'strength',
'recursive': 1,
'typeName': | 'goon',
'value': '10'},
10002: {'type': 'goon',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10001,
'pos': Point3(0.0, 0.0, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': 1.5,
'attackR | adius': 15,
'crushCellId': None,
'goonType': 'pg',
'gridId': None,
'hFov': 70,
'strength': 10,
'velocity': 4.0},
10004: {'type': 'goon',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10003,
'pos': Point3(0.0, 0.0, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': 1.5,
'attackRadius': 15,
'crushCellId': None,
'goonType': 'pg',
'gridId': None,
'hFov': 70,
'strength': 10,
'velocity': 4},
10006: {'type': 'goon',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10005,
'pos': Point3(0.0, 0.0, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': 1.5,
'attackRadius': 15,
'crushCellId': None,
'goonType': 'pg',
'gridId': None,
'hFov': 70,
'strength': 10,
'velocity': 4},
10009: {'type': 'goon',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10008,
'pos': Point3(0.0, 0.0, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': 1.5,
'attackRadius': 15,
'crushCellId': None,
'goonType': 'pg',
'gridId': None,
'hFov': 70,
'strength': 10,
'velocity': 4},
10011: {'type': 'healBarrel',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10012,
'pos': Point3(2.15899157524, 2.29615116119, 5.45938539505),
'hpr': Vec3(331.109100342, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'rewardPerGrab': 8,
'rewardPerGrabMax': 0},
10012: {'type': 'model',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10010,
'pos': Point3(20.9361133575, 13.8672618866, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(0.920000016689, 0.920000016689, 0.920000016689),
'collisionsOnly': 0,
'modelPath': 'phase_10/models/cashbotHQ/CBMetalCrate'},
10013: {'type': 'model',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10000,
'pos': Point3(57.0218696594, 5.15023899078, 0.0),
'hpr': Vec3(270.0, 0.0, 0.0),
'scale': Vec3(0.660517215729, 0.660517215729, 0.660517215729),
'collisionsOnly': 0,
'modelPath': 'phase_10/models/cashbotHQ/pipes_C'},
10015: {'type': 'model',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10000,
'pos': Point3(-25.9598789215, 59.4411621094, 9.73551368713),
'hpr': Vec3(274.089996338, 0.0, 0.0),
'scale': Vec3(1.53790044785, 1.53790044785, 1.53790044785),
'collisionsOnly': 0,
'modelPath': 'phase_10/models/cashbotHQ/crates_F1'},
10016: {'type': 'model',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10000,
'pos': Point3(33.3394889832, -18.3643035889, 0.0),
'hpr': Vec3(180.0, 0.0, 0.0),
'scale': Vec3(0.660000026226, 0.660000026226, 0.660000026226),
'collisionsOnly': 0,
'modelPath': 'phase_10/models/cashbotHQ/pipes_D1'},
10017: {'type': 'model',
'name': 'copy of <unnamed>',
'comment': '',
'parentEntId': 10018,
'pos': Point3(0.0, 0.0, 0.0),
'hpr': Point3(169.699996948, 0.0, 0.0),
'scale': Vec3(0.902469694614, 0.902469694614, 0.902469694614),
'collisionsOnly': 0,
'modelPath': 'phase_10/models/cashbotHQ/pipes_D4'},
10020: {'type': 'model',
'name': 'copy of <unnamed> (2)',
'comment': '',
'parentEntId': 10018,
'pos': Point3(-12.071434021, 0.0, 0.0),
'hpr': Vec3(288.434936523, 0.0, 0.0),
'scale': Vec3(0.902469694614, 0.902469694614, 0.902469694614),
'collisionsOnly': 0,
'modelPath': 'phase_10/models/cashbotHQ/pipes_D4'},
10022: {'type': 'model',
'name': '<unnamed>',
'comment': '',
'parentEntId': 10021,
'pos': Point3(-5.97179174423, -60.3133621216, 0.0),
'hpr': Vec3(180.0, 0.0, 0.0),
'scale': Vec3(0.869391143322, 0.869391143322, 0.869391143322),
'collisionsOnly': 0,
'modelPath': 'phase_10/models/cashbotHQ/pipes_C'},
10000: {'type': 'nodepath',
'name': 'props',
'comment': '',
'parentEntId': 0,
'pos': Point3(0.0, 0.0, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': 1},
10010: {'type': 'nodepath',
'name': 'healPuzzle',
'comment': '',
'parentEntId': 0,
'pos': Point3(43.1796302795, 0.0, 0.0),
'hpr': Point3(-90.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0)},
10018: {'type': 'nodepath',
'name': 'rightVertPipes',
'comment': '',
'parentEntId': 10021,
'pos': Point3(-16.4536571503, -45.3981781006, -8.39999961853),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Point3(0.649999976158, 0.649999976158, 1.55999994278)},
10021: {'type': 'nodepath',
'name': 'rightPipes',
'comment': '',
'parentEntId': 10000,
'pos': Point3(0.0, 0.0, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': 1},
10001: {'type': 'path',
'name': 'nearPace',
'comment': '',
'parentEntId': 0,
'pos': Point3(-59.7391967773, 0.0, 0.0),
'hpr': Point3(90.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'pathIndex': 3,
'pathScale': 1.0},
10003: {'type': 'path',
'name': 'bowtie',
'comment': '',
'parentEntId': 0,
'pos': Point3(-40.0336875916, 0.0, 0.0),
'hpr': Point3(0.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'pathIndex': 2,
'pathScale': 1.0},
10005: {'type': 'path',
'name': 'bridgePace',
'comment': '',
'parentEntId': 0,
'pos': Point3(-8.80618190765, -1.5122487545, 0.0),
'hpr': Vec3(0.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'pathIndex': 3,
'pathScale': 1.0},
10008: {'type': 'path',
'name': 'farPace',
'comment': '',
'parentEntId': 0,
'pos': Point3(7.5265827179, 7.56240034103, 0.0),
'hpr': Vec3(90.0, 0.0, 0.0),
'scale': Vec3(1.0, 1.0, 1.0),
'pathIndex': 3,
'pathScale': 1.0}}
Scenario0 = {}
levelSpec = {'globalEntities': GlobalEntities,
'scenarios': [Scenario0]} |
threefoldfoundation/app_backend | plugins/tff_backend/handlers/cron.py | Python | bsd-3-clause | 4,468 | 0.001119 | # -*- coding: utf-8 -*-
# Copyright 2017 GIG Technology NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @@license_version:1.3@@
import datetime
import httplib
import json
import logging
import webapp2
from google.appengine.api import urlfetch
from google.appengine.api.app_identity import app_identity
from framework.plugin_loader import get_config
from mcfw.consts import MISSING
from plugins.rogerthat_api.api import friends
from plugins.tff_backend.bizz import get_tf_token_api_key
from plugins.tff_backend.bizz.agenda import update_expired_events
from plugins.tff_backend.bizz.dashboard import rebuild_firebase_data
from plugins.tff_backend.bizz.flow_statistics import check_stuck_flows
from plugins.tff_backend.bizz.global_stats import update_currencies
from plugins.tff_backend.bizz.nodes.stats import save_node_statuses, check_online_nodes, check_offline_nodes
from plugins.tff_backend.configuration import TffConfiguration
from plugins.tff_backend.plugin_consts import NAMESPACE
class BackupHandler(webapp2.RequestHandler):
def get(self):
config = get_config(NAMESPACE)
assert isinstance(config, TffConfiguration)
if config.backup_bucket is MISSING or not config.backup_bucket:
logging.debug('Backup is disabled')
return
access_token, _ = app_identity.get_access_token('https://www.googleapis.com/auth/datastore')
app_id = app_identity.get_application_id()
timestamp = datetime.datetime.now().strftime('%Y%m%d-%H%M%S')
output_url_prefix = 'gs://%s' % config.backup_bucket
if '/' not in output_url_prefix[5:]:
# Only a bucket name has been provided - no prefix or trailing slash
output_url_prefix += '/' + timestamp
else:
output_url_prefix += timestamp
entity_filter = {
'kinds': self.request.get_all('kind'),
'namespace_ids': self.request.get_all('namespace_id')
}
request = {
'project_id': app_id,
'output_url_prefix': output_url_prefix,
'entity_filter': entity_filter
}
headers = {
'Content-Type': 'application/json',
'Authorization': 'Bearer ' + access_token
}
url = 'https://datastore.googleapis.com/v1/projects/%s:export' % app_id
try:
result = urlfetch.fetch(url=url,
| payload=json.dumps(request),
method=urlfetch.POST,
| deadline=60,
headers=headers) # type: urlfetch._URLFetchResult
if result.status_code == httplib.OK:
logging.info(result.content)
else:
logging.error(result.content)
self.response.status_int = result.status_code
except urlfetch.Error:
logging.exception('Failed to initiate export.')
self.response.status_int = httplib.INTERNAL_SERVER_ERROR
class RebuildSyncedRolesHandler(webapp2.RequestHandler):
def get(self):
api_key = get_tf_token_api_key()
friends.rebuild_synced_roles(api_key, members=[], service_identities=[])
class UpdateGlobalStatsHandler(webapp2.RequestHandler):
def get(self):
update_currencies()
class CheckNodesOnlineHandler(webapp2.RequestHandler):
def get(self):
check_online_nodes()
class CheckOfflineNodesHandler(webapp2.RequestHandler):
def get(self):
check_offline_nodes()
class SaveNodeStatusesHandler(webapp2.RequestHandler):
def get(self):
save_node_statuses()
class ExpiredEventsHandler(webapp2.RequestHandler):
def get(self):
update_expired_events()
class RebuildFirebaseHandler(webapp2.RequestHandler):
def get(self):
rebuild_firebase_data()
class CheckStuckFlowsHandler(webapp2.RequestHandler):
def get(self):
check_stuck_flows()
|
Telestream/telestream-cloud-python-sdk | telestream_cloud_qc_sdk/telestream_cloud_qc/models/black_frame_test.py | Python | mit | 24,026 | 0.000375 | # coding: utf-8
"""
Qc API
Qc API # noqa: E501
The version of the OpenAPI document: 3.0.0
Contact: cloudsupport@telestream.net
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from telestream_cloud_qc.configuration import Configuration
class BlackFrameTest(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'level_default_or_custom': 'DefaultOrCustomType',
'level': 'int',
'percentage_of_frame': 'int',
'start_range_enabled': 'bool',
'start_time': 'float',
'end_time': 'float',
'start_range_tolerance': 'float',
'time_secs_or_frames': 'SecsOrFramesType',
'end_range_enabled': 'bool',
'end_range': 'float',
'end_range_tolerance': 'float',
'end_secs_or_frames': 'SecsOrFramesType',
'not_at_any_other_time': 'bool',
'max_time_allowed': 'float',
'max_time_allowed_secs_or_frames': 'SecsOrFramesType',
'max_time_at_start': 'bool',
'max_time_allowed_at_start': 'float',
'max_time_allowed_at_start_secs_or_frames': 'SecsOrFramesType',
'max_time_at_end': 'bool',
'max_time_allowed_at_end': 'float',
'max_time_allowed_at_end_secs_or_frames': 'SecsOrFramesType',
'reject_on_error': 'bool',
'do_correction': 'bool',
'checked': 'bool'
}
attribute_map = {
'level_default_or_custom': 'level_default_or_custom',
'level': 'level',
'percentage_of_frame': 'percentage_of_frame',
'start_range_enabled': 'start_range_enabled',
'start_time': 'start_time',
'end_time': 'end_time',
'start_range_tolerance': 'start_range_tolerance',
'time_secs_or_frames': 'time_secs_or_frames',
'end_range_enabled': 'end_range_enabled',
'end_range': 'end_range',
'end_range_tolerance': 'end_range_tolerance',
'end_secs_or_frames': 'end_secs_or_frames',
'not_at_any_other_time': 'not_at_any_other_time',
'max_time_allowed': 'max_time_allowed',
'max_time_allowed_secs_or_frames': 'max_time_allowed_secs_or_frames',
'max_time_at_start': 'max_time_at_start',
'max_time_allowed_at_start': 'max_time_allowed_at_start',
'max_time_allowed_at_start_secs_or_frames': 'max_time_allowed_at_start_secs_or_frames',
'max_time_at_end': 'max_time_at_end',
'max_time_allowed_at_end': 'max_time_allowed_at_end',
'max_time_allowed_at_end_secs_or_frames': 'max_time_allowed_at_end_secs_or_frames',
'reject_on_error': 'reject_on_error',
'do_correction': 'do_correction',
'checked': 'checked'
}
def __init__(self, level_default_or_custom=None, level=None, percentage_of_frame=None, start_range_enabled=None, start_time=None, end_time=None, start_range_tolerance=None, time_secs_or_frames=None, end_range_enabled=None, end_range=None, end_range_tolerance=None, end_secs_or_frames=None, not_at_any_other_time=None, max_time_allowed=None, max_time_allowed_secs_or_frames=None, max_time_at_start=None, max_time_allowed_at_start=None, max_time_allowed_at_start_secs_or_frames=None, max_time_at_end=None, max_time_allowed_at_end=None, max_time_allowed_at_end_secs_or_frames=None, reject_on_error=None, do_correction=None, checked=None, local_vars_configuration=None): # noqa: E501
"""BlackFrameTest - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._level_default_or_custom = None
self._level = None
self._percentage_of_frame = None
self._start_range_enabled = None
self._start_time = None
self._end_time = None
self._start_range_tolerance = None
self._time_secs_or_frames = None
self._end_range_enabled = None
self._end_range = None
self._end_range_tolerance = None
self._end_secs_or_frames = None
self._not_at_any_other_time = None
self._max_time_allowed = None
self._max_time_allowed_secs_or_frames = None
self._max_time_at_start = None
self._max_time_allowed_at_start = None
self._max_time_allowed_at_start_secs_or_frames = None
self._max_time_at_end = None
self._max_time_allowed_at_end = None
self._max_time_allowed_at_end_secs_or_frames = None
self._reject_on_error = None
self._do_correction = None
self._checked = None
self.discriminator = None
if level_default_or_custom is not None:
self.level_default_or_custom = level_default_or_custom
if level is not None:
self.level = level
if percentage_of_frame is not None:
self.percentage_of_frame = percentage_of_frame
if start_range_enabled is not None:
self.start_range_enabled = start_range_enabled
if start_time is not None:
self.start_time = start_time
if end_time is not None:
self.end_time = end_time
if start_range_tolerance is not None:
self.start_range_tolerance = start_range_tolerance
if time_secs_or_frames is not None:
self.time_secs_or_frames = time_secs_or_frames
if end_range_enabled is not None:
self.end_range_enabled = end_range_enabled
if end_range is not None:
self.end_range = end_range
if end_range_tolerance is not None:
self.end_range_tolerance = end_range_tolerance
if end_secs_or_frames is not None:
self.end_secs_or_frames = end_secs_or_frames
if not_at_any_other_time is not None:
self.not_at_any_other_time = not_at_any_other_time
if | max_time_allowed is not None:
self.max_time_allowed = max_time_allowed
if max_time_allowed_secs_or_frames is not None:
self.max_time_allowed_secs_or_frames = max_time_allowed_secs_or_frames
if max_time_at_start is not None:
self.max_time_at_start = max_time_at_start |
if max_time_allowed_at_start is not None:
self.max_time_allowed_at_start = max_time_allowed_at_start
if max_time_allowed_at_start_secs_or_frames is not None:
self.max_time_allowed_at_start_secs_or_frames = max_time_allowed_at_start_secs_or_frames
if max_time_at_end is not None:
self.max_time_at_end = max_time_at_end
if max_time_allowed_at_end is not None:
self.max_time_allowed_at_end = max_time_allowed_at_end
if max_time_allowed_at_end_secs_or_frames is not None:
self.max_time_allowed_at_end_secs_or_frames = max_time_allowed_at_end_secs_or_frames
if reject_on_error is not None:
self.reject_on_error = reject_on_error
if do_correction is not None:
self.do_correction = do_correction
if checked is not None:
self.checked = checked
@property
def level_default_or_custom(self):
"""Gets the level_default_or_custom of this BlackFrameTest. # noqa: E501
:return: The level_default_or_custom of this BlackFrameTest. # noqa: E501
:rtype: DefaultOrCustomType
"""
return self._level_default_or_custom
@level_default_or_custom.setter
def level_default_or_custom(self, level_default_or_custom):
"""Sets the level_default_or_custom of this BlackFrameTest.
:param level_default_or_custom: The level_default_or_custom of this BlackFrameTest. # noqa: E501
:type: DefaultOrCustomType
"""
self._level_default_or_custom = level_def |
kosgroup/odoo | odoo/fields.py | Python | gpl-3.0 | 100,758 | 0.001965 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
""" High-level objects for fields. """
from collections import OrderedDict, defaultdict
from datetime import date, datetime
from functools import partial
from operator import attrgetter
from types import NoneType
import json
import logging
import pytz
import xmlrpclib
import psycopg2
from odoo.sql_db import LazyCursor
from odoo.tools import float_precision, float_repr, float_round, frozendict, \
html_sanitize, human_size, pg_varchar, ustr, OrderedSet
from odoo.tools import DEFAULT_SERVER_DATE_FORMAT as DATE_FORMAT
from odoo.tools import DEFAULT_SERVER_DATETIME_FORMAT as DATETIME_FORMAT
from odoo.tools.translate import html_translate, _
DATE_LENGTH = len(date.today().strfti | me(DATE_FORMAT))
DATETIME_LENGTH = len(datetime.now().strftime(DATETIME_FORMAT))
EMPTY_DICT = frozendict()
RENAMED_ATTRS = [ | ('select', 'index'), ('digits_compute', 'digits')]
_logger = logging.getLogger(__name__)
_schema = logging.getLogger(__name__[:-7] + '.schema')
Default = object() # default value for __init__() methods
class SpecialValue(object):
""" Encapsulates a value in the cache in place of a normal value. """
def __init__(self, value):
self.value = value
def get(self):
return self.value
class FailedValue(SpecialValue):
""" Special value that encapsulates an exception instead of a value. """
def __init__(self, exception):
self.exception = exception
def get(self):
raise self.exception
def _check_value(value):
""" Return ``value``, or call its getter if ``value`` is a :class:`SpecialValue`. """
return value.get() if isinstance(value, SpecialValue) else value
def copy_cache(records, env):
""" Recursively copy the cache of ``records`` to the environment ``env``. """
todo, done = set(records), set()
while todo:
record = todo.pop()
if record not in done:
done.add(record)
target = record.with_env(env)
for name in record._cache:
field = record._fields[name]
value = record[name]
if isinstance(value, BaseModel):
todo.update(value)
target._cache[name] = field.convert_to_cache(value, target, validate=False)
def resolve_mro(model, name, predicate):
""" Return the list of successively overridden values of attribute ``name``
in mro order on ``model`` that satisfy ``predicate``.
"""
result = []
for cls in type(model).__mro__:
if name in cls.__dict__:
value = cls.__dict__[name]
if not predicate(value):
break
result.append(value)
return result
class MetaField(type):
""" Metaclass for field classes. """
by_type = {}
def __new__(meta, name, bases, attrs):
""" Combine the ``_slots`` dict from parent classes, and determine
``__slots__`` for them on the new class.
"""
base_slots = {}
for base in reversed(bases):
base_slots.update(getattr(base, '_slots', ()))
slots = dict(base_slots)
slots.update(attrs.get('_slots', ()))
attrs['__slots__'] = set(slots) - set(base_slots)
attrs['_slots'] = slots
return type.__new__(meta, name, bases, attrs)
def __init__(cls, name, bases, attrs):
super(MetaField, cls).__init__(name, bases, attrs)
if cls.type and cls.type not in MetaField.by_type:
MetaField.by_type[cls.type] = cls
# compute class attributes to avoid calling dir() on fields
cls.related_attrs = []
cls.description_attrs = []
for attr in dir(cls):
if attr.startswith('_related_'):
cls.related_attrs.append((attr[9:], attr))
elif attr.startswith('_description_'):
cls.description_attrs.append((attr[13:], attr))
class Field(object):
""" The field descriptor contains the field definition, and manages accesses
and assignments of the corresponding field on records. The following
attributes may be provided when instanciating a field:
:param string: the label of the field seen by users (string); if not
set, the ORM takes the field name in the class (capitalized).
:param help: the tooltip of the field seen by users (string)
:param readonly: whether the field is readonly (boolean, by default ``False``)
:param required: whether the value of the field is required (boolean, by
default ``False``)
:param index: whether the field is indexed in database (boolean, by
default ``False``)
:param default: the default value for the field; this is either a static
value, or a function taking a recordset and returning a value; use
``default=None`` to discard default values for the field
:param states: a dictionary mapping state values to lists of UI attribute-value
pairs; possible attributes are: 'readonly', 'required', 'invisible'.
Note: Any state-based condition requires the ``state`` field value to be
available on the client-side UI. This is typically done by including it in
the relevant views, possibly made invisible if not relevant for the
end-user.
:param groups: comma-separated list of group xml ids (string); this
restricts the field access to the users of the given groups only
:param bool copy: whether the field value should be copied when the record
is duplicated (default: ``True`` for normal fields, ``False`` for
``one2many`` and computed fields, including property fields and
related fields)
:param string oldname: the previous name of this field, so that ORM can rename
it automatically at migration
.. _field-computed:
.. rubric:: Computed fields
One can define a field whose value is computed instead of simply being
read from the database. The attributes that are specific to computed
fields are given below. To define such a field, simply provide a value
for the attribute ``compute``.
:param compute: name of a method that computes the field
:param inverse: name of a method that inverses the field (optional)
:param search: name of a method that implement search on the field (optional)
:param store: whether the field is stored in database (boolean, by
default ``False`` on computed fields)
:param compute_sudo: whether the field should be recomputed as superuser
to bypass access rights (boolean, by default ``False``)
The methods given for ``compute``, ``inverse`` and ``search`` are model
methods. Their signature is shown in the following example::
upper = fields.Char(compute='_compute_upper',
inverse='_inverse_upper',
search='_search_upper')
@api.depends('name')
def _compute_upper(self):
for rec in self:
rec.upper = rec.name.upper() if rec.name else False
def _inverse_upper(self):
for rec in self:
rec.name = rec.upper.lower() if rec.upper else False
def _search_upper(self, operator, value):
if operator == 'like':
operator = 'ilike'
return [('name', operator, value)]
The compute method has to assign the field on all records of the invoked
recordset. The decorator :meth:`odoo.api.depends` must be applied on
the compute method to specify the field dependencies; those dependencies
are used to determine when to recompute the field; recomputation is
automatic and guarantees cache/database consistency. Note that the same
method can be used for several fields, you simply have to assign all the
given fields in th |
jhoenicke/python-trezor | trezorlib/tests/device_tests/test_msg_verifymessage.py | Python | lgpl-3.0 | 8,390 | 0.002145 | # This file is part of the Trezor project.
#
# Copyright (C) 2012-2018 SatoshiLabs and contributors
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License version 3
# as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the License along with this library.
# If not, see <https://www.gnu.org/licenses/lgpl-3.0.html>.
from trezorlib import btc
from .common import TrezorTest
class TestMsgVerifymessage(TrezorTest):
def test_message_long(self):
self.setup_mnemonic_nopin_nopassphrase()
ret = btc.verify_message(
self.client,
"Bitcoin",
"14LmW5k4ssUrtbAB4255zdqv3b4w1TuX9e",
bytes.fromhex(
"205ff795c29aef7538f8b3bdb2e8add0d0722ad630a140b6aefd504a5a895cbd867cbb00981afc50edd0398211e8d7c304bb8efa461181bc0afa67ea4a720a89ed"
),
"VeryLongMessage!" * 64,
)
assert ret is True
def test_message_testnet(self):
self.setup_mnemonic_nopin_nopassphrase()
ret = btc.verify_message(
self.client,
"Testnet",
"mirio8q3gtv7fhdnmb3TpZ4EuafdzSs7zL",
bytes.fromhex(
"209e23edf0e4e47ff1dec27f32cd78c50e74ef018ee8a6adf35ae17c7a9b0dd96f48b493fd7dbab03efb6f439c6383c9523b3bbc5f1a7d158a6af90ab154e9be80"
),
"This is an example of a signed message.",
)
assert ret is True
def test_message_verify(self):
self.setup_mnemonic_nopin_nopassphrase()
# uncompressed pubkey - OK
res = btc.verify_message(
self.client,
"Bitcoin",
"1JwSSubhmg6iPtRjtyqhUYYH7bZg3Lfy1T",
bytes.fromhex(
"1ba77e01a9e17ba158b962cfef5f13dfed676ffc2b4bada24e58f784458b52b97421470d001d53d5880cf5e10e76f02be3e80bf21e18398cbd41e8c3b4af74c8c2"
),
"This is an example of a signed message.",
)
assert res is True
# uncompressed pubkey - FAIL - wrong sig
res = btc.verify_message(
self.client,
"Bitcoin",
"1JwSSubhmg6iPtRjtyqhUYYH7bZg3Lfy1T",
bytes.fromhex(
"1ba77e01a9e17ba158b962cfef5f13dfed676ffc2b4bada24e58f784458b52b97421470d001d53d5880cf5e10e76f02be3e80bf21e18398cbd41e8c3b4af74c800"
),
"This is an example of a signed message.",
)
assert res is False
# uncompressed pubkey - FAIL - wrong msg
res = btc.verify_message(
self.client,
"Bitcoin",
"1JwSSubhmg6iPtRjtyqhUYYH7bZg3Lfy1T",
bytes.fromhex(
"1ba77e01a9e17ba158b962cfef5f13dfed676ffc2b4bada24e58f784458b52b97421470d001d53d5880cf5e10e76f02be3e80bf21e18398cbd41e8c3b4af74c8c2"
),
"This is an example of a signed message!",
)
assert res is False
# compressed pubkey - OK
res = btc.verify_message(
self.client,
"Bitcoin",
"1C7zdTfnkzmr13HfA2vNm5SJYRK6nEKyq8",
bytes.fromhex(
"1f44e3e461f7ca9f57c472ce1a28214df1de1dadefb6551a32d1907b80c74d5a1fbfd6daaba12dd8cb06699ce3f6941fbe0f3957b5802d13076181046e741eaaaf"
),
"This is an example of a signed message.",
)
assert res is True
# compressed pubkey - FAIL - wrong sig
res = btc.verify_message(
self.client,
"Bitcoin",
"1C7zdTfnkzmr13HfA2vNm5SJYRK6nEKyq8",
bytes.fromhex(
"1f44e3e461f7ca9f57c472ce1a28214df1de1dadefb6551a32d1907b80c74d5a1fbfd6daaba12dd8cb06699ce3f6941fbe0f3957b5802d13076181046e741eaa00"
),
"This is an example of a signed message.",
)
assert res is False
# compressed pubkey - FAIL - wrong msg
res = btc.verify_message(
self.client,
"Bitcoin",
"1C7zdTfnkzmr13HfA2vNm5SJYRK6nEKyq8",
bytes.fromhex(
"1f44e3e461f7ca9f57c472ce1a28214df1de1dadefb6551a32d1907b80c74d5a1fbfd6daaba12dd8cb06699ce3f6941fbe0f3957b5802d13076181046e741eaaaf"
),
"This is an example of a signed message!",
)
assert res is False
# trezor pubkey - OK
res = btc.verify_message(
self.client,
"Bitcoin",
"14LmW5k4ssUrtbAB4255zdqv3b4w1TuX9e",
bytes.fromhex(
"209e23edf0e4e47ff1dec27f32cd78c50e74ef018ee8a6adf35ae17c7a9b0dd96f48b493fd7dbab03efb6f439c6383c9523b3bbc5f1a7d158a6af90ab154e9be80"
),
"This is an example of a signed message.",
)
assert res is True
# trezor pubkey - FAIL - wrong sig
res = btc.verify_message(
self.client,
"Bitcoin",
"14LmW5k4ssUrtbAB4255zdqv3b4w1TuX9e",
bytes.fromhex(
"209e23edf0e4e47ff1dec27f32cd78c50e74ef018ee8a6adf35ae17c7a9b0dd96f48b493fd7dbab03efb6f439c6383c9523b3bbc5f1a7d158a6af90ab154e9be00"
),
"This is an example of a signed message.",
)
assert res is False
# trezor pubkey - FAIL - wrong msg
res = btc.verify_message(
self.client,
"Bitcoin",
"14LmW5k4ssUrtbAB4255zdqv3b4w1TuX9e",
bytes.fromhex(
"209e23edf0e4e47ff1dec27f32cd78c50e74ef018ee8a6adf35ae17c7a9b0dd96f48b493fd7dbab03efb6f439c6383c9523b3bbc5f1a7d158a6af90ab154e9be80"
),
"This is an examp | le of a signed message!",
)
assert res is False
def test_message_verify_bcash(self):
self.setup_mnemonic_nopin_nopassphrase()
res = btc.verify_message(
self.client,
"Bcash",
"bitcoincash:qqj22md58nm09vpwsw82fyletkxkq36zxyxh322pru",
bytes.fromhex(
| "209e23edf0e4e47ff1dec27f32cd78c50e74ef018ee8a6adf35ae17c7a9b0dd96f48b493fd7dbab03efb6f439c6383c9523b3bbc5f1a7d158a6af90ab154e9be80"
),
"This is an example of a signed message.",
)
assert res is True
def test_verify_bitcoind(self):
self.setup_mnemonic_nopin_nopassphrase()
res = btc.verify_message(
self.client,
"Bitcoin",
"1KzXE97kV7DrpxCViCN3HbGbiKhzzPM7TQ",
bytes.fromhex(
"1cc694f0f23901dfe3603789142f36a3fc582d0d5c0ec7215cf2ccd641e4e37228504f3d4dc3eea28bbdbf5da27c49d4635c097004d9f228750ccd836a8e1460c0"
),
u"\u017elu\u0165ou\u010dk\xfd k\u016f\u0148 \xfap\u011bl \u010f\xe1belsk\xe9 \xf3dy",
)
assert res is True
def test_verify_utf(self):
self.setup_mnemonic_nopin_nopassphrase()
words_nfkd = u"Pr\u030ci\u0301s\u030cerne\u030c z\u030clut\u030couc\u030cky\u0301 ku\u030an\u030c u\u0301pe\u030cl d\u030ca\u0301belske\u0301 o\u0301dy za\u0301ker\u030cny\u0301 uc\u030cen\u030c be\u030cz\u030ci\u0301 pode\u0301l zo\u0301ny u\u0301lu\u030a"
words_nfc = u"P\u0159\xed\u0161ern\u011b \u017elu\u0165ou\u010dk\xfd k\u016f\u0148 \xfap\u011bl \u010f\xe1belsk\xe9 \xf3dy z\xe1ke\u0159n\xfd u\u010de\u0148 b\u011b\u017e\xed pod\xe9l z\xf3ny \xfal\u016f"
res_nfkd = btc.verify_message(
self.client,
"Bitcoin",
"14LmW5k4ssUrtbAB4255zdqv3b4w1TuX9e",
bytes.fromhex(
"20d0ec02ed8da8df23e7fe9e680e7867cc290312fe1c970749d8306ddad1a1eda41c6a771b13d495dd225b13b0a9d0f915a984ee3d0703f92287bf8009fbb9f7d6"
),
words_nfkd,
)
res_nfc = btc.verify_message(
self.client,
"Bitcoin",
"14LmW5k4ssUrtbAB4255zdqv3b4w1TuX9e",
bytes.fromhex(
"20d0ec02ed8da8df23e7fe9e680e7867cc |
florian-f/sklearn | examples/ensemble/plot_adaboost_multiclass.py | Python | bsd-3-clause | 3,612 | 0.002215 | """
=====================================
Multi-class AdaBoosted Decision Trees
=====================================
This example reproduces Figure 1 of Zhu et al [1] and shows how boosting can
improve prediction accuracy on a multi-class problem. The classification
dataset is constructed by taking a ten-dimensional standard normal distribution
and defining three classes separated by nested concentric ten-dimensional
spheres such that roughly equal numbers of samples are in each class (quantiles
of the :math:`\chi^2` distribution).
The performance of the SAMME and SAMME.R [1] algorithms are compared. SAMME.R
uses the probability estimates to update the additive model, while SAMME uses
the classifications only. As the example illustrates, the SAMME.R algorithm
typically converges faster than SAMME, achieving a lower test error with fewer
boosting iterations. The error of each algorithm on the test set after each
boosting iteration is shown on the left, the classification error on the test
set of each tree is shown in the middle, and the boost weight of each tree is
shown on the right. All trees have a weight of one in the SAMME.R algorithm and
therefore are not shown.
.. [1] J. Zhu, H. Zou, S. Rosset, T. Hastie, "Multi-class AdaBoost", 2009.
"""
print(__doc__)
# Author: Noel Dawe <noel.dawe@gmail.com>
#
# License: BSD
from sklearn.externals.six.moves import zip
import pylab as pl
from sklearn.datasets import make_gaussian_quantiles
from sklearn.ensemble import AdaBoostClassifier
from sklearn.externals.six.moves import xrange
from sklearn.metrics import accuracy_score
from sklearn.tree import DecisionTreeClassifier
X, y = make_gaussian_quantiles(n_samples=13000, n_features=10,
n_classes=3, random_state=1)
n_split = 3000
X_train, X_test = X[:n_split], X[n_split:]
y_train, y_test = y[:n_split], y[n_split:]
bdt_real = AdaBoostClassifier(
DecisionTreeClassifier(max_depth=2),
n_estimators=600,
learning_rate=1)
bdt_discrete = AdaBoostClassifier(
DecisionTreeClassifier(max_depth=2),
n_estimators=600,
learning_rate=1.5,
algorithm="SAMME")
bdt_real.fit(X_train, y_train)
bdt_discrete.fit(X_train, y_train)
real_test_errors = []
discrete_test_errors = []
for real_test_predict, discrete_train_predict in zip(
bdt_real.staged_predict(X_test), bdt_discrete.staged_predict(X_test)):
real_test_errors.append(
1. - accuracy_score(real_test_predict, y_test))
discrete_test_errors.append(
1. - accuracy_score(discrete_train_predict, y_test))
n_trees = xrange(1, len(bdt_discrete) + 1)
pl.figure(figsize=(15, 5))
pl.subplot(131)
pl.plot(n_trees, discrete_test_errors, c='black', label='SAMME')
pl.plot(n_trees, real_test_errors, c='black',
linestyle='dashed', label='SAMME.R')
pl.legend()
pl.ylim(0.18, 0.62)
pl.ylabel('Test Error')
pl.xlabel('Number of Trees')
pl.subplot | (132)
pl.plot(n_trees, bdt_discrete.estimator_errors_, "b", label='SAMME', alpha=.5)
pl.plot(n_trees, bdt_real.estimator_errors_, "r", label='SAMME.R', alpha=.5)
pl.legend()
pl.ylabe | l('Error')
pl.xlabel('Number of Trees')
pl.ylim((.2,
max(bdt_real.estimator_errors_.max(),
bdt_discrete.estimator_errors_.max()) * 1.2))
pl.xlim((-20, len(bdt_discrete) + 20))
pl.subplot(133)
pl.plot(n_trees, bdt_discrete.estimator_weights_, "b", label='SAMME')
pl.legend()
pl.ylabel('Weight')
pl.xlabel('Number of Trees')
pl.ylim((0, bdt_discrete.estimator_weights_.max() * 1.2))
pl.xlim((-20, len(bdt_discrete) + 20))
# prevent overlapping y-axis labels
pl.subplots_adjust(wspace=0.25)
pl.show()
|
apagac/robottelo | tests/foreman/ui/test_sso.py | Python | gpl-3.0 | 10,139 | 0.000197 | # -*- encoding: utf-8 -*-
"""Test class for installer (UI)"""
from robottelo.common.decorators import stubbed
from robottelo.test import UITestCase
class TestSSOUI(UITestCase):
# Notes for SSO testing:
# Of interest... In some test cases I've placed a few comments prefaced
# with "devnote:" These are -- obviously -- notes from developers that
# might help reiterate something important or a reminder of way(s) to test
# something.
# There may well be more cases that I have missed for this feature, and
# possibly other LDAP types. These (in particular, the LDAP variations)
# can be easily added later.
@stubbed()
def test_sso_kerberos_basic_no_roles(self):
"""@test: SSO - kerberos login (basic) that has no rights
@feature: SSO
@setup: assure SSO with kerberos is set up.
@steps:
1. attempt to login using a kerberos ID
@assert: Log in to foreman UI successfully but cannot access anything
useful in UI
@status: Manual
"""
@stubbed()
def test_sso_kerberos_basic_roles(self):
"""@test: SSO - kerberos login | (basic) that has rights assigned
@ | feature: SSO
@setup: assure SSO with kerberos is set up.
@steps:
1. attempt to login using a kerberos ID
@assert: Log in to foreman UI successfully and can access functional
areas in UI
@status: Manual
"""
@stubbed()
def test_sso_kerberos_user_disabled(self):
"""@test: Kerberos user activity when kerb account has been deleted or
deactivated
@feature: SSO
@steps:
1. Login to the foreman UI
2. Delete or disable userid on kerb server side
@assert: This is handled gracefully (user is logged out perhaps?)
and no data corruption
@status: Manual
"""
@stubbed()
def test_sso_ipa_basic_no_roles(self):
"""@test: Login with LDAP - IPA for user with no roles/rights
@feature: SSO
@setup: assure properly functioning IPA server for authentication
@steps:
1. Login to server with an IPA id
@assert: Log in to foreman UI successfully but cannot access
functional areas of UI
@status: Manual
"""
@stubbed()
def test_sso_ipa_basic_roles(self):
"""@test: Login with LDAP - IPA for user with roles/rights
@feature: SSO
@setup: assure properly functioning IPA server for authentication
@steps:
1. Login to server with an IPA id
@assert: Log in to foreman UI successfully and can access appropriate
functional areas in UI
@status: Manual
"""
@stubbed()
def test_sso_ipa_user_disabled(self):
"""@test: LDAP - IPA user activity when IPA account has been deleted
or deactivated
@feature: SSO
@steps:
1. Login to the foreman UI
2. Delete or disable userid on IPA server side
@assert: This is handled gracefully (user is logged out perhaps?)
and no data corruption
@status: Manual
"""
@stubbed()
def test_sso_openldap_basic_no_roles(self):
"""@test: Login with LDAP - OpenLDAP that has no roles / rights
@feature: SSO
@setup: assure properly functioning OpenLDAP server for authentication
@steps:
1. Login to server with an OpenLDAP id
@assert: Log in to foreman UI successfully but has no access to
functional areas of UI.
@status: Manual
"""
@stubbed()
def test_sso_openldap_basic_roles(self):
"""@test: Login with LDAP - OpenLDAP for user with roles/rights assigned
@feature: SSO
@setup: assure properly functioning OpenLDAP server for authentication
@steps:
1. Login to server with an OpenLDAP id
@assert: Log in to foreman UI successfully and can access appropriate
functional areas in UI
@status: Manual
"""
@stubbed()
def test_sso_openldap_user_disabled(self):
"""@test: LDAP - OpenLDAP user activity when OpenLDAP account has been
deleted or deactivated
@feature: SSO
@steps:
1. Login to the foreman UI
2. Delete or disable userid on OpenLDAP server side
@assert: This is handled gracefully (user is logged out perhaps?) and
no data corruption
@status: Manual
"""
    @stubbed()
    def test_sso_multiple_ldap_backends(self):
        """@test: SSO - multiple LDAP servers for one kafo instance
        @feature: SSO
        @setup: assure more than one ldap server backend is provided for sat6
        @steps:
        1. Attempt to login with a user that exists on one ldap server
        2. Logout and attempt to login with a user that exists on other ldap
        server(s)
        @assert: Log in to foreman UI successfully for users on both LDAP
        servers.
        @status: Manual
        """
    @stubbed()
    def test_sso_multiple_ldap_namespace_collision(self):
        # devnote:
        # users have auth_source which could distinguish them, but validation
        # would fail atm
        """@test: SSO - multiple LDAP servers with a colliding namespace
        (i.e. the same login, such as "jsmith", exists on several servers)
        @feature: SSO
        @setup: assure more than one ldap server backend is provided for the
        instance
        @steps:
        1. Attempt to login with a user that exists on one ldap server
        2. Logout and attempt to login with a user that exists on other
        ldap server(s)
        @assert: Foreman should have some method for distinguishing/specifying
        which server a user comes from.
        @status: Manual
        """
@stubbed()
def test_sso_ldap_user_named_admin(self):
# devnote:
# shouldn't be a problem since admin from internal DB will be used at
# first, worth of testing thou, however if authentication is done by
# external system (IPA, ...) which can create users in foreman,
# I'm not sure about result
"""@test: SSO - what happens when we have an ldap user named "admin"?
@feature: SSO
@steps:
1. Try to login with ldap user "admin"
@assert: Login from local db user "admin" overrides any ldap user
"admin"
@status: Manual
"""
@stubbed()
def test_sso_ldap_server_down_before_session(self):
"""@test: SSO - what happens when we have an ldap server that goes down
before logging in?
@feature: SSO
@steps:
1. Try to login with ldap user when server is non-responsive
@assert: UI does handles situation gracefully, perhaps informing user
that LDAP instance is not responding
@status: Manual
"""
@stubbed()
def test_sso_ldap_server_down_during_session(self):
"""@test: SSO - what happens when we have an ldap server that goes down
after login?
@feature: SSO
@steps:
1. Try to login with ldap user
2. While logged in with ldap user, disconnect access to ldap server.
@assert: Situation is handled gracefully and without serious data
loss on foreman server
@status: Manual
"""
@stubbed()
def test_sso_usergroup_roles_read(self):
"""@test: Usergroups: group roles get pushed down to user
@feature: SSO
@setup: assign roles to an LDAP usergroup
@steps:
1. Login to foreman with LDAP user that is part of aforementioned
usergroup
@assert: User has access to all functional areas that are assigned to
aforementioned usergroup.
@status: Manual
"""
@stubbed()
def test_sso_usergroup_roles_update(self):
"""@test: Usergroups: added usergroup roles get pushed down to user
@feature: SSO
@setup: assign additional roles to an LDAP usergroup
@steps:
1. Login to foreman with LDAP user that is part of aforementioned
usergroup
@assert: User ha |
amith01994/intellij-community | python/testData/intentions/afterReturnTypeInNewGoogleDocString.py | Python | apache-2.0 | 68 | 0.029412 | d | ef f(x):
"""
Returns:
object:
"" | "
return 42 |
BeTeK/EliteMerchant | src/ui/GuideTab.py | Python | bsd-3-clause | 615 | 0.003252 |
import ui.GuideTabUI
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtCore import QVariant
import ui.TabAbstract
class GuideTab(ui.GuideTabUI.Ui_Dialog, QtWidgets.QWidget, ui.TabAbstract.TabAbstract):
    """Tab widget that displays the static user guide.

    ``db`` and ``analyzer`` are accepted for signature compatibility with the
    other tab classes but are not used by this tab.
    """

    def __init__(self, db, analyzer, tabName, mainWindow):
        # NOTE(review): super() is anchored at QtWidgets.QWidget, which skips
        # GuideTab's own position in the MRO; kept as-is to preserve behavior.
        super(QtWidgets.QWidget, self).__init__()
        self.setupUi(self)
        self.mainWindow = mainWindow
        self.tabName = tabName

    def setTabName(self, name):
        """Store a new tab name (the displayed title is fixed, see getTabName)."""
        self.tabName = name

    def getType(self):
        """Type identifier used when persisting/restoring tabs."""
        return "guide"

    def getTabName(self):
        # NOTE(review): ignores self.tabName and always returns a fixed
        # title -- confirm this is intentional.
        return "User Guide"

    def dispose(self):
        """No resources to release for this tab."""
        pass
|
Basic-Components/auth-center | auth-center/App/auth/login_agents.py | Python | mit | 533 | 0 | from App.model import UserAgents
async def agent_save(request, user):
    """Record the requesting client's User-Agent string for *user*.

    If the UA string was seen before, bump its hit count and last-seen time;
    otherwise insert a new UserAgents row.
    """
    import datetime  # local import; the (not fully visible) module header lacked it

    ua = request.headers['User-Agent']
    now = datetime.datetime.now()
    # NOTE(review): this reads user.ips, which looks copy-pasted from an IP
    # tracker -- confirm whether the relation should be the user-agent one.
    uas = [await i for i in user.ips]
    known = [i for i in uas if i.content == ua]  # fixed: was undefined names `contents`/`content`
    if known:
        ua_in = known[0]
        ua_in.utime = now
        ua_in.count += 1
        await ua_in.save()  # fixed: was undefined name `ip_in`
    else:
        await UserAgents.insert_many([{
            "content": ua,
            'ctime': now,
            'utime': now,
            'user': user
        }])
|
pymagic-org/pymagic_driver | bmp180.py | Python | mit | 5,278 | 0.010421 | import pyb
from struct import unpack as unp
# BMP180 default address
BMP180_I2CADDR = 0x77
# Operating Modes
BMP180_ULTRALOWPOWER = 0
BMP180_STANDARD = 1
BMP180_HIGHRES = 2
BMP180_ULTRAHIGHRES = 3
# BMP180 Registers
BMP180_CAL_AC1 = 0xAA
BMP180_CAL_AC2 = 0xAC
BMP180_CAL_AC3 = 0xAE
BMP180_CAL_AC4 = 0xB0
BMP180_CAL_AC5 = 0xB2
BMP180_CAL_AC6 = 0xB4
BMP180_CAL_B1 = 0xB6
BMP180_CAL_B2 = 0xB8
BMP180_CAL_MB = 0xBA
BMP180_CAL_MC = 0xBC
BMP180_CAL_MD = 0xBE
BMP180_CONTROL = 0xF4
BMP180_TEMPDATA = 0xF6
BMP180_PRESSUREDATA = 0xF6
# Commands
BMP180_READTEMPCMD = 0x2E
BMP180_READPRESSUREDCMD = 0x34
class BMP180():
    """Driver for the Bosch BMP180 digital pressure/temperature sensor on I2C.

    Raw ADC readings are corrected with the factory calibration coefficients
    following the compensation algorithm from the BMP180 datasheet.  ``mode``
    selects the pressure oversampling setting
    (BMP180_ULTRALOWPOWER .. BMP180_ULTRAHIGHRES).
    """

    def __init__(self, bus=1, address=BMP180_I2CADDR, mode=BMP180_STANDARD):
        self._mode = mode
        self._address = address
        self._bus = pyb.I2C(bus, pyb.I2C.MASTER)
        # Calibration coefficients are read once and reused for every sample.
        self._load_calibration()

    def _read_byte(self, cmd):
        """Read one unsigned byte from register ``cmd``."""
        return self._bus.mem_read(1, self._address, cmd)[0]

    def _read_u16(self, cmd):
        """Read a big-endian unsigned 16-bit value starting at register ``cmd``."""
        result = self._bus.mem_read(2, self._address, cmd)
        return (result[0] << 8) + result[1]

    def _read_s16(self, cmd):
        """Read a big-endian signed (two's complement) 16-bit value."""
        result = self._read_u16(cmd)
        if result > 32767:
            result -= (1 << 16)
        return result

    def _read_u24(self, cmd):
        """Read a big-endian unsigned 24-bit value starting at register ``cmd``."""
        result = self._bus.mem_read(3, self._address, cmd)
        return (result[0] << 16) + (result[1] << 8) + result[2]

    def _write_byte(self, cmd, val):
        """Write one byte ``val`` into register ``cmd``."""
        self._bus.mem_write(val, self._address, cmd)

    def _load_calibration(self):
        """Read the factory calibration coefficients from the sensor EEPROM."""
        self.cal_AC1 = self._read_s16(BMP180_CAL_AC1)
        self.cal_AC2 = self._read_s16(BMP180_CAL_AC2)
        self.cal_AC3 = self._read_s16(BMP180_CAL_AC3)
        self.cal_AC4 = self._read_u16(BMP180_CAL_AC4)
        self.cal_AC5 = self._read_u16(BMP180_CAL_AC5)
        self.cal_AC6 = self._read_u16(BMP180_CAL_AC6)
        self.cal_B1 = self._read_s16(BMP180_CAL_B1)
        self.cal_B2 = self._read_s16(BMP180_CAL_B2)
        self.cal_MB = self._read_s16(BMP180_CAL_MB)
        self.cal_MC = self._read_s16(BMP180_CAL_MC)
        self.cal_MD = self._read_s16(BMP180_CAL_MD)

    def read_raw_temp(self):
        """Read the raw (uncompensated) temperature ADC value."""
        self._write_byte(BMP180_CONTROL, BMP180_READTEMPCMD)
        pyb.udelay(4500)  # datasheet: temperature conversion takes up to 4.5 ms
        return self._read_s16(BMP180_TEMPDATA)

    def read_raw_pressure(self):
        """Read the raw (uncompensated) pressure ADC value."""
        # Maximum conversion time in microseconds per oversampling mode.
        conversion_time = [5000, 8000, 14000, 26000]
        self._write_byte(BMP180_CONTROL, BMP180_READPRESSUREDCMD + (self._mode << 6))
        pyb.udelay(conversion_time[self._mode])
        return self._read_u24(BMP180_PRESSUREDATA) >> (8 - self._mode)

    def read_temperature(self):
        """Return the compensated temperature in degrees Celsius."""
        UT = self.read_raw_temp()
        X1 = ((UT - self.cal_AC6) * self.cal_AC5) >> 15
        # NOTE(review): the datasheet uses integer division here; float `/`
        # plus the int() truncation below matches it for typical values.
        X2 = (self.cal_MC << 11) / (X1 + self.cal_MD)
        B5 = X1 + X2
        return (int(B5 + 8) >> 4) / 10.0

    def read_pressure(self):
        """Return the compensated pressure in Pascals."""
        UT = self.read_raw_temp()
        UP = self.read_raw_pressure()
        X1 = ((UT - self.cal_AC6) * self.cal_AC5) >> 15
        X2 = (self.cal_MC << 11) / (X1 + self.cal_MD)
        B5 = X1 + X2
        # Pressure calculation per the datasheet compensation algorithm.
        B6 = int(B5 - 4000)
        X1 = (self.cal_B2 * (B6 * B6) >> 12) >> 11
        X2 = (self.cal_AC2 * B6) >> 11
        X3 = X1 + X2
        B3 = (((self.cal_AC1 * 4 + X3) << self._mode) + 2) / 4
        X1 = (self.cal_AC3 * B6) >> 13
        X2 = (self.cal_B1 * ((B6 * B6) >> 12)) >> 16
        X3 = ((X1 + X2) + 2) >> 2
        B4 = (self.cal_AC4 * (X3 + 32768)) >> 15
        B7 = (UP - B3) * (50000 >> self._mode)
        if B7 < 0x80000000:
            p = int((B7 * 2) / B4)
        else:
            p = int((B7 / B4) * 2)
        X1 = (p >> 8) * (p >> 8)
        X1 = (X1 * 3038) >> 16
        X2 = (-7357 * p) >> 16
        p = p + ((X1 + X2 + 3791) >> 4)
        return p

    def read_altitude(self, sealevel_pa=101325.0):
        """Return the altitude in meters for the given sea-level pressure.

        Calculation taken straight from section 3.6 of the datasheet.
        """
        pressure = float(self.read_pressure())
        return 44330.0 * (1.0 - pow(pressure / sealevel_pa, (1.0 / 5.255)))

    def read_sealevel_pressure(self, altitude_m=0.0):
        """Return the sea-level pressure in Pascals for a known altitude in meters."""
        pressure = float(self.read_pressure())
        return pressure / pow(1.0 - altitude_m / 4433.0, 5.255)
|
ajillion-by-crossrider/ajillion-rpc-client | rpcclient/test/test_with_httpretty.py | Python | apache-2.0 | 17,368 | 0.001036 | import concurrent
from concurrent.futures._base import Future
import json
from threading import Barrier
import time
import unittest
import requests_mock
from rpcclient.client import RpcClient
from rpcclient.deserialize import DictDeserializer
from rpcclient.exceptions import RemoteFailedError
from rpcclient.handlers import RequestHandler
from rpcclient.test.testutils import insert_id, create_mock_rpc_client
UNMAPPED_BEHAVIOUR = DictDeserializer.UnmappedBehaviour
__author__ = 'yoav.luft@ajillionmax.com'
class ClientTests(unittest.TestCase):
    def setUp(self):
        """Create a mocked RpcClient (performs a fake login) for each test."""
        super().setUp()
        self.client = create_mock_rpc_client()
    def test_login(self):
        # The mocked login handshake should have stored the session token.
        self.assertEqual(self.client.token, "yea")
@requests_mock.mock()
def test_get_first_level_method(self, mock):
mock.register_uri('POST', "http://server/api/", status_code=200, json=insert_id(
{"error": None, "jsonrpc": "2.0", "id": {},
"result": {"report": "success"}}),
)
self.client.test(arg1="arg")
request = mock.request_history[-1].json()
self.assertRegex(request['jsonrpc'], '2.0')
self.assertRegex(request['method'], 'test')
self.assertIn('token', request['params'])
self.assertRegex(request['params']['token'], 'yea')
self.assertIn('arg1', request['params'])
self.assertRegex(request['params']['arg1'], 'arg')
@reque | sts_mock.mock()
def test_get_second_level_method(self, mock):
mock.register_uri('POST', "http://server/api/", status_code=200, json=insert_id(
{"error": None, "jsonrpc": "2.0", "id": {},
"result": {"report": "success"}}),
)
self.client.test.level2(arg1="arg")
request = mock. | request_history[-1].json()
self.assertRegex(request['jsonrpc'], '2.0')
self.assertRegex(request['method'], 'test.level2')
self.assertIn('token', request['params'])
self.assertRegex(request['params']['token'], 'yea')
self.assertIn('arg1', request['params'])
self.assertRegex(request['params']['arg1'], 'arg')
@requests_mock.mock()
def test_async_request(self, mock):
mock.register_uri('POST', "http://server/api/", [
{'status_code': 200, 'json': insert_id(
{"error": None, "jsonrpc": "2.0", "id": {},
"result": {"report_token": "08d7d7bc608848668b3afa6b528a45d8"}})},
{'status_code': 200, 'json': insert_id(
{"error": None, "jsonrpc": "2.0", "id": {},
"result": {"status": "processing"}})},
{'status_code': 200, 'json': insert_id(
{"error": None, "jsonrpc": "2.0", "id": {},
"result": {"status": "ready"}})},
{'status_code': 200, 'json': insert_id(
{"error": None, "jsonrpc": "2.0", "id": {},
"result": {"report": "success"}})},
])
start_time = time.time()
interval_time = 2
response = self.client.test.task(_sleep_interval=interval_time)
self.assertEqual(response, {"report": "success"})
self.assertGreater(time.time() - start_time, interval_time, "Expected request to wait between calls")
last_request = mock.request_history[-1].json()
self.assertIn('method', last_request)
self.assertRegex(last_request['method'], 'report.data.get')
self.assertIn('params', last_request)
self.assertIn('report_token', last_request['params'])
self.assertRegex(last_request['params']['report_token'], "08d7d7bc608848668b3afa6b528a45d8")
@requests_mock.mock()
def test_async_timeout(self, mock):
mock.register_uri('POST', "http://server/api/", [
{'status_code': 200, 'json': insert_id(
{"error": None, "jsonrpc": "2.0", "id": {},
"result": {"report_token": "08d7d7bc608848668b3afa6b528a45d8"}})},
{'status_code': 200, 'json': insert_id(
{"error": None, "jsonrpc": "2.0", "id": {},
"result": {"status": "processing"}})},
{'status_code': 200, 'json': insert_id(
{"error": None, "jsonrpc": "2.0", "id": {},
"result": {"status": "processing"}})},
{'status_code': 200, 'json': insert_id(
{"error": None, "jsonrpc": "2.0", "id": {},
"result": {"status": "processing"}})},
{'status_code': 200, 'json': insert_id(
{"error": None, "jsonrpc": "2.0", "id": {},
"result": {"status": "ready"}})},
{'status_code': 200, 'json': insert_id(
{"error": None, "jsonrpc": "2.0", "id": {},
"result": {"report": "success"}})},
])
self.assertRaises(TimeoutError, self.client.test.task, _timeout=3, _sleep_interval=2)
@requests_mock.mock()
def test_async_timeout_from_configuration(self, mock):
mock.register_uri('POST', "http://server/api/", [
{'status_code': 200, 'json': insert_id(
{"error": None, "jsonrpc": "2.0", "id": {},
"result": {"report_token": "08d7d7bc608848668b3afa6b528a45d8"}})},
{'status_code': 200, 'json': insert_id(
{"error": None, "jsonrpc": "2.0", "id": {},
"result": {"status": "processing"}})},
{'status_code': 200, 'json': insert_id(
{"error": None, "jsonrpc": "2.0", "id": {},
"result": {"status": "processing"}})},
{'status_code': 200, 'json': insert_id(
{"error": None, "jsonrpc": "2.0", "id": {},
"result": {"status": "processing"}})},
{'status_code': 200, 'json': insert_id(
{"error": None, "jsonrpc": "2.0", "id": {},
"result": {"status": "ready"}})},
{'status_code': 200, 'json': insert_id(
{"error": None, "jsonrpc": "2.0", "id": {},
"result": {"report": "success"}})},
])
self.client.configuration['timeout'] = 3
self.client.configuration['sleep_interval'] = 2
self.assertRaises(TimeoutError, self.client.test.task)
@requests_mock.mock()
def test_async_handler_ignores_single_failure_for_status(self, mock):
mock.register_uri('POST', "http://server/api/", [
{'status_code': 200, 'json': insert_id(
{"error": None, "jsonrpc": "2.0", "id": {},
"result": {"report_token": "08d7d7bc608848668b3afa6b528a45d8"}})},
{'status_code': 200, 'json': insert_id(
{"error": None, "jsonrpc": "2.0", "id": {},
"result": {"status": "processing"}})},
{'status_code': 200, 'json': insert_id(
{"error": None, "jsonrpc": "2.0", "id": {},
"result": {}})},
{'status_code': 200, 'json': insert_id(
{"error": None, "jsonrpc": "2.0", "id": {},
"result": {"status": "processing"}})},
{'status_code': 200, 'json': insert_id(
{"error": None, "jsonrpc": "2.0", "id": {},
"result": {"status": "ready"}})},
{'status_code': 200, 'json': insert_id(
{"error": None, "jsonrpc": "2.0", "id": {},
"result": {"report": "success"}})},
])
interval_time = 1
response = self.client.test.task(_sleep_interval=interval_time)
self.assertEqual(response, {"report": "success"})
def test_override_handlers(self):
called_with_params = {}
class MockHandler(RequestHandler):
def __init__(self, method, url, headers, token, configuration=None, **kwargs):
super().__init__(method, url, headers, token, configuration, **kwargs)
called_with_params['method'] = method
def handle(self, **kwargs):
return 'Mock value'
client = RpcClient(configuration={
'host': 'http://mockhost',
'handlers': [
(lambda *args, **kwargs: T |
miurahr/translate | translate/convert/test_po2prop.py | Python | gpl-2.0 | 17,884 | 0.001626 | from io import BytesIO
from translate.convert import po2prop, test_convert
from translate.storage import po
class TestPO2Prop:
    def po2prop(self, posource):
        """Convert PO source text to a .properties store without requiring files.

        :param posource: contents of a PO file as a string
        :return: the converted properties store
        """
        inputfile = BytesIO(posource.encode())
        inputpo = po.pofile(inputfile)
        convertor = po2prop.po2prop()
        outputprop = convertor.convertstore(inputpo)
        return outputprop
    def merge2prop(
        self,
        propsource,
        posource,
        personality="java",
        remove_untranslated=False,
        encoding="utf-8",
    ):
        """Merge PO translations into a .properties template without requiring files.

        :param propsource: .properties template, as str or bytes
        :param posource: PO translations as a string
        :param personality: properties dialect ("java", "mozilla", ...)
        :param remove_untranslated: drop entries that have no translation
        :param encoding: codec used to decode the merged output
        :return: the merged .properties content as a string
        """
        inputfile = BytesIO(posource.encode())
        inputpo = po.pofile(inputfile)
        # The template may already be bytes (e.g. for non-UTF-8 encodings).
        templatefile = BytesIO(
            propsource.encode() if isinstance(propsource, str) else propsource
        )
        # templateprop = properties.propfile(templatefile)
        convertor = po2prop.reprop(
            templatefile,
            inputpo,
            personality=personality,
            remove_untranslated=remove_untranslated,
        )
        outputprop = convertor.convertstore()
        print(outputprop)
        return outputprop.decode(encoding)
def test_merging_simple(self):
"""check the simplest case of merging a translation"""
posource = """#: prop\nmsgid "value"\nmsgstr "waarde"\n"""
proptemplate = """prop=value\n"""
propexpected = """prop=waarde\n"""
propfile = self.merge2prop(proptemplate, posource)
print(propfile)
assert propfile == propexpected
def test_merging_untranslated(self):
"""check the simplest case of merging an untranslated unit"""
posource = """#: prop\nmsgid "value"\nmsgstr ""\n"""
proptemplate = """prop=value\n"""
propexpected = proptemplate
propfile = self.merge2prop(proptemplate, posource)
print(propfile)
assert propfile == propexpected
def test_hard_newlines_preserved(self):
"""check that we preserver hard coded newlines at the start and end of sentence"""
posource = """#: prop\nmsgid "\\nvalue\\n\\n"\nmsgstr "\\nwaarde\\n\\n"\n"""
proptemplate = """prop=\\nvalue\\n\\n\n"""
propexpected = """prop=\\nwaarde\\n\\n\n"""
propfile = self.merge2prop(proptemplate, posource)
print(propfile)
assert propfile == propexpected
def test_space_preservation(self):
"""check that we preserve any spacing in properties files when merging"""
posource = """#: prop\nmsgid "value"\nmsgstr "waarde"\n"""
proptemplate = """prop = value\n"""
propexpected = """prop = waarde\n"""
propfile = self.merge2prop(proptemplate, posource)
print(propfile)
assert propfile == propexpected
def test_no_value(self):
"""check that we can handle keys without value"""
posource = """#: KEY\nmsgctxt "KEY"\nmsgid ""\nmsgstr ""\n"""
proptemplate = """KEY = \n"""
propexpected = """KEY = \n"""
propfile = self.merge2prop(proptemplate, posource)
print(propfile)
assert propfile == propexpected
def test_no_separator(self):
"""check that we can handle keys without separator"""
posource = """#: KEY\nmsgctxt "KEY"\nmsgid ""\nmsgstr ""\n"""
proptemplate = """KEY\n"""
propexpected = """KEY\n"""
propfile = self.merge2prop(proptemplate, posource)
print(propfile)
assert propfile == propexpected
def test_merging_blank_entries(self):
"""check that we can correctly merge entries that are blank in the template"""
posource = r'''#: accesskey-accept
msgid ""
"_: accesskey-accept\n"
""
msgstr ""'''
proptemplate = "accesskey-accept=\n"
propexpected = "accesskey-accept=\n"
propfile = self.merge2prop(proptemplate, posource)
print(propfile)
assert propfile == propexpected
def test_merging_fuzzy(self):
"""check merging a fuzzy translation"""
posource = """#: prop\n#, fuzzy\nmsgid "value"\nmsgstr "waarde"\n"""
proptemplate = """prop=value\n"""
propexpected = """prop=value\n"""
propfile = self.merge2prop(proptemplate, posource)
print(propfile)
assert propfile == propexpected
def test_mozilla_accesskeys(self):
"""check merging Mozilla accesskeys"""
posource = """#: prop.label prop.accesskey
msgid "&Value"
msgstr "&Waarde"
#: key.label key.accesskey
msgid "&Key"
msgstr "&Sleutel"
"""
proptemplate = """prop.label=Value
prop.accesskey=V
key.label=Key
key.accesskey=K
"""
propexpected = """prop.label=Waarde
prop.accesskey=W
key.label=Sleutel
key.accesskey=S
"""
propfile = self.merge2prop(proptemplate, posource, personality="mozilla")
print(propfile)
assert propfile == propexpected
def test_mozilla_accesskeys_missing_accesskey(self):
"""check merging Mozilla accesskeys"""
posource = """#: prop.label prop.accesskey
# No accesskey because we forgot or language doesn't do accesskeys
msgid "&Value"
msgstr "Waarde"
"""
proptemplate = """prop.label=Value
prop.accesskey=V
"""
propexpected = """prop.label=Waarde
prop.accesskey=V
"""
propfile = self.merge2prop(proptemplate, posource, personality="mozilla")
print(propfile)
assert propfile == propexpected
def test_mozilla_margin_whitespace(self):
"""Check handling of Mozilla leading and trailing spaces"""
posource = """#: sepAnd
msgid " and "
msgstr " و "
#: sepComma
msgid ", "
msgstr "، "
"""
proptemplate = r"""sepAnd = \u0020and\u0020
sepComma = ,\u20
"""
propexpected = """sepAnd = \\u0020و\\u0020
sepComma = ،\\u0020
"""
propfile = self.merge2prop(proptemplate, posource, personality="mozilla")
print(propfile)
assert propfile == | propexpected
    def test_mozilla_all_whitespace(self):
        """
        Check for the all-white-space Mozilla hack; remove when the
        corresponding code is removed.
        """
        posource = """#: accesskey-accept
msgctxt "accesskey-accept"
msgid ""
msgstr " "
#: accesskey-help
msgid "H"
msgstr "م"
"""
        proptemplate = """accesskey-accept=
accesskey-help=H
"""
        propexpected = """accesskey-accept=
accesskey-help=م
"""
        propfile = self.merge2prop(proptemplate, posource, personality="mozilla")
        print(propfile)
        assert propfile == propexpected
def test_merging_propertyless_template(self):
"""check that when merging with a template with no property values that we copy the template"""
posource = ""
proptemplate = "# A comment\n"
propexpected = proptemplate
propfile = self.merge2prop(proptemplate, posource)
print(propfile)
assert propfile == propexpected
def test_delimiters(self):
"""test that we handle different delimiters."""
posource = """#: prop\nmsgid "value"\nmsgstr "translated"\n"""
proptemplate = """prop %s value\n"""
propexpected = """prop %s translated\n"""
for delim in ["=", ":", ""]:
print("testing '%s' as delimiter" % delim)
propfile = self.merge2prop(proptemplate % delim, posource)
print(propfile)
assert propfile == propexpected % delim
def test_empty_value(self):
"""test that we handle an value in the template"""
posource = """#: key
msgctxt "key"
msgid ""
msgstr "translated"
"""
proptemplate = """key\n"""
propexpected = """key = translated\n"""
propfile = self.merge2prop(proptemplate, posource)
print(propfile)
assert propfile == propexpected
def test_personalities(self):
"""test that we output correctly for Java and Mozilla style property files. Mozilla uses Unicode, while Java uses escaped Unicode"""
posource = """#: prop\nmsgid "value"\nmsgstr "ṽḁḽṻḝ"\n"""
proptemplate = """prop = value\n"""
propexpectedjava = """prop = \\u1E7D\\u |
mfranczy/prototype | src/leappto/actor_cli.py | Python | lgpl-2.1 | 10,539 | 0.004175 | import logging
import os
import subprocess
import sys
import tempfile
from argparse import ArgumentParser
from contextlib import contextmanager
import argcomplete
import signal
from snactor import loader
from snactor import registry
ACTOR_DIRECTORY = '/usr/share/leapp/actors'
SCHEMA_DIRECTORY = '/usr/share/leapp/schema'
VERSION = "0.2-dev"
def _port_spec(arg):
"""Converts a port forwarding specifier to a (host_port, container_port) tuple
Specifiers can be either a simple integer, where the host and container port are
the same, or else a string in the form "host_port:container_port".
"""
host_port, sep, container_port = arg.partition(":")
host_port = int(host_port)
if not sep:
container_port = host_port
else:
container_port = int(container_port)
return str(host_port), container_port
def _to_port_map(items):
port_map = []
for target, source in items:
port_map.append({
'protocol': 'tcp',
'exposed_port': int(target),
'port': int(source)})
return {'ports': port_map}
def _path_spec(arg):
path = os.path.normpath(arg)
if not os.path.isabs(path):
raise ValueError("Path '{}' is not absolute or valid.".format(str(arg)))
return path
def _make_base_object(s):
return {"value": s}
def _migrate_machine_arguments(parser):
    """Register the 'migrate-machine' sub-command and all its options.

    ``parser`` is the sub-parsers object returned by ``add_subparsers()``.
    Two corrupted lines were repaired and the user-facing typo
    'leapp toll' -> 'leapp tool' fixed.
    """
    migrate_cmd = parser.add_parser('migrate-machine', help='migrate source VM to a target container host')
    migrate_cmd.add_argument("-p", "--print-port-map", default=False,
                             help='List suggested port mapping on target host', action="store_true")
    migrate_cmd.add_argument('machine', help='source machine to migrate')
    migrate_cmd.add_argument('-t', '--target', default='localhost', help='target VM name')
    migrate_cmd.add_argument(
        '--tcp-port',
        default=None,
        dest="forwarded_tcp_ports",
        nargs='*',
        type=_port_spec,
        help='(Re)define target tcp ports to forward to macrocontainer - [target_port:source_port]'
    )
    migrate_cmd.add_argument(
        '--no-tcp-port',
        default=None,
        dest="excluded_tcp_ports",
        nargs='*',
        type=_port_spec,
        help='define tcp ports which will be excluded from the mapped ports [[target_port]:source_port>]'
    )
    migrate_cmd.add_argument(
        '--exclude-path',
        default=None,
        dest="excluded_paths",
        nargs='*',
        type=_path_spec,
        help='define paths which will be excluded from the source'
    )
    migrate_cmd.add_argument("--ignore-default-port-map", default=False,
                             help='Default port mapping detected by leapp tool will be ignored', action="store_true")
    migrate_cmd.add_argument('--container-name', '-n', default=None,
                             help='Name of new container created on target host')
    migrate_cmd.add_argument(
        '--force-create',
        action='store_true',
        help='force creation of new target container, even if one already exists'
    )
    migrate_cmd.add_argument('--disable-start', dest='disable_start', default=False,
                             help='Migrated container will not be started immediately', action="store_true")
    migrate_cmd.add_argument('--target-user', default="root", help='Connect as this user to the target via ssh')
    migrate_cmd.add_argument('--source-user', default="root", help='Connect as this user to the source via ssh')
    migrate_cmd.add_argument('--debug', default=False, action="store_true", help='Turn on debug logging on stderr')
def _migrate_machine(arguments):
    """Translate parsed CLI arguments into the migrate-machine actor payload.

    Returns a ``(data, actor_name)`` tuple; the actor is 'port-mapping' when
    only the suggested port map should be printed, 'migrate-machine' otherwise.
    """
    default_excluded_paths = ['/dev/*', '/proc/*', '/sys/*', '/tmp/*', '/run/*', '/mnt/*', '/media/*', '/lost+found/*']
    excluded_port_specs = arguments.excluded_tcp_ports or ()
    payload = {
        "target_host": _make_base_object(arguments.target),
        "source_host": _make_base_object(arguments.machine),
        "use_default_port_map": _make_base_object(not arguments.ignore_default_port_map),
        "tcp_ports_user_mapping": _to_port_map(arguments.forwarded_tcp_ports or ()),
        "excluded_tcp_ports": {"tcp": {str(spec[0]): {"name": ""} for spec in excluded_port_specs}},
        "excluded_paths": {"value": arguments.excluded_paths or default_excluded_paths},
        "start_container": _make_base_object(not arguments.disable_start),
        "target_user_name": _make_base_object(arguments.target_user),
        "source_user_name": _make_base_object(arguments.source_user),
        "force_create": _make_base_object(arguments.force_create),
        "user_container_name": _make_base_object(arguments.container_name or ''),
    }
    # Debug mode, or any run that is not just printing the port map, logs to stderr.
    if arguments.debug or not arguments.print_port_map:
        logging.getLogger().addHandler(logging.StreamHandler(sys.stderr))
    if arguments.print_port_map:
        return payload, 'port-mapping'
    return payload, 'migrate-machine'
@contextmanager
def _stdout_socket():
    """Provide a unix socket for capturing actor stdout for the duration of the block.

    Creates a temporary directory holding the socket path, registers the
    LEAPP environment variables that point actors at it, and spawns the
    ``actor-stdout`` server process. On exit, the server is terminated
    (unless it already exited with a non-zero status, which is logged as an
    error) and the socket file and directory are removed.
    """
    directory = tempfile.mkdtemp('', 'LEAPP_STDOUT', None)
    name = os.path.join(directory, 'leapp_stdout.sock')
    registry.register_environment_variable('LEAPP_ACTOR_STDOUT_SOCK', name)
    # This might be wanted to be a bit more dynamic but well it's good enough for now
    registry.register_environment_variable('LEAPP_ACTOR_OUTPUT', '/usr/bin/actor-stdout')
    env = os.environ.copy()
    env["LEAPP_ACTOR_STDOUT_SOCK"] = name
    p = subprocess.Popen(["actor-stdout", "server"], env=env)
    # The original `try: yield / except: raise / finally:` re-raise clause was
    # a no-op; plain try/finally has identical behaviour.
    try:
        yield
    finally:
        # A truthy poll() result means the server exited on its own with a
        # non-zero status; otherwise it is (assumed) still running and we
        # terminate it ourselves.
        if p.poll():
            logging.error("Output tool ended prematurely with %d", p.returncode)
        else:
            os.kill(p.pid, signal.SIGTERM)
        if os.path.exists(name):
            os.unlink(name)
        if os.path.exists(directory):
            os.rmdir(directory)
def _check_target_arguments(parser):
check_target_cmd = parser.add_parser('check-target', help='check for claimed names on target container host')
check_target_cmd.add_argument('-t', '--target', default='localhost', help='Target container host')
check_target_cmd.add_argument("-s", "--status", default=False, help='Check for services status on target machine',
action="store_true")
check_target_cmd.add_argument('--user', default="root", help='Connect as this user to the target via ssh')
check_target_cmd.add_argument('--debug', default=False, action="store_true", help='Turn on debug logging on stderr')
def _check_target(arguments):
    """Translate parsed ``check-target`` CLI arguments into actor input data.

    Returns a ``(data, workflow_name)`` tuple for the remote target check.
    """
    if arguments.debug:
        logging.getLogger().addHandler(logging.StreamHandler(sys.stderr))
    status = _make_base_object(arguments.status)
    user = _make_base_object(arguments.user)
    host = _make_base_object(arguments.target)
    return {
        'check_target_service_status': status,
        'target_user_name': user,
        'target_host': host,
    }, 'remote-target-check-group'
def _port_inspect_arguments(parser):
scan_ports_cmd = parser.add_parser('port-inspect', help='scan ports on virtual machine')
scan_ports_cmd.add_argument('address', help='virtual machine address')
scan_ports_cmd.add_argument(
'--range',
default=None,
help='port range, example of proper form:"-100,200-1024,T:3000-4000,U:60000-"'
)
scan_ports_cmd.add_argument(
'--shallow',
action='store_true',
help='Skip detailed informations about used ports, this is quick SYN scan'
)
scan_ports_cmd.add_argument('--debug', default=False, action="store_true", help='Turn on debug logging on stderr')
def _port_inspect(arguments):
    """Translate parsed ``port-inspect`` CLI arguments into actor input data."""
    if arguments.debug:
        logging.getLogger().addHandler(logging.StreamHandler(sys.stderr))
    scan_options = {'shallow_scan': arguments.shallow}
    # An empty/None range means "scan the default ports"; only forward it
    # when the user actually supplied one.
    if arguments.range:
        scan_options['port_range'] = arguments.range
    return {
        'scan_options': scan_options,
        'host': _make_base_object(arguments.address),
    }, 'port-inspect'
def _destroy_container_arguments(parser):
destroy_cmd = parser.add_parser('destroy-container', help='destroy named container on virtual machine')
destroy_cmd.add_argument('-t', '--target', default='localhost', help='Target container host')
destroy_cmd.add_argument('container', |
openearth/delft3d-gt-server | delft3dworker/migrations/0051_auto_20160825_0935.py | Python | gpl-3.0 | 438 | 0 | # -*- coding: utf-8 -*-
| from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Redefine ``Container.docker_id`` as a blank-able CharField (max 64)."""

    dependencies = [
        ("delft3dworker", "0050_auto_20160825_0934"),
    ]

    operations = [
        migrations.AlterField(
            model_name="container",
            name="docker_id",
            # blank=True allows forms/admin to accept an empty docker id;
            # the default keeps the column non-null at the database level.
            field=models.CharField(default="", max_length=64, blank=True),
        ),
    ]
|
JMSkelton/Transformer | Transformer/Framework/Utilities.py | Python | gpl-3.0 | 9,340 | 0.013383 | # Transfomer/Framework/Utilities.py
# ----------------
# Module Docstring
# ----------------
""" Utilities for (re)processing sets of structures in parallel. """
# -------
# Imports
# -------
import multiprocessing;
from Transformer import StructureSet;
from Transformer.Utilities import MultiprocessingHelper;
# --------------------------
# MergeStructureSets Routine
# --------------------------
def MergeStructureSets(structureSets, useMP = False, mpNumProcesses = None, printProgressUpdate = True, inPlace = False):
    """
    Merge the list of structure sets in structureSets using the StructureSet.UpdateUnion() method.

    Arguments:
        structureSets -- list of StructureSet objects to merge.

    Keyword arguments:
        useMP -- passed to StructureSet.Union().
        mpNumProcesses -- passed to StructureSet.Union().
        printProgressUpdate -- if True (default), print status messages during merging.
        inPlace -- if True, this routine will work directly on the StructureSet objects in structureSets, which will cause the first set in structureSets to be modified (default: False).

    Return value:
        StructureSet object obtained after merging the items in structureSets.

    Notes:
        inPlace = True is a performance optimisation and should be used only when the supplied structure sets are no longer needed.
        If inPlace is not set (the default), the structure sets are cloned before merging using the StructureSet.CloneNew() routine.
    """

    # If inPlace is not set, clone the structure sets.

    if not inPlace:
        structureSetsNew = [];

        for structureSet in structureSets:
            structures, degeneracies = structureSet.GetStructureSetFlat();

            structureSetsNew.append(
                structureSet.CloneNew(structures = structures, degeneracies = degeneracies, noInitialMerge = True)
                );

        structureSets = structureSetsNew;

    # If there is only one structure set, no need to do anything.

    if len(structureSets) == 1:
        return structureSets[0];

    # Count the structures across all sets once; the original code computed
    # this twice (a second, unconditional recount immediately followed the
    # conditional one).  The count feeds both the initial status message and
    # the width of the progress format string.

    numStructures = sum(
        structureSet.GetStructureCount() for structureSet in structureSets
        );

    # Setting up a tqdm-based progress bar for the reduction would be (a) fiddly, and (b) not particularly informative.
    # If a progress bar is requested, we print a set of status messages instead.

    if printProgressUpdate:
        print("MergeStructureSets: Merging {0} structure sets w/ {1} structure(s)".format(len(structureSets), numStructures));

    # Format string for printing status messages.

    formatString = None;

    if printProgressUpdate:
        formatString = "{{0: >{0},}}".format(len("{0:,}".format(numStructures)));

    # Perform a sequence of unions to merge the structure sets.

    structureSetRef = structureSets[0];

    numStructuresMerged = structureSetRef.GetStructureCount();

    for structureSetAdd in structureSets[1:]:
        structureSetRef.UpdateUnion(
            structureSetAdd, useMP = useMP, mpNumProcesses = mpNumProcesses
            );

        numStructuresMerged += structureSetAdd.GetStructureCount();

        if printProgressUpdate:
            numRemaining = numStructures - numStructuresMerged;

            statusMessage = None;

            if numRemaining > 0:
                statusMessage = "MergeStructureSets: Merged {0} -> {1} structure, {2} remaining".format(
                    formatString.format(numStructuresMerged), formatString.format(structureSetRef.GetStructureCount()), formatString.format(numRemaining)
                    );
            else:
                statusMessage = "MergeStructureSets: Merged {0} -> {1} structure".format(
                    formatString.format(numStructuresMerged), formatString.format(structureSetRef.GetStructureCount())
                    );

            print(statusMessage);

    if printProgressUpdate:
        print("");

    return structureSetRef;
def _MergeStructureSets_MapFunction(args):
    """ Method run by worker processes when performing parallel merging. """

    # args packs the destination set and the set to be merged into it.

    targetSet, sourceSet = args;

    # Fold the second structure set into the first.

    targetSet.UpdateUnion(sourceSet);

    # Drop cached symmetry expansions before the merged set is sent back to
    # the parent process.

    targetSet.ClearSymmetryExpansionsCache();

    return targetSet;
# ---------------------------
# ReduceStructureList Routine
# ---------------------------
def ReduceStructureList(structures, degeneracies = None, useMP = False, mpNumProcesses = None, printProgressUpdate = True, **kwargs):
    """
    Reduce the list of structures and, optionally, degeneracies, by merging them into StructureSet objects set up using the supplied keyword arguments.

    Arguments:
        structures -- list of structures to reduce.

    Keyword arguments:
        degeneracies -- optional list of degeneracies for each structure in structures (default: None).
        useMP -- if True, perform parts of the reduction in parallel using process-based multithreading (default: False).
        mpNumProcesses -- if useMP is set, specifies the number of processes to use for reduction (default: automatically determined from the number of CPU cores and the list of structures).
        printProgressUpdate -- if True, print status messages.
        **kwargs -- passed through to the StructureSet constructor.

    Return value:
        A StructureSet object containing the reduced structure set and associated degeneracies.

    Notes:
        This method provides a convenience function for reducing a list of structures in parallel using similar process-based threading to the AtomicSubstitutions() routine and its derivatives.
        Its main purpose is to allow a structure set built using one symmetryExpansion setting to be "re-reduced" with a stricter one (e.g. symmetryExpansion = 'fast' -> 'full').
        The serial code path selected by setting useMP = False simply calls the StructureSet class constructor and is provided for API consistency.
    """

    # None comparisons use identity checks (the originals used ==/!=).

    if structures is None or len(structures) == 0:
        raise Exception("Error: structures cannot be None and must contain at least one structure.")

    if degeneracies is not None and len(degeneracies) != len(structures):
        raise Exception("Error: If supplied, degeneracies must be the same length as structures.");

    # If there is only one structure in the input list, there's no point in taking the parallel code path.

    if len(structures) == 1:
        useMP = False;

    if useMP:
        # If not supplied, set mpNumProcesses.

        if mpNumProcesses is None:
            mpNumProcesses = min(
                len(structures), MultiprocessingHelper.CPUCount()
                );

        # The parallel path zips structures with degeneracies, which crashed
        # with the default degeneracies = None; substitute a degeneracy of
        # one per structure (assumed to match the StructureSet constructor's
        # default on the serial path -- TODO confirm).

        if degeneracies is None:
            degeneracies = [1] * len(structures);

        # Set up one _StructureSetAccumulator object per worker process.

        accumulators = [
            _StructureSetAccumulator(**kwargs)
            for i in range(0, mpNumProcesses)
            ];

        # Use the MultiprocessingHelper.QueueAccumulate() routine to merge the structures into structure sets.

        structureSets = MultiprocessingHelper.QueueAccumulate(
            [item for item in zip(structures, degeneracies)], accumulators, progressBar = printProgressUpdate
            );

        # Merge the structure sets using the MergeStructureSets() utility routine.

        return MergeStructureSets(
            structureSets, useMP = True, mpNumProcesses = mpNumProcesses, printProgressUpdate = printProgressUpdate, inPlace = True
            );
    else:
        # Serial code path.

        return StructureSet.StructureSet(
            structures = structures, degeneracies = degeneracies, **kwargs
            );
# Helper class for grouping a list of structures into sets.
class _StructureSetAccumulator(MultiprocessingHelper.AccumulatorBase):
"""
Implementation of the MultiprocessingHelper.AccumulatorBase class for building structure sets in parallel.
This class is used as p |
sultan99/online | ONode/Tools/test.py | Python | apache-2.0 | 42,584 | 0.015687 | #!/usr/bin/env python
#
# Copyright 2008 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import imp
import optparse
import os
import platform
import re
import signal
import subprocess
import sys
import tempfile
import time
import threading
import utils
from os.path import join, dirname, abspath, basename, isdir, exists
from datetime import datetime
from Queue import Queue, Empty
VERBOSE = False
# ---------------------------------------------
# --- P r o g r e s s I n d i c a t o r s ---
# ---------------------------------------------
class ProgressIndicator(object):
  """Base class that runs queued test cases on a pool of worker threads.

  Subclasses implement Starting/Done/AboutToRun/HasRun to render progress.
  Shared counters are protected by self.lock; self.terminate asks all
  workers to stop early.
  """

  def __init__(self, cases):
    self.cases = cases
    # Pre-fill the work queue with every test case; workers drain it.
    self.queue = Queue(len(cases))
    for case in cases:
      self.queue.put_nowait(case)
    self.succeeded = 0
    self.remaining = len(cases)
    self.total = len(cases)
    self.failed = [ ]
    self.crashed = 0
    self.terminate = False
    self.lock = threading.Lock()

  def PrintFailureHeader(self, test):
    """Print the '=== label ===' banner and path for a failed test."""
    if test.IsNegative():
      negative_marker = '[negative] '
    else:
      negative_marker = ''
    print "=== %(label)s %(negative)s===" % {
      'label': test.GetLabel(),
      'negative': negative_marker
    }
    print "Path: %s" % "/".join(test.path)

  def Run(self, tasks):
    """Run all queued cases on `tasks` threads; return True iff none failed."""
    self.Starting()
    threads = []
    # Spawn N-1 threads and then use this thread as the last one.
    # That way -j1 avoids threading altogether which is a nice fallback
    # in case of threading problems.
    for i in xrange(tasks - 1):
      thread = threading.Thread(target=self.RunSingle, args=[])
      threads.append(thread)
      thread.start()
    try:
      self.RunSingle()
      # Wait for the remaining threads
      for thread in threads:
        # Use a timeout so that signals (ctrl-c) will be processed.
        thread.join(timeout=10000000)
    except Exception, e:
      # If there's an exception we schedule an interruption for any
      # remaining threads.
      self.terminate = True
      # ...and then reraise the exception to bail out
      raise
    self.Done()
    return not self.failed

  def RunSingle(self):
    """Worker loop: pop cases off the queue and run them until empty."""
    while not self.terminate:
      try:
        test = self.queue.get_nowait()
      except Empty:
        return
      case = test.case
      self.lock.acquire()
      self.AboutToRun(case)
      self.lock.release()
      try:
        start = datetime.now()
        output = case.Run()
        case.duration = (datetime.now() - start)
      except IOError, e:
        # IOError here is expected only when a shutdown was requested.
        assert self.terminate
        return
      if self.terminate:
        return
      self.lock.acquire()
      # Update the shared tallies under the lock, then let the subclass report.
      if output.UnexpectedOutput():
        self.failed.append(output)
        if output.HasCrashed():
          self.crashed += 1
      else:
        self.succeeded += 1
      self.remaining -= 1
      self.HasRun(output)
      self.lock.release()
def EscapeCommand(command):
  """Render a command argument list as a single display string.

  Arguments containing spaces are wrapped in double quotes.  This is a
  display helper, not a full shell-quoting implementation.
  """
  quoted = [('"%s"' % part) if ' ' in part else part for part in command]
  return " ".join(quoted)
class SimpleProgressIndicator(ProgressIndicator):
  """Progress indicator that prints a summary up front and details at the end."""

  def Starting(self):
    print 'Running %i tests' % len(self.cases)

  def Done(self):
    """Dump stderr/stdout, command line, and crash/timeout flags per failure."""
    print
    for failed in self.failed:
      self.PrintFailureHeader(failed.test)
      if failed.output.stderr:
        print "--- stderr ---"
        print failed.output.stderr.strip()
      if failed.output.stdout:
        print "--- stdout ---"
        print failed.output.stdout.strip()
      print "Command: %s" % EscapeCommand(failed.command)
      if failed.HasCrashed():
        print "--- CRASHED ---"
      if failed.HasTimedOut():
        print "--- TIMEOUT ---"
    if len(self.failed) == 0:
      print "==="
      print "=== All tests succeeded"
      print "==="
    else:
      print
      print "==="
      print "=== %i tests failed" % len(self.failed)
      if self.crashed > 0:
        print "=== %i tests CRASHED" % self.crashed
      print "==="
class VerboseProgressIndicator(SimpleProgressIndicator):
  """Progress indicator that announces each test before and after running."""

  def AboutToRun(self, case):
    print 'Starting %s...' % case.GetLabel()
    # Flush so output interleaves sensibly when tests run concurrently.
    sys.stdout.flush()

  def HasRun(self, output):
    if output.UnexpectedOutput():
      if output.HasCrashed():
        outcome = 'CRASH'
      else:
        outcome = 'FAIL'
    else:
      outcome = 'pass'
    print 'Done running %s: %s' % (output.test.GetLabel(), outcome)
class DotsProgressIndicator(SimpleProgressIndicator):
  """Progress indicator that prints one character per finished test."""

  def AboutToRun(self, case):
    # Nothing is announced before a test starts in this mode.
    pass

  def HasRun(self, output):
    completed = self.succeeded + len(self.failed)
    # Wrap the line of dots every 50 results.
    if completed > 1 and completed % 50 == 1:
      sys.stdout.write('\n')
    # '.' pass, 'C' crash, 'T' timeout, 'F' other failure.
    if not output.UnexpectedOutput():
      marker = '.'
    elif output.HasCrashed():
      marker = 'C'
    elif output.HasTimedOut():
      marker = 'T'
    else:
      marker = 'F'
    sys.stdout.write(marker)
    sys.stdout.flush()
class TapProgressIndicator(SimpleProgressIndicator):
  """Progress indicator that emits TAP (Test Anything Protocol) output."""

  def Starting(self):
    # TAP plan line: "1..<total>".
    print '1..%i' % len(self.cases)
    self._done = 0

  def AboutToRun(self, case):
    pass

  def HasRun(self, output):
    self._done += 1
    command = basename(output.command[-1])
    if output.UnexpectedOutput():
      print 'not ok %i - %s' % (self._done, command)
      # Captured output is emitted as TAP comments ('#' prefix).
      for l in output.output.stderr.splitlines():
        print '#' + l
      for l in output.output.stdout.splitlines():
        print '#' + l
    else:
      print 'ok %i - %s' % (self._done, command)
      duration = output.test.duration

      # total_seconds() was added in 2.7
      total_seconds = (duration.microseconds +
        (duration.seconds + duration.days * 24 * 3600) * 10**6) / 10**6

      # YAML diagnostic block with the per-test duration in milliseconds.
      print '  ---'
      print '  duration_ms: %d.%d' % (total_seconds, duration.microseconds / 1000)
      print '  ...'

  def Done(self):
    pass
class CompactProgressIndicator(ProgressIndicator):
def __init__(self, cases, templates):
super(CompactProgressIndicator, self).__init__(cases)
self.templates = templates
self.last_status_length = 0
self.start_time = time.time()
def Starting(self):
pass
def Done(self):
self.PrintProgress('Done')
def AboutToRun(self, case):
self.PrintProgress(case.GetLabel())
def HasRun(self, output):
if output.UnexpectedOutput():
self.ClearLine(self.last_status_length)
self.PrintFailureHeader(output.test)
stdout = output.output.stdout.strip()
if len(stdout):
print self.templates['stdout'] % stdout
stderr = output.output.stderr.strip()
if len(stderr):
print self.templates['stderr'] % stderr
|
numa-engineering/python-daemon | daemon/runner.py | Python | gpl-2.0 | 7,204 | 0 | # -*- coding: utf-8 -*-
# daemon/runner.py
# Part of python-daemon, an implementation of PEP 3143.
#
# Copyright © 2009–2010 Ben Finney <ben+python@benfinney.id.au>
# Copyright © 2007–2008 Robert Niederreiter, Jens Klein
# Copyright © 2003 Clark Evans
# Copyright © 2002 Noah Spurrier
# Copyright © 2001 Jürgen Hermann
#
# This is free software: you may copy, modify, and/or distribute this work
# under the terms of the Python Software Foundation License, version 2 or
# later as published by the Python Software Foundation.
# No warranty expressed or implied. See the file LICENSE.PSF-2 for details.
""" Daemon runner library.
"""
import sys
import os
import signal
import errno
from . import pidlockfile
from .daemon import DaemonContext
if sys.version_info >= (3, 0):
unicode = str
basestring = str
class DaemonRunnerError(Exception):
""" Abstract base class for errors from DaemonRunner. """
class DaemonRunnerInvalidActionError(ValueError, DaemonRunnerError):
""" Raised when specified action for DaemonRunner is invalid. """
class DaemonRunnerStartFailureError(RuntimeError, DaemonRunnerError):
""" Raised when failure starting DaemonRunner. """
class DaemonRunnerStopFailureError(RuntimeError, DaemonRunnerError):
""" Raised when failure stopping DaemonRunner. """
class DaemonRunner(object):
    """ Controller for a callable running in a separate background process.

        The first command-line argument is the action to take:

        * 'start': Become a daemon and call `app.run()`.
        * 'stop': Exit the daemon process specified in the PID file.
        * 'restart': Stop, then start.

        """

    # Printed (via emit_message) once the daemon process has forked.
    start_message = "started with pid %(pid)d"

    def __init__(self, app):
        """ Set up the parameters of a new runner.

            The `app` argument must have the following attributes:

            * `stdin_path`, `stdout_path`, `stderr_path`: Filesystem
              paths to open and replace the existing `sys.stdin`,
              `sys.stdout`, `sys.stderr`.

            * `pidfile_path`: Absolute filesystem path to a file that
              will be used as the PID file for the daemon. If
              ``None``, no PID file will be used.

            * `pidfile_timeout`: Used as the default acquisition
              timeout value supplied to the runner's PID lock file.

            * `run`: Callable that will be invoked when the daemon is
              started.

            """
        # NOTE: parse_args() reads sys.argv and may sys.exit() on bad usage,
        # so constructing a DaemonRunner has CLI side effects.
        self.parse_args()
        self.app = app
        self.daemon_context = DaemonContext()
        # These streams stay open for the daemon's lifetime; DaemonContext
        # dups them over the standard descriptors when opened.
        self.daemon_context.stdin = open(app.stdin_path, 'r')
        self.daemon_context.stdout = open(app.stdout_path, 'wb+', buffering=0)
        self.daemon_context.stderr = open(
            app.stderr_path, 'wb+', buffering=0)

        self.pidfile = None
        if app.pidfile_path is not None:
            self.pidfile = make_pidlockfile(
                app.pidfile_path, app.pidfile_timeout)
        self.daemon_context.pidfile = self.pidfile

    def _usage_exit(self, argv):
        """ Emit a usage message, then exit.
            """
        progname = os.path.basename(argv[0])
        usage_exit_code = 2
        action_usage = "|".join(self.action_funcs.keys())
        # vars() supplies the local names for %-interpolation.
        message = "usage: %(progname)s %(action_usage)s" % vars()
        emit_message(message)
        sys.exit(usage_exit_code)

    def parse_args(self, argv=None):
        """ Parse command-line arguments.
            """
        if argv is None:
            argv = sys.argv

        min_args = 2
        if len(argv) < min_args:
            self._usage_exit(argv)

        self.action = unicode(argv[1])
        if self.action not in self.action_funcs:
            self._usage_exit(argv)

    def _start(self):
        """ Open the daemon context and run the application.

            Raises `DaemonRunnerStartFailureError` if the PID file is
            already locked by a live process.
            """
        # A stale lock (dead owner) is broken rather than treated as fatal.
        if is_pidfile_stale(self.pidfile):
            self.pidfile.break_lock()

        try:
            self.daemon_context.open()
        except pidlockfile.AlreadyLocked:
            pidfile_path = self.pidfile.path
            raise DaemonRunnerStartFailureError(
                "PID file %(pidfile_path)r already locked" % vars())

        pid = os.getpid()
        message = self.start_message % vars()
        emit_message(message)

        self.app.run()

    def _terminate_daemon_process(self):
        """ Terminate the daemon process specified in the current PID file.

            Raises `DaemonRunnerStopFailureError` if the signal cannot
            be delivered.
            """
        pid = self.pidfile.read_pid()

        try:
            os.kill(pid, signal.SIGTERM)
        except OSError as exc:
            raise DaemonRunnerStopFailureError(
                "Failed to terminate %(pid)d: %(exc)s" % vars())

    def _stop(self):
        """ Exit the daemon process specified in the current PID file.

            Raises `DaemonRunnerStopFailureError` if no PID file is
            locked.
            """
        if not self.pidfile.is_locked():
            pidfile_path = self.pidfile.path
            raise DaemonRunnerStopFailureError(
                "PID file %(pidfile_path)r not locked" % vars())

        # Stale lock: just clean up; live process: send SIGTERM.
        if is_pidfile_stale(self.pidfile):
            self.pidfile.break_lock()
        else:
            self._terminate_daemon_process()

    def _restart(self):
        """ Stop, then start.
            """
        self._stop()
        self._start()

    # Maps the CLI action word to the unbound handler method.
    action_funcs = {
        'start': _start,
        'stop': _stop,
        'restart': _restart,
        }

    def _get_action_func(self):
        """ Return the function for the specified action.

            Raises ``DaemonRunnerInvalidActionError`` if the action is
            unknown.

            """
        try:
            func = self.action_funcs[self.action]
        except KeyError:
            raise DaemonRunnerInvalidActionError(
                "Unknown action: %(action)r" % vars(self))
        return func

    def do_action(self):
        """ Perform the requested action.
            """
        func = self._get_action_func()
        # The table holds unbound functions, so `self` is passed explicitly.
        func(self)
def emit_message(message, stream=None):
    """ Emit a message to the specified stream (default `sys.stderr`). """
    target = sys.stderr if stream is None else stream
    target.write("{0}\n".format(message))
    target.flush()
def make_pidlockfile(path, acquire_timeout):
    """ Make a PIDLockFile instance with the given filesystem path.

        Raises `ValueError` unless `path` is an absolute filesystem
        path string.
        """
    if not isinstance(path, basestring):
        raise ValueError("Not a filesystem path: %(path)r" % vars())
    if not os.path.isabs(path):
        raise ValueError("Not an absolute path: %(path)r" % vars())
    return pidlockfile.TimeoutPIDLockFile(path, acquire_timeout)
def is_pidfile_stale(pidfile):
    """ Determine whether a PID file is stale.

        Return ``True`` ("stale") if the contents of the PID file are
        valid but do not match the PID of a currently-running process;
        otherwise return ``False``.

        """
    pidfile_pid = pidfile.read_pid()
    if pidfile_pid is None:
        # No recorded PID means nothing to be stale about.
        return False
    try:
        # signal.SIG_DFL has value 0 on CPython, so this acts as an
        # existence probe: no signal is actually delivered.
        os.kill(pidfile_pid, signal.SIG_DFL)
    except OSError as exc:
        if exc.errno == errno.ESRCH:
            # The specified PID does not exist -- the file is stale.
            return True
    return False
|
NotAGameDev/website | website/migrations/0008_auto_20170422_1629.py | Python | agpl-3.0 | 448 | 0 | # -*- coding: utf-8 -*-
# Generated b | y Django 1.10.6 on 2017-04-22 16:29
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Redefine ``user.name`` as a plain CharField with max_length 256."""

    dependencies = [
        ('website', '0007_auto_20170422_1622'),
    ]

    operations = [
        migrations.AlterField(
            model_name='user',
            name='name',
            field=models.CharField(max_length=256),
        ),
    ]
|
rtucker-mozilla/mozpackager | vendor-local/lib/python/kombu/utils/compat.py | Python | bsd-3-clause | 4,371 | 0.002745 | """
kombu.utils.compat
==================
Helps compatibility with older Python versions.
"""
############## py3k #########################################################
import sys
is_py3k = sys.version_info[0] == 3
if is_py3k: # pragma: no cover
from io import StringIO, BytesIO
from .encoding import bytes_to_str
class WhateverIO(StringIO):
def write(self, data):
StringIO.write(self, bytes_to_str(data))
else:
from StringIO import StringIO # noqa
BytesIO = WhateverIO = StringIO # noqa
############## __builtins__.next #############################################
try:
next = next
except NameError:
def next(it, *args): # noqa
try:
return it.next()
except StopIteration:
if not args:
raise
return args[0]
############## collections.OrderedDict #######################################
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict # noqa
############## queue.LifoQueue ##############################################
from Queue import Queue
class LifoQueue(Queue):
    """Queue variant that retrieves the most recently added entries first.

    Backport for Python versions whose `Queue` module lacks `LifoQueue`;
    overrides the private storage hooks of `Queue` to use a list as a stack.
    """

    def _init(self, maxsize):
        # Plain list used as the LIFO backing store.
        self.queue = []
        self.maxsize = maxsize

    def _qsize(self, len=len):
        # len is bound as a default arg for a marginally faster lookup.
        return len(self.queue)

    def _put(self, item):
        self.queue.append(item)

    def _get(self):
        # pop() without an index removes from the end -> LIFO order.
        return self.queue.pop()
############## logging.handlers.WatchedFileHandler ##########################
import logging
import os
import platform as _platform
from stat import ST_DEV, ST_INO
# Select a WatchedFileHandler implementation appropriate for the platform:
# the stdlib one where available, a FileHandler alias on Windows, and a
# backported implementation for Pythons whose logging.handlers lacks it.
if _platform.system() == 'Windows':
    #since windows doesn't go with WatchedFileHandler use FileHandler instead
    WatchedFileHandler = logging.FileHandler
else:
    try:
        from logging.handlers import WatchedFileHandler
    except ImportError:
        class WatchedFileHandler(logging.FileHandler):  # noqa
            """
            A handler for logging to a file, which watches the file
            to see if it has changed while in use. This can happen because of
            usage of programs such as newsyslog and logrotate which perform
            log file rotation. This handler, intended for use under Unix,
            watches the file to see if it has changed since the last emit.
            (A file has changed if its device or inode have changed.)
            If it has changed, the old file stream is closed, and the file
            opened to get a new stream.

            This handler is not appropriate for use under Windows, because
            under Windows open files cannot be moved or renamed - logging
            opens the files with exclusive locks - and so there is no need
            for such a handler. Furthermore, ST_INO is not supported under
            Windows; stat always returns zero for this value.

            This handler is based on a suggestion and patch by Chad J.
            Schroeder.
            """

            def __init__(self, *args, **kwargs):
                logging.FileHandler.__init__(self, *args, **kwargs)
                # Remember the device/inode of the open file so rotation
                # can be detected later; (-1, -1) marks "file absent".
                if not os.path.exists(self.baseFilename):
                    self.dev, self.ino = -1, -1
                else:
                    stat = os.stat(self.baseFilename)
                    self.dev, self.ino = stat[ST_DEV], stat[ST_INO]

            def emit(self, record):
                """
                Emit a record.

                First check if the underlying file has changed, and if it
                has, close the old stream and reopen the file to get the
                current stream.
                """
                if not os.path.exists(self.baseFilename):
                    stat = None
                    changed = 1
                else:
                    stat = os.stat(self.baseFilename)
                    changed = ((stat[ST_DEV] != self.dev) or
                               (stat[ST_INO] != self.ino))
                if changed and self.stream is not None:
                    # File was rotated/removed: reopen and re-record identity.
                    self.stream.flush()
                    self.stream.close()
                    self.stream = self._open()
                    if stat is None:
                        stat = os.stat(self.baseFilename)
                    self.dev, self.ino = stat[ST_DEV], stat[ST_INO]
                logging.FileHandler.emit(self, record)
|
aronsky/home-assistant | homeassistant/components/forked_daapd/media_player.py | Python | apache-2.0 | 30,898 | 0.000874 | """This library brings support for forked_daapd to Home Assistant."""
import asyncio
from collections import defaultdict
import logging
from pyforked_daapd import ForkedDaapdAPI
from pylibrespot_java import LibrespotJavaAPI
from homeassistant.components.media_player import MediaPlayerEntity
from homeassistant.components.media_player.const import MEDIA_TYPE_MUSIC
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_PORT,
STATE_IDLE,
STATE_OFF,
STATE_ON,
STATE_PAUSED,
STATE_PLAYING,
)
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.util.dt import utcnow
from .const import (
CALLBACK_TIMEOUT,
CONF_LIBRESPOT_JAVA_PORT,
CONF_MAX_PLAYLISTS,
CONF_TTS_PAUSE_TIME,
CONF_TTS_VOLUME,
DEFAULT_TTS_PAUSE_TIME,
DEFAULT_TTS_VOLUME,
DEFAULT_UNMUTE_VOLUME,
DOMAIN,
FD_NAME,
HASS_DATA_REMOVE_LISTENERS_KEY,
HASS_DATA_UPDATER_KEY,
KNOWN_PIPES,
PIPE_FUNCTION_MAP,
SIGNAL_ADD_ZONES,
SIGNAL_CONFIG_OPTIONS_UPDATE,
SIGNAL_UPDATE_DATABASE,
SIGNAL_UPDATE_MASTER,
SIGNAL_UPDATE_OUTPUTS,
SIGNAL_UPDATE_PLAYER,
SIGNAL_UPDATE_QUEUE,
SOURCE_NAME_CLEAR,
SOURCE_NAME_DEFAULT,
STARTUP_DATA,
SUPPORTED_FEATURES,
SUPPORTED_FEATURES_ZONE,
TTS_TIMEOUT,
)
_LOGGER = logging.getLogger(__name__)
WS_NOTIFY_EVENT_TYPES = ["player", "outputs", "volume", "options", "queue", "database"]
WEBSOCKET_RECONNECT_TIME = 30 # seconds
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up forked-daapd from a config entry."""
    host = config_entry.data[CONF_HOST]
    port = config_entry.data[CONF_PORT]
    password = config_entry.data[CONF_PASSWORD]
    forked_daapd_api = ForkedDaapdAPI(
        async_get_clientsession(hass), host, port, password
    )
    forked_daapd_master = ForkedDaapdMaster(
        clientsession=async_get_clientsession(hass),
        api=forked_daapd_api,
        ip_address=host,
        api_port=port,
        api_password=password,
        config_entry=config_entry,
    )

    @callback
    def async_add_zones(api, outputs):
        # One ForkedDaapdZone entity per reported output; entities are added
        # without an immediate state update (second argument False).
        zone_entities = []
        for output in outputs:
            zone_entities.append(ForkedDaapdZone(api, output, config_entry.entry_id))
        async_add_entities(zone_entities, False)

    # Keep the unsubscribe callables so they can be detached on unload.
    remove_add_zones_listener = async_dispatcher_connect(
        hass, SIGNAL_ADD_ZONES.format(config_entry.entry_id), async_add_zones
    )
    remove_entry_listener = config_entry.add_update_listener(update_listener)

    if not hass.data.get(DOMAIN):
        hass.data[DOMAIN] = {config_entry.entry_id: {}}
    hass.data[DOMAIN][config_entry.entry_id] = {
        HASS_DATA_REMOVE_LISTENERS_KEY: [
            remove_add_zones_listener,
            remove_entry_listener,
        ]
    }
    async_add_entities([forked_daapd_master], False)
    # The updater opens the websocket feed; it is stored in hass.data so
    # other parts of the integration can reach it.
    forked_daapd_updater = ForkedDaapdUpdater(
        hass, forked_daapd_api, config_entry.entry_id
    )
    await forked_daapd_updater.async_init()
    hass.data[DOMAIN][config_entry.entry_id][
        HASS_DATA_UPDATER_KEY
    ] = forked_daapd_updater
async def update_listener(hass, entry):
    """Handle options update."""
    # Re-broadcast the new options to the entry's entities via dispatcher.
    async_dispatcher_send(
        hass, SIGNAL_CONFIG_OPTIONS_UPDATE.format(entry.entry_id), entry.options
    )
class ForkedDaapdZone(MediaPlayerEntity):
    """Representation of a forked-daapd output."""

    def __init__(self, api, output, entry_id):
        """Initialize the ForkedDaapd Zone."""
        self._api = api
        self._output = output
        self._output_id = output["id"]
        self._last_volume = DEFAULT_UNMUTE_VOLUME  # used for mute/unmute
        self._available = True
        self._entry_id = entry_id

    async def async_added_to_hass(self):
        """Use lifecycle hooks."""
        # Listen for output-list updates pushed by the updater; the listener
        # is removed automatically when the entity is removed.
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass,
                SIGNAL_UPDATE_OUTPUTS.format(self._entry_id),
                self._async_update_output_callback,
            )
        )

    @callback
    def _async_update_output_callback(self, outputs, _event=None):
        # Find this zone's output in the fresh list; absence means the
        # output disappeared and the entity becomes unavailable.
        new_output = next(
            (output for output in outputs if output["id"] == self._output_id), None
        )
        self._available = bool(new_output)
        if self._available:
            self._output = new_output
        self.async_write_ha_state()

    @property
    def unique_id(self):
        """Return unique ID."""
        return f"{self._entry_id}-{self._output_id}"

    @property
    def should_poll(self) -> bool:
        """Entity pushes its state to HA."""
        return False

    async def async_toggle(self):
        """Toggle the power on the zone."""
        if self.state == STATE_OFF:
            await self.async_turn_on()
        else:
            await self.async_turn_off()

    @property
    def available(self) -> bool:
        """Return whether the zone is available."""
        return self._available

    async def async_turn_on(self):
        """Enable the output."""
        await self._api.change_output(self._output_id, selected=True)

    async def async_turn_off(self):
        """Disable the output."""
        await self._api.change_output(self._output_id, selected=False)

    @property
    def name(self):
        """Return the name of the zone."""
        return f"{FD_NAME} output ({self._output['name']})"

    @property
    def state(self):
        """State of the zone."""
        if self._output["selected"]:
            return STATE_ON
        return STATE_OFF

    @property
    def volume_level(self):
        """Volume level of the media player (0..1)."""
        # API reports volume as 0-100.
        return self._output["volume"] / 100

    @property
    def is_volume_muted(self):
        """Boolean if volume is currently muted."""
        return self._output["volume"] == 0

    async def async_mute_volume(self, mute):
        """Mute the volume."""
        if mute:
            if self.volume_level == 0:
                return
            self._last_volume = self.volume_level  # store volume level to restore later
            target_volume = 0
        else:
            target_volume = self._last_volume  # restore volume level
        await self.async_set_volume_level(volume=target_volume)

    async def async_set_volume_level(self, volume):
        """Set volume - input range [0,1]."""
        await self._api.set_volume(volume=volume * 100, output_id=self._output_id)

    @property
    def supported_features(self):
        """Flag media player features that are supported."""
        return SUPPORTED_FEATURES_ZONE
class ForkedDaapdMaster(MediaPlayerEntity):
"""Representation of the main forked-daapd device."""
def __init__(
self, clientsession, api, ip_address, api_port, api_password, config_entry
):
"""Initialize the ForkedDaapd Master Device."""
self._api = api
self._player = STARTUP_DATA[
"player"
] # _player, _outputs, and _queue are loaded straight from api
self._outputs = STARTUP_DATA["outputs"]
self._queue = STARTUP_DATA["queue"]
self._track_info = defaultdict(
str
) # _track info is found by matching _player data with _queue data
self._last_outputs = [] # used for device on/off
self._last_volume = DEFAULT_UNMUTE_VOLUME
self._player_last_updated = None
self._pipe_control_api = {}
self._ip_address = (
ip_address # need to save this because pipe control is on same ip
)
self._tts_pause_time = DEFAULT_TTS_PAUSE_TIME
self._tts_volume = DEFAULT_TTS_VOLUME
self._tts_requested = False
self._tts_queued = False
self._tts_playing_event = asyncio.Event()
self._on_remove = None
self._available = False
self._clientsession = clientsession
self._config_entry = config_entry
self.update_options(config_entry.options)
self._paused_event = asyncio.Event()
sel |
yamaguchiyuto/icwsm15 | tag_follow_disagreement.py | Python | mit | 857 | 0.005834 | import sys
def build_tag_graph(tagging_filepath, delim='\t'):
    """Build the tagging graph from `src<delim>dst` lines.

    Returns a dict mapping each tagger `src` to a dict of tagged users
    `dst`, each initialized with flag value 0.
    """
    graph = {}
    for line in open(tagging_filepath):
        entry = line.rstrip().split(delim)
        src, dst = entry[0], entry[1]
        graph.setdefault(src, {})[dst] = 0
    return graph


def mark_following(graph, following_filepath, delim='\t'):
    """Flag tagged pairs found in the following graph (mutates `graph`).

    For each follow edge src->dst: adds 1 to graph[src][dst] (the tagger
    follows the tagged user) and 2 to graph[dst][src] (the tagged user is
    followed back), when those pairs exist in the tagging graph.
    """
    for line in open(following_filepath):
        entry = line.rstrip().split(delim)
        src, dst = entry[0], entry[1]
        if src in graph and dst in graph[src]:
            graph[src][dst] += 1
        if dst in graph and src in graph[dst]:
            graph[dst][src] += 2


def disagreement(graph):
    """Return (directed, undirected) fractions of tagged pairs that follow.

    A flag value of 1 or 3 means the tag direction agrees with a follow edge;
    1, 2 or 3 means the pair is connected ignoring direction.
    """
    w_dir = 0
    wo_dir = 0
    count = 0.0
    for src in graph:
        for dst in graph[src]:
            val = graph[src][dst]
            count += 1
            if val in (1, 3):
                w_dir += 1
            if val in (1, 2, 3):
                wo_dir += 1
    if count == 0:
        # Empty tagging graph: report zero instead of dividing by zero.
        return 0.0, 0.0
    return w_dir / count, wo_dir / count


def main(argv):
    tagging_filepath = argv[1]
    following_filepath = argv[2]
    # Bug fix: the delimiter from argv[3] was previously parsed but never
    # used -- the file splits always hardcoded '\t'.
    delim = argv[3] if len(argv) > 3 else '\t'
    graph = build_tag_graph(tagging_filepath, delim)
    mark_following(graph, following_filepath, delim)
    w_dir, wo_dir = disagreement(graph)
    print("%s\t%s" % (w_dir, wo_dir))


if __name__ == '__main__':
    main(sys.argv)
|
christianurich/VIBe2UrbanSim | 3rdparty/opus/src/synthesizer/gui/default_census_cat_transforms.py | Python | gpl-2.0 | 42,838 | 0.00845 | # PopGen 1.1 is A Synthetic Population Generator for Advanced
# Microsimulation Models of Travel Demand
# Copyright (C) 2009, Arizona State University
# See PopGen/License
DEFAULT_PERSON_PUMS2000_QUERIES = [ "alter table person_pums add column agep bigint",
"alter table person_pums add column gender bigint",
"alter table person_pums add column race bigint",
"alter table person_pums add column employment bigint",
"update person_pums set agep = 1 where age < 5",
"update person_pums set agep = 2 where age >= 5 and age < 15",
"update person_pums set agep = 3 where age >= 15 and age < 25",
"update person_pums set agep = 4 where age >= 25 and age < 35",
"update person_pums set agep = 5 where age >= 35 and age < 45",
"update person_pums set agep = 6 where age >= 45 and age < 55",
"update person_pums set agep = 7 where age >= 55 and age < 65",
"update person_pums set agep = 8 where age >= 65 and age < 75",
"update person_pums set agep = 9 where age >= 75 and age < 85",
"update person_pums set agep = 10 where age >= 85",
"update person_pums set gender = sex",
"update person_pums set race = 1 where race1 = 1",
"update person_pums set race = 2 where race1 = 2",
"update person_pums set race = 3 where race1 >=3 and race1 <= 5",
"update person_pums set race = 4 where race1 = 6",
"update person_pums set race = 5 where race1 = 7",
"update person_pums set race = 6 where race1 = 8",
"update person_pums set race = 7 where race1 = 9",
"update person_pums set employment = 1 where esr = 0",
"update person_pums set employment = 2 where esr = 1 or esr = 2 or esr = 4 or esr = 5",
"update person_pums set emp | loyment = 3 where esr = 3",
"update person_pums set employment = 4 wher | e esr = 6",
"drop table person_sample",
"create table person_sample select state, pumano, hhid, serialno, pnum, agep, gender, race, employment, relate from person_pums",
"alter table person_sample add index(serialno, pnum)",
"drop table hhld_sample_temp",
"alter table hhld_sample drop column hhldrage",
"alter table hhld_sample rename to hhld_sample_temp",
"drop table hhld_sample",
"create table hhld_sample select hhld_sample_temp.*, agep as hhldrage from hhld_sample_temp left join person_sample using(serialno) where relate = 1",
"alter table hhld_sample add index(serialno)",
"update hhld_sample set hhldrage = 1 where hhldrage <=7 ",
"update hhld_sample set hhldrage = 2 where hhldrage >7"]
# Same recode pipeline for ACS PUMS data. The raw ACS columns are first
# renamed to their Census-2000 equivalents; serial numbers are later mapped
# to hhids through the serialcorr table, and the householder row is
# identified by relate = 0 (ACS coding) instead of 1.
DEFAULT_PERSON_PUMSACS_QUERIES = ["alter table person_pums change agep age bigint",
"alter table person_pums change puma pumano bigint",
"alter table person_pums change rac1p race1 bigint",
"alter table person_pums change st state bigint",
"alter table person_pums change sporder pnum bigint",
"alter table person_pums change rel relate bigint",
"alter table person_pums add column agep bigint",
"alter table person_pums add column gender bigint",
"alter table person_pums add column race bigint",
"alter table person_pums add column employment bigint",
# agep: age recoded into the same 10 bins as the Census 2000 pipeline.
"update person_pums set agep = 1 where age < 5",
"update person_pums set agep = 2 where age >= 5 and age < 15",
"update person_pums set agep = 3 where age >= 15 and age < 25",
"update person_pums set agep = 4 where age >= 25 and age < 35",
"update person_pums set agep = 5 where age >= 35 and age < 45",
"update person_pums set agep = 6 where age >= 45 and age < 55",
"update person_pums set agep = 7 where age >= 55 and age < 65",
"update person_pums set agep = 8 where age >= 65 and age < 75",
"update person_pums set agep = 9 where age >= 75 and age < 85",
"update person_pums set agep = 10 where age >= 85",
"update person_pums set gender = sex",
# race: same 7-category collapse of race1 as the Census 2000 pipeline.
"update person_pums set race = 1 where race1 = 1",
"update person_pums set race = 2 where race1 = 2",
"update person_pums set race = 3 where race1 >=3 and race1 <= 5",
"update person_pums set race = 4 where race1 = 6",
"update person_pums set race = 5 where race1 = 7",
"update person_pums set race = 6 where race1 = 8",
"update person_pums set race = 7 where race1 = 9",
# employment recodes ESR literally: 0 -> 1, {1,2,4,5} -> 2, 3 -> 3, 6 -> 4.
"update person_pums set employment = 1 where esr = 0",
"update person_pums set employment = 2 where esr = 1 or esr = 2 or esr = 4 or esr = 5",
"update person_pums set employment = 3 where esr = 3",
"update person_pums set employment = 4 where esr = 6",
# Replace ACS serialno with the hhid mapped through serialcorr, then build
# person_sample from the joined copy.
"alter table person_pums add index(serialno)",
"create table person_pums1 select person_pums.*, hhid from person_pums left join serialcorr using(serialno)",
"update person_pums1 set serialno = hhid",
"drop table person_sample",
"create table person_sample select state, pumano, hhid, serialno, pnum, agep, gender, race, employment, relate from person_pums1",
"alter table person_sample add index(serialno, pnum)",
# Rebuild hhld_sample with the householder's (relate = 0) age bucket,
# collapsed to 1 (under 65) or 2 (65 and over), then drop the scratch tables.
"drop table hhld_sample_temp",
"alter table hhld_sample drop column hhldrage",
"alter table hhld_sample rename to hhld_sample_temp",
"drop table hhld_sample",
"create table hhld_sample select hhld_sample_temp.*, agep as hhldrage from hhld_sample_temp left join person_sample using(serialno) where relate = 0",
"alter table hhld_sample add index(serialno)",
"update hhld_sample set hhldrage = 1 where hhldrage <=7 ",
"update hhld_sample set hhldrage = 2 where hhldrage >7",
"drop table hhld_sample_temp",
"drop table person_pums1"]
DEFAULT_HOUSING_PUMS2000_QUERIES = ["alter table housing_pums add index(serialno)",
"alter table housing_pums add column hhtype big |
GNOME/pygtkimageview | tests/demo03.py | Python | lgpl-2.1 | 645 | 0.003101 | '''
Shows how to load SVG files in gtkimageview. It is really easy.
'''
# Legacy PyGTK (GTK 2) bindings -- this demo is Python-2-only.
import gtk
from gtk import gdk
import gtkimageview
import sys
try:
    import rsvg
except ImportError:
    # The librsvg Python binding is optional; bail out with a hint.
    print 'Sorry! rsvg is needed to run this demo.'
    sys.exit(1)
try:
    # The SVG path comes from the command line; rsvg parses it into a handle.
    handle = rsvg.Handle(sys.argv[1])
except IndexError:
    # No argument given (sys.argv[1] missing): print usage and quit.
    print 'Usage: %s SVG-image' % sys.argv[0]
    sys.exit(1)
# Rasterize the SVG into a pixbuf that gtkimageview can display.
pixbuf = handle.get_pixbuf()
# Setup the view
view = gtkimageview.ImageView()
view.set_pixbuf(pixbuf)
# Wrap the view in a scroll window inside a plain top-level window.
win = gtk.Window()
win.connect('delete-event', gtk.main_quit)
win.add(gtkimageview.ImageScrollWin(view))
win.set_default_size(400, 300)
win.show_all()
gtk.main()
|
jdfekete/progressivis | tests/__init__.py | Python | bsd-2-clause | 2,610 | 0.000383 | from __future__ import annotations
from os import getenv
import gc
import sys
from unittest import TestCase, main
from unittest import skip as skip
from unittest import skipIf as skipIf
import logging
from progressivis import Scheduler, log_level
from progressivis.storage import init_temp_dir_if, cleanup_temp_dir
import numpy as np
from typing import Any, Type, Optional
# Bind the re-exported unittest helpers to throwaway names so linters do not
# flag the `skip`/`skipIf` imports above as unused.
_ = skip # shut-up pylint
__ = skipIf
class ProgressiveTest(TestCase):
    """Base TestCase for progressivis tests.

    Seeds numpy deterministically, manages a per-test ``Scheduler``, the
    temporary storage directory, and logger configuration driven by the
    ``LOGLEVEL`` environment variable.
    """

    # Convenience aliases for the standard logging levels.
    CRITICAL = logging.CRITICAL
    ERROR = logging.ERROR
    WARNING = logging.WARNING
    INFO = logging.INFO
    DEBUG = logging.DEBUG
    NOTSET = logging.NOTSET
    # Mapping used to translate a symbolic LOGLEVEL env value to its int.
    levels = {
        "CRITICAL": logging.CRITICAL,
        "ERROR": logging.ERROR,
        "WARNING": logging.WARNING,
        "INFO": logging.INFO,
        "DEBUG": logging.DEBUG,
        "NOTSET": logging.NOTSET,
    }

    def __init__(self, *args: Any) -> None:
        super(ProgressiveTest, self).__init__(*args)
        self._output: bool = False
        self._scheduler: Optional[Scheduler] = None
        self._temp_dir_flag: bool = False
        # LOGLEVEL may be a symbolic name ("DEBUG") or a numeric string.
        level: Any = getenv("LOGLEVEL")
        if level in ProgressiveTest.levels:
            level = ProgressiveTest.levels[level]
        if level:
            print(f"Logger level {level} for {self}", file=sys.stderr)
            self.log(int(level))

    @staticmethod
    def terse(x: Any) -> None:
        """Progress callback that prints a single dot, ignoring its argument."""
        _ = x
        print(".", end="", file=sys.stderr)

    @staticmethod
    async def _stop(scheduler: Scheduler, run_number: int) -> None:
        """Coroutine callback that stops the given scheduler."""
        await scheduler.stop()

    def setUp(self) -> None:
        # Fixed seed so tests relying on random data are reproducible.
        np.random.seed(42)

    def tearDown(self) -> None:
        gc.collect()
        # Reset the root logger so handlers do not accumulate across tests.
        logger = logging.getLogger()
        logger.setLevel(logging.NOTSET)
        while logger.hasHandlers():
            logger.removeHandler(logger.handlers[0])

    @classmethod
    def cleanup(cls) -> None:
        """Remove the temporary storage directory."""
        # Fixed: the classmethod parameter was previously named `self`.
        cleanup_temp_dir()

    @classmethod
    def setUpClass(cls: Type[ProgressiveTest]) -> None:
        cleanup_temp_dir()
        init_temp_dir_if()

    @classmethod
    def tearDownClass(cls: Type[ProgressiveTest]) -> None:
        cleanup_temp_dir()

    def scheduler(self, clean: bool = False) -> Scheduler:
        """Return the cached Scheduler, creating a fresh one if needed.

        Args:
            clean: when True, always create a new Scheduler.
        """
        if self._scheduler is None or clean:
            self._scheduler = Scheduler()
        return self._scheduler

    @staticmethod
    def log(level: int = logging.NOTSET, package: str = "progressivis") -> None:
        """Set the log level for the given package."""
        log_level(level, package=package)

    @staticmethod
    def main() -> None:
        """Run the unittest entry point."""
        main()
|
pypa/virtualenv | src/virtualenv/seed/embed/via_app_data/via_app_data.py | Python | mit | 6,032 | 0.002155 | """Bootstrap"""
from __future__ import absolute_import, unicode_literals
import logging
import sys
import traceback
from contextlib import contextmanager
from subprocess import CalledProcessError
from threading import Lock, Thread
from virtualenv.info import fs_supports_symlink
from virtualenv.seed.embed.base_embed import BaseEmbed
from virtualenv.seed.wheels import get_wheel
from virtualenv.util.path import Path
from .pip_install.copy import CopyPipInstall
from .pip_install.symlink import SymlinkPipInstall
class FromAppData(BaseEmbed):
    """Seeder that installs seed wheels from cached app-data wheel images."""

    def __init__(self, options):
        super(FromAppData, self).__init__(options)
        # Whether to symlink package contents out of the app-data image
        # instead of copying them (see installer_class below).
        self.symlinks = options.symlink_app_data

    @classmethod
    def add_parser_arguments(cls, parser, interpreter, app_data):
        """Register the --symlink-app-data CLI flag."""
        super(FromAppData, cls).add_parser_arguments(parser, interpreter, app_data)
        # Symlinking only works with a persistent app-data directory on a
        # filesystem that supports symlinks.
        can_symlink = app_data.transient is False and fs_supports_symlink()
        parser.add_argument(
            "--symlink-app-data",
            dest="symlink_app_data",
            # When symlinking is impossible the flag stores False, making it
            # a no-op (the help text says so too).
            action="store_true" if can_symlink else "store_false",
            help="{} symlink the python packages from the app-data folder (requires seed pip>=19.3)".format(
                "" if can_symlink else "not supported - ",
            ),
            default=False,
        )

    def run(self, creator):
        """Install every acquired seed wheel into `creator`'s environment."""
        if not self.enabled:
            return
        with self._get_seed_wheels(creator) as name_to_whl:
            # The copy-vs-symlink installer choice depends on the pip version
            # being seeded (if pip is among the seeds at all).
            pip_version = name_to_whl["pip"].version_tuple if "pip" in name_to_whl else None
            installer_class = self.installer_class(pip_version)
            exceptions = {}

            def _install(name, wheel):
                # Worker body: build the wheel image at most once (guarded by
                # a per-image lock) and then install it; failures are
                # collected instead of raised so all threads get to finish.
                try:
                    logging.debug("install %s from wheel %s via %s", name, wheel, installer_class.__name__)
                    key = Path(installer_class.__name__) / wheel.path.stem
                    wheel_img = self.app_data.wheel_image(creator.interpreter.version_release_str, key)
                    installer = installer_class(wheel.path, creator, wheel_img)
                    parent = self.app_data.lock / wheel_img.parent
                    with parent.non_reentrant_lock_for_key(wheel_img.name):
                        if not installer.has_image():
                            installer.build_image()
                    installer.install(creator.interpreter.version_info)
                except Exception: # noqa
                    exceptions[name] = sys.exc_info()

            # Install all wheels concurrently, one thread per wheel.
            threads = list(Thread(target=_install, args=(n, w)) for n, w in name_to_whl.items())
            for thread in threads:
                thread.start()
            for thread in threads:
                thread.join()
            if exceptions:
                # Aggregate every captured traceback into a single error.
                messages = ["failed to build image {} because:".format(", ".join(exceptions.keys()))]
                for value in exceptions.values():
                    exc_type, exc_value, exc_traceback = value
                    messages.append("".join(traceback.format_exception(exc_type, exc_value, exc_traceback)))
                raise RuntimeError("\n".join(messages))

    @contextmanager
    def _get_seed_wheels(self, creator):
        """Yield a {distribution: wheel} mapping, acquiring wheels in parallel."""
        name_to_whl, lock, fail = {}, Lock(), {}

        def _get(distribution, version):
            # Worker body: resolve one distribution's wheel, recording either
            # the result or the failure under the shared lock.
            for_py_version = creator.interpreter.version_release_str
            failure, result = None, None
            # fallback to download in case the exact version is not available
            for download in [True] if self.download else [False, True]:
                failure = None
                try:
                    result = get_wheel(
                        distribution=distribution,
                        version=version,
                        for_py_version=for_py_version,
                        search_dirs=self.extra_search_dir,
                        download=download,
                        app_data=self.app_data,
                        do_periodic_update=self.periodic_update,
                        env=self.env,
                    )
                    if result is not None:
                        break
                except Exception as exception: # noqa
                    logging.exception("fail")
                    failure = exception
            if failure:
                if isinstance(failure, CalledProcessError):
                    # Build a readable message from the pip download failure.
                    msg = "failed to download {}".format(distribution)
                    if version is not None:
                        msg += " version {}".format(version)
                    msg += ", pip download exit code {}".format(failure.returncode)
                    output = failure.output if sys.version_info < (3, 5) else (failure.output + failure.stderr)
                    if output:
                        msg += "\n"
                        msg += output
                else:
                    msg = repr(failure)
                logging.error(msg)
                with lock:
                    fail[distribution] = version
            else:
                with lock:
                    name_to_whl[distribution] = result

        # One acquisition thread per configured distribution/version pair.
        threads = list(
            Thread(target=_get, args=(distribution, version))
            for distribution, version in self.distribution_to_versions().items()
        )
        for thread in threads:
            thread.start()
        for thread in threads:
            thread.join()
        if fail:
            raise RuntimeError("seed failed due to failing to download wheels {}".format(", ".join(fail.keys())))
        yield name_to_whl

    def installer_class(self, pip_version_tuple):
        """Pick the symlink installer when allowed, else the copy installer."""
        if self.symlinks and pip_version_tuple:
            # symlink support requires pip 19.3+
            if pip_version_tuple >= (19, 3):
                return SymlinkPipInstall
        return CopyPipInstall

    def __unicode__(self):
        # Append the via/app-data details just inside the base representation's
        # closing character.
        base = super(FromAppData, self).__unicode__()
        msg = ", via={}, app_data_dir={}".format("symlink" if self.symlinks else "copy", self.app_data)
        return base[:-1] + msg + base[-1]
|
ijzer/cwbot-ndy | kol/request/BountyHunterRequest.py | Python | bsd-3-clause | 3,005 | 0.007987 | from kol.database import ItemDatabase
from kol.manager import PatternManager
from kol.request.GenericRequest import GenericRequest
class BountyHunterRequest(GenericRequest):
    """Interacts with the Bounty Hunter Hunter in the Forest Village."""

    VISIT = None
    ACCEPT_BOUNTY = 'takebounty'
    ABANDON_BOUNTY = 'abandonbounty'
    BUY = 'buy'

    def __init__(self, session, action=None, item=None, quantity=None):
        """Build a Bounty Hunter Hunter request.

        Args:
            session: A valid logged in session.
            action: Optional action. None simply "visits" the Bounty Hunter
                Hunter to see which bounties are available; otherwise one of
                'takebounty', 'abandonbounty' or 'buy'.
            item: Optional item id. When accepting a bounty, the id of the
                bounty item (e.g. 2099 for hobo gristle); when buying with
                filthy lucre, the descid of the purchased item (e.g.
                810074020 for Manual of Transcendent Olfaction).
            quantity: Optional number of items bought with filthy lucre.
        """
        super(BountyHunterRequest, self).__init__(session)
        self.session = session
        self.url = session.serverURL + "bhh.php"
        self.requestData["pwd"] = session.pwd
        # Only truthy optional values are added to the request payload.
        optional_fields = (
            ('action', action), ('quantity', quantity), ('whichitem', item))
        for field, value in optional_fields:
            if value:
                self.requestData[field] = value

    def parseResponse(self):
        """Extract bounty availability, activity and daily bounties."""
        text = self.responseText
        get_pattern = PatternManager.getOrCompilePattern

        bountyAvailable = get_pattern('bountyAvailable').search(text) is not None
        bountyActive = any(
            get_pattern(name).search(text) is not None
            for name in ('bountyChosen', 'bountyActive1', 'bountyActive2')
        )
        dailyBounties = []
        if bountyAvailable:
            for match in get_pattern('dailyBountyItem').finditer(text):
                itemId = int(match.group('itemid'))
                dailyBounties.append(ItemDatabase.getItemFromId(itemId))
        self.responseData = {
            'bountyAvailable': bountyAvailable,
            'bountyActive': bountyActive,
            'dailyBounties': dailyBounties,
        }
|
benjiyamin/nimbus | nimbus/network/links/weir.py | Python | gpl-2.0 | 4,270 | 0.004215 | import math
from .link import Link
from nimbus.reports import report as rp
from nimbus.reports import input as inp
from nimbus.network.links.sections import circle as cir
from nimbus.network.links.sections import rectangle as rct
class Weir(Link):
    """A weir link passing flow between two nodes based on their stages.

    Flow over the crest uses the standard weir equation with a submergence
    correction, and transitions to an orifice equation once the section is
    fully submerged (stage above the crown). Positive flow is from node 1
    to node 2.
    """

    def __init__(self, name=None, section=None, orif_coef=None, weir_coef=None, invert=None, node1=None, node2=None):
        super(Weir, self).__init__(name, node1, node2, section)
        self.orif_coef = orif_coef  # orifice discharge coefficient
        self.weir_coef = weir_coef  # weir discharge coefficient
        self.invert = invert  # crest invert elevation (ft)
        self.report = inp.InputReport(self)

    def get_flow(self, stage1, stage2):
        """Return the flow of the weir given the stages on both sides of the link.

        Positive when stage 1 is higher (node 1 -> node 2), negative when
        stage 2 is higher. The two directions previously duplicated the
        whole computation; both now share _one_way_flow.
        """
        if stage1 > stage2:
            return self._one_way_flow(stage1, stage2)
        return -self._one_way_flow(stage2, stage1)

    def _one_way_flow(self, high, low):
        """Magnitude of the flow when `high` is the upstream stage (ft).

        Returns 0.0 when the upstream stage is below the invert.
        """
        # Crown and mid-section elevations; section.rise is in inches.
        crown = self.invert + self.section.rise / 12.0
        center = self.invert + self.section.rise / 12.0 / 2.0
        if high > crown:  # orifice flow (section fully submerged)
            if low < self.invert:  # free flow: head measured to the center
                eff_head = high - center
            else:  # submerged flow: head is the stage difference
                eff_head = high - low
            area = self.section.get_flow_area(self.section.rise)
            return self.orif_coef * area * math.sqrt(2.0 * 32.2 * eff_head)
        if high > self.invert:  # weir flow over the crest
            eff_head = high - self.invert
            flow = self.weir_coef * self.section.span / 12.0 * pow(eff_head, 1.5)
            if low > self.invert:  # downstream submergence correction
                flow *= 1.0 - pow(pow(low / high, 1.5), 0.385)
            return flow
        return 0.0

    def get_input_strings(self):
        """Return the human-readable list of input parameters for reports."""
        if self.section:
            shape_type = rp.property_to_string(self.section.__class__, '__name__')
            shape_span = rp.float_to_string(self.section.span, 3)
            shape_rise = rp.float_to_string(self.section.rise, 3)
        else:
            shape_type = 'Undefined'
            shape_span = 'Undefined'
            shape_rise = 'Undefined'
        inputs = ['Name: ' + rp.property_to_string(self, 'name'),
                  'Shape Type: ' + shape_type,
                  'Span (in): ' + shape_span,
                  'Rise (in): ' + shape_rise,
                  'Orifice Coef.: ' + rp.float_to_string(self.orif_coef, 3),
                  'Weir. Coef: ' + rp.float_to_string(self.weir_coef, 3),
                  'Invert: ' + rp.float_to_string(self.invert, 3)]
        return inputs

    def set_shape_as_rectangle(self, span, rise, horizontal=False):
        """Set the weir section to a rectangle (span/rise in inches)."""
        self.section = rct.Rectangle(span, rise, horizontal)
        return

    def set_shape_as_circle(self, diameter, horizontal=False):
        """Set the weir section to a circle (diameter in inches)."""
        self.section = cir.Circle(diameter, horizontal)
        return
|
google-research/meta-dataset | meta_dataset/data/tfds/api.py | Python | apache-2.0 | 9,094 | 0.004728 | # coding=utf-8
# Copyright 2022 The Meta-Dataset Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Meta-Dataset TFDS API."""
import concurrent.futures
import functools
from typing import Optional
from absl import logging
from meta_dataset.data import config as config_lib
from meta_dataset.data import dataset_spec as dataset_spec_lib
from meta_dataset.data import learning_spec
from meta_dataset.data import pipeline
from meta_dataset.data import reader
from meta_dataset.data import sampling
from meta_dataset.data import tfds as meta_dataset_lib
import tensorflow as tf
import tensorflow.random.experimental as tfre
import tensorflow_datasets as tfds
# Number of threads used to build per-class datasets in parallel
# (see the ThreadPoolExecutor in episode_dataset).
_NUM_WORKERS = 10
# Stateless RNG state constructor pinned to the counter-based 'threefry'
# algorithm; used to split a single shuffle seed into independent seeds.
create_rng_state = functools.partial(tf.random.create_rng_state, alg='threefry')
def episode_dataset(
    builder,
    md_version,
    meta_split,
    source_id = None,
    shuffle_seed = None,
    **as_dataset_kwargs):
  """Creates an episode dataset.

  This function creates an episode dataset for a single source. For
  multi-source pipelines, use the `meta_dataset` function.

  Args:
    builder: Meta-Dataset builder.
    md_version: Meta-Dataset md_version in {'v1', 'v2'}.
    meta_split: meta-split (case-insensitive) in {'train', 'valid', 'test'}.
    source_id: source ID to output alongside episodes, if not None.
    shuffle_seed: class dataset shuffle buffer seed.
    **as_dataset_kwargs: kwargs passed to the `as_dataset` method.

  Returns:
    The episode dataset.
  """
  # Reconstruct the DatasetSpecification stored in the builder's metadata.
  dataset_spec = dataset_spec_lib.as_dataset_spec(
      builder.info.metadata['dataset_specs'][md_version])
  data_config = config_lib.DataConfig()
  episode_description_config = config_lib.EpisodeDescriptionConfig()
  def _as_class_dataset(args):
    # Build one infinitely-repeating dataset for a single class; `args` is an
    # (index, seed) pair produced by enumerate(shuffle_seeds) below.
    relative_label, seed = args
    seed = None if seed is None else seed[0]
    class_dataset = builder.as_class_dataset(
        md_version=md_version,
        meta_split=meta_split,
        relative_label=relative_label,
        shuffle_buffer_size=data_config.shuffle_buffer_size,
        shuffle_seed=seed,
        num_prefetch=data_config.num_prefetch,
        decoders={'image': tfds.decode.SkipDecoding()},
        read_config=tfds.ReadConfig(try_autocache=False),
        as_supervised=True,
        **as_dataset_kwargs
    ).repeat()
    logging.info("Created class %d for %s's meta-%s split",
                 relative_label, builder.builder_config.name, meta_split)
    return class_dataset
  num_classes = len(dataset_spec.get_classes(getattr(learning_spec.Split,
                                                     meta_split.upper())))
  # If a shuffle seed is passed, we split it into `num_classes` independent
  # shuffle seeds so that each class datasets' shuffle buffer is seeded
  # differently. Sharing random seeds across shuffle buffers is considered bad
  # practice because it can introduce correlations across random sequences of
  # examples for different classes.
  shuffle_seeds = ([None] * num_classes if shuffle_seed is None else
                   tfre.stateless_split(create_rng_state(shuffle_seed),
                                        num_classes))
  # Build the per-class datasets in parallel.
  with concurrent.futures.ThreadPoolExecutor(
      max_workers=_NUM_WORKERS) as executor:
    class_datasets = list(executor.map(_as_class_dataset,
                                       enumerate(shuffle_seeds)))
  # Append a padding dataset yielding empty byte strings labeled with the
  # placeholder class id; the episode representation below can select it.
  placeholder_id = reader.PLACEHOLDER_CLASS_ID
  class_datasets.append(tf.data.Dataset.zip((
      tf.data.Dataset.from_tensors(b'').repeat(),
      tf.data.Dataset.from_tensors(tf.cast(placeholder_id, tf.int64)).repeat()
  )))
  sampler = sampling.EpisodeDescriptionSampler(
      dataset_spec,
      getattr(learning_spec.Split, meta_split.upper()),
      episode_description_config,
      # The DAG/bilevel class hierarchies only exist for these two sources.
      use_dag_hierarchy=builder.builder_config.name == 'ilsvrc_2012',
      use_bilevel_hierarchy=builder.builder_config.name == 'omniglot')
  chunk_sizes = sampler.compute_chunk_sizes()
  # Interleave examples from the class datasets as dictated by the sampled
  # episode representations, batch them into whole-episode chunks, and
  # post-process each chunk into an episode.
  dataset = tf.data.Dataset.choose_from_datasets(
      class_datasets,
      tf.data.Dataset.from_generator(
          functools.partial(
              reader.episode_representation_generator,
              dataset_spec=dataset_spec,
              split=getattr(learning_spec.Split, meta_split.upper()),
              pool=None,
              sampler=sampler),
          tf.int64,
          tf.TensorShape([None, 2]),
      ).map(reader.decompress_episode_representation).unbatch()
  ).batch(
      sum(chunk_sizes)
  ).prefetch(
      1
  ).map(
      functools.partial(
          pipeline.process_episode,
          chunk_sizes=chunk_sizes,
          image_size=data_config.image_height,
          simclr_episode_fraction=(
              episode_description_config.simclr_episode_fraction))
  )
  if source_id is not None:
    # Pair every episode with its constant source id.
    dataset = tf.data.Dataset.zip(
        (dataset, tf.data.Dataset.from_tensors(source_id).repeat()))
  return dataset
def full_ways_dataset(
    md_source,
    md_version,
    meta_split,
    shuffle_files=True,
    read_config=None,
    data_dir=None,
    version=None,
    **as_dataset_kwargs):
  """Creates a full-ways dataset.

  Here, "full-ways" means that the label space is constructed from all of
  `md_source`'s `meta_split` classes. For instance, calling
  `full_ways_dataset` with `md_source='aircraft'` and `meta_split='valid'`
  returns a dataset with all of Aircraft's validation classes.

  Args:
    md_source: data source from which to construct the full-ways dataset.
    md_version: Meta-Dataset md_version in {'v1', 'v2'}.
    meta_split: meta-split (case-insensitive) in {'train', 'valid', 'test'}.
    shuffle_files: whether to shuffle the input files; forwarded to the
      builder's `as_full_ways_dataset` call.
    read_config: optional `tfds.ReadConfig`. If None, a default config is
      built that interleaves reads across all shards of the selected class
      range with a block length of 4 and the ordering guard disabled.
    data_dir: TFDS data directory.
    version: dataset version at which to load the data. Note that this refers
      to the dataset implementation version, and is **not** the same as
      the benchmark verion (either 'v1' or 'v2').
    **as_dataset_kwargs: kwargs passed to the `as_dataset` method.

  Returns:
    The full-ways dataset.
  """
  builder = meta_dataset_lib.MetaDataset(
      data_dir=data_dir,
      config=md_source,
      version=version
  )
  # Class index range of this meta-split within the 'all_classes' split.
  start, stop = builder.get_start_stop(md_version, meta_split)
  read_config = read_config or tfds.ReadConfig(
      interleave_cycle_length=builder.info.splits[
          f'all_classes[{start}:{stop}]'].num_shards,
      interleave_block_length=4,
      enable_ordering_guard=False
  )
  return builder.as_full_ways_dataset(
      md_version=md_version,
      meta_split=meta_split,
      shuffle_files=shuffle_files,
      read_config=read_config,
      **as_dataset_kwargs
  )
def meta_dataset(
md_sources,
md_version,
meta_split,
shuffle_seed=None,
source_sampling_seed=None,
data_dir=None,
version=None,
**as_dataset_kwargs):
"""Creates a Meta-Dataset dataset.
This function creates an episode dataset for all sources in `md_sources`. For
single-source pipelines, use the `episode_dataset` function.
Args:
md_sources: data sources from which to draw episodes.
md_version: Meta-Dataset md_version in {'v1', 'v2'}.
meta_split: meta-split (case-insensitive) in {'train', 'valid', 'test'}.
shuffle_seed: class dataset shuffle buffer seed.
source_sampling_seed: random seed for source sampling.
data_dir: TFDS data directory.
version: dataset version at which to load the data. Note that this refers
to the dataset implementation version, and is **not** the same as
the benchmark verion (either 'v1' or 'v2').
**as_dataset_kwargs: kwargs passed to the `as_dataset` method.
Returns:
The episode dataset.
"""
episode_datasets = []
# If a shuffle seed is passed, we split it into `len(md_sources)` |
Th3l5D/lsbot | default.py | Python | mit | 5,164 | 0.004841 | import socket
import plugin
import ssl
import user
import os
class bot(object):
def __init__(self, server):
self.server = server
self.port = 6667
self.ssl = None
self.channels = []
self.connectedChannels = []
self.nick = 'default_nick'
self.realName = 'default_nick default_nick'
self.socket = None
self.debugger = True
self.allowedCommands = {
'ping': self.ping, 'privmsg': self.privmsg, 'invite': self.invite,
'join': self.join, '433': self.f433, '307':self.f307, '353':self.f353}
self.autoInvite = True
self.plugins = plugin.Plugin(self.rawSend)
self.userlist = {}
def debug(self, line):
if self.debugger is not None:
print(line)
def connect(self):
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if self.ssl is not None:
self.socket = ssl.wrap_socket(self.socket)
self.socket.connect((self.server, self.port))
self.authenticate()
def authenticate(self):
self.rawSend('NICK', self.nick)
self.rawSend('USER', ' '.join((self.nick, self.nick, self.realName)))
def joinChannel(self, channel = None):
if channel is not None:
self.channels.append(channel)
for chan in self.channels:
chan = chan.lower()
if chan not in self.connectedChannels:
self.rawSend('JOIN', chan)
self.connectedChannels.append(chan)
def rawSend(self, command, content, dest = ''):
line = ' '.join((command, dest, content, '\r\n'))
self.debug(line)
self.socket.send(bytes(line, 'UTF-8'))
def splitLine(self, line):
datas_dict = {}
if line.startswith(':'):
datas_dict['from'], line = line[1:].split(' ', 1)
datas_dict['from'] = datas_dict['from'].split('!')[0]
datas = line.split(' ', 1)
datas_dict['co | mmand'] = datas[0]
if datas_dict['command'].isdigit():
# numeric commands are server response and don't follow any logic. annoying :/
# so we just put the whole line into content. Parsing is done in functions
datas_dict['content'] = datas[1]
else:
splited = datas[1].split(':', 1)
if len(splited) > 1:
datas_dict['to'] = splited[0].strip().lower()
datas_dict['conten | t'] = splited[1]
else:
datas_dict['to'], datas_dict['content'] = splited[0].split(' ', 1)
return datas_dict
def parseLine(self, line):
self.debug(line)
datas = self.splitLine(line)
self.debug(datas)
if datas['command'].lower() in self.allowedCommands.keys():
self.allowedCommands[datas['command'].lower()](datas)
if datas['command'] == 'MODE':
self.joinChannel()
pass
def listen(self):
queue = ''
while(1):
raw = self.socket.recv(1024).decode('UTF-8', 'replace')
queue = ''.join((queue, raw))
splited = queue.split('\r\n')
if len(splited) > 1:
for line in splited[:-1]:
self.parseLine(line)
queue = splited[-1]
# received commands
def ping(self, datas):
self.rawSend('PONG', datas['content'])
def invite(self, datas):
if self.autoInvite:
self.joinChannel(datas['content'])
def privmsg(self, datas):
if(datas['to'] in self.connectedChannels):
# get first word, to check if it's a plugin
word = datas['content'].split(' ', 1)[0]
if(word.startswith('!') and word[1:].isalnum()):
self.plugins.execute(datas, self.userlist)
    def join(self, datas):
        # WHOIS every user who joins so their identification status gets tracked
        self.whois(datas['from'])
def f433(self, datas):
# nickname is already in use.
self.debug('nick utilise. Adding a _')
b.nick = b.nick+'_'
self.authenticate()
    def f307(self, datas):
        """Numeric 307 (WHOIS reply): mark the named user as identified with services."""
        # user is identified
        user = datas['content'].split()[1]
        self.userlist[user].identified = True
        self.debug(self.userlist)
    def f353(self, datas):
        """Numeric 353 (NAMES reply): WHOIS every user listed for the channel."""
        # list users connected to a channel
        users = datas['content'].split(':')[1].split()
        for user in users:
            self.whois(user)
# send commands
    def whois(self, username):
        """Send WHOIS for a nick (mode prefixes stripped) and register it in userlist."""
        # strip IRC channel-mode prefixes (+ voice, @ op, etc.)
        username = username.strip('+&@~')
        self.rawSend('WHOIS', '', username)
        self.userlist[username] = user.user(username)
if __name__ == '__main__':
    # Read the bot configuration (key = value lines) from config.ini next to
    # this script, build the bot, and start the blocking listen loop.
    conf_file = open(os.path.dirname(os.path.realpath(__file__))+'/config.ini').readlines()
    config = {}
    for line in conf_file:
        stripped = line.strip()
        # fix: the old test `line.strip()[0] is not '#'` crashed on blank lines
        # (IndexError) and relied on CPython string interning for `is not`
        if stripped and not stripped.startswith('#'):
            splited = line.split('=', 1)
            config[splited[0].strip()] = splited[1].strip()
    b = bot(config['server'])
    b.nick = config['nick']
    b.ssl = config['ssl']
    b.port = int(config['port'])
    for chan in config['channels'].split(','):
        b.channels.append(chan.strip())
    b.connect()
    b.listen()
|
ack8006/hapipy | hapi/nurturing.py | Python | apache-2.0 | 890 | 0.01236 | from base import BaseClient
NURTURING_API_VERSION = '1'


class NurturingClient(BaseClient):
    """Client for the HubSpot lead-nurturing API (campaigns, leads, enrollment)."""

    def _get_path(self, subpath):
        # every endpoint lives under nurture/v<version>/
        return 'nurture/v%s/%s' % (NURTURING_API_VERSION, subpath)

    def get_campaigns(self, **options):
        """List all nurturing campaigns."""
        return self._call('campaigns', **options)

    def get_leads(self, campaign_guid, **options):
        """List the leads enrolled in a campaign."""
        return self._call('campaign/%s/list' % campaign_guid, **options)

    def get_history(self, lead_guid, **options):
        """Fetch the nurturing history for one lead."""
        return self._call('lead/%s' % lead_guid, **options)

    def enroll_lead(self, campaign_guid, lead_guid, **options):
        """Add a lead to a campaign."""
        return self._call('campaign/%s/add' % campaign_guid, data=lead_guid, method='POST', **options)

    def unenroll_lead(self, campaign_guid, lead_guid, **options):
        """Remove a lead from a campaign."""
        # fix: this return statement was garbled in the original source
        return self._call('campaign/%s/remove' % campaign_guid, data=lead_guid, method='POST', **options)
|
untergeek/es_stats_zabbix | es_stats_zabbix/backend/stat.py | Python | apache-2.0 | 1,693 | 0.002363 | """
Stat module for flask_restful
"""
import json
import logging
from dotmap import DotMap
from flask import request
from flask_restful import Resource
from es_stats_zabbix.exceptions import EmptyResult
from es_stats_zabbix.helpers.utils import status_map
class Stat(Resource):
    """Stat Resource class for flask_restful.

    Wraps an es_stats stat object and exposes it as a GET/POST endpoint that
    returns Zabbix-friendly values.
    """
    def __init__(self, statobj):
        self.statobj = statobj
        # Turn <class 'es_stats.classes.ClusterHealth'> into just "ClusterHealth"
        statclass = str(type(statobj)).split('.')[-1].split("'")[0]
        self.logger = logging.getLogger('es_stats_zabbix.Stat.{0}'.format(statclass))

    def get(self, key):
        """GET method — same behavior as POST."""
        return self.post(key)

    def post(self, key):
        """POST method: look up `key` (optionally scoped to a node) and normalize the result."""
        self.logger.debug('request.data contents = {}'.format(request.data))
        node = None
        if request.data != b'':
            # Must decode to 'utf-8' for older versions of Python
            json_data = json.loads(request.data.decode('utf-8'))
            node = json_data['node'] if 'node' in json_data else None
        self.logger.debug('Node = {0} -- key = {1}'.format(node, key))
        result = self.statobj.get(key, name=node)
        # Remap for `status`
        if key == 'status':
            result = status_map(result)
        # an empty DotMap means the key was not found at all
        if result == DotMap():
            return {'message':'ZBX_NOTFOUND'}, 404
        # Zabbix wants numeric booleans
        if str(result).strip().lower() == 'false':
            return 0, 200
        if str(result).strip().lower() == 'true':
            return 1, 200
        if result == '':
            self.logger.error('Empty value')
            raise EmptyResult('No result received.')
        return result, 200
|
piratf/Game_Boxes | lib/div.py | Python | gpl-3.0 | 404 | 0.014851 | # -*- coding: utf-8 -*
# Filename: div.py
__author__ = 'Piratf'
from pygame.locals import *
import pygame
class Div(object):
"""small panel in the frame"""
def __init__(self, (width, height), (x, y) = (0, 0)):
# super(Div, self).__init__()
self.width = width
self.height = height
self.x = x;
self.y = y;
self.rect = Rect((x, y), (widt | h, height)) |
JulyKikuAkita/PythonPrac | cs15211/SearchinRotatedSortedArray.py | Python | apache-2.0 | 6,100 | 0.003443 | __source__ = 'https://leetcode.com/problems/search-in-rotated-sorted-array/'
# https://github.com/kamyu104/LeetCode/blob/master/Python/search-in-rotated-sorted-array.py
# Time: O(logn)
# Space: O(1)
# Binary Search
#
# Description: Leetcode # 33. Search in Rotated Sorted Array
#
# Suppose a sorted array is rotated at some pivot unknown to you beforehand.
#
# (i.e., 0 1 2 4 5 6 7 might become 4 5 6 7 0 1 2).
#
# You are given a target value to search. If found in the array return its index, otherwise return -1.
#
# You may assume no duplicate exists in the array.
# Companies
# LinkedIn Bloomberg Uber Facebook Microsoft
# Related Topics
# Binary Search Array
# Similar Questions
# Search in Rotated Sorted Array II Find Minimum in Rotated Sorted Array
#
import unittest
class Solution:
    """Binary search over a rotated sorted array using half-open bounds."""
    # @param A, a list of integers
    # @param target, an integer to be searched
    # @return an integer
    def search(self, A, target):
        lo, hi = 0, len(A)
        while lo < hi:
            mid = lo + (hi - lo) // 2
            if A[mid] == target:
                return mid
            if A[lo] <= A[mid]:
                # left half [lo, mid] is sorted
                if A[lo] <= target < A[mid]:
                    hi = mid
                else:
                    lo = mid + 1
            else:
                # right half (mid, hi) is sorted
                if A[mid] < target <= A[hi - 1]:
                    lo = mid + 1
                else:
                    hi = mid
        return -1
class SolutionCC150:
    """CC150-style rotated binary search over closed bounds [lo, hi]."""
    # @param A, a list of integers
    # @param target, an integer to be searched
    # @return an integer
    def search(self, A, target):
        lo, hi = 0, len(A) - 1
        while lo <= hi:
            mid = (lo + hi) // 2
            pivot = A[mid]
            if pivot == target:
                return mid
            if A[lo] <= pivot:
                # left run is sorted: descend left only if target sits inside it
                if A[lo] <= target < pivot:
                    hi = mid - 1
                else:
                    lo = mid + 1
            else:
                # right run is sorted: descend right only if target sits inside it
                if pivot < target <= A[hi]:
                    lo = mid + 1
                else:
                    hi = mid - 1
        return -1
class SolutionOther:
    """Closed-interval rotated binary search; assumes no duplicate values."""
    # @param A, a list of integers
    # @param target, an integer to be searched
    # @return an integer
    def search(self, A, target):
        begin, end = 0, len(A) - 1
        while begin <= end:
            mid = begin + ((end - begin) >> 1)
            pivot = A[mid]
            if pivot == target:
                return mid
            shrink_right = False
            if pivot > A[begin]:
                # left run [begin, mid) is sorted
                shrink_right = A[begin] <= target < pivot
            elif pivot < A[begin]:
                # right run (mid, end] is sorted; go left unless target is in it
                shrink_right = not (pivot < target <= A[end])
            if shrink_right:
                end = mid - 1
            else:
                begin = mid + 1
        return -1
class Solution3(object):
    """Rotated binary search that narrows to two candidates and checks both."""
    def search(self, nums, target):
        """
        :type nums: List[int]
        :type target: int
        :rtype: int
        """
        if not nums:
            return -1
        lo, hi = 0, len(nums) - 1
        # shrink until lo and hi are adjacent, then test the two survivors
        while lo + 1 < hi:
            mid = lo + (hi - lo) // 2
            if nums[mid] == target:
                return mid
            if nums[mid] >= nums[lo]:   # note: >= is required here
                if nums[lo] <= target <= nums[mid]:
                    hi = mid
                else:
                    lo = mid
            else:
                if nums[mid] <= target <= nums[hi]:
                    lo = mid
                else:
                    hi = mid
        if nums[lo] == target:
            return lo
        if nums[hi] == target:
            return hi
        return -1
class TestMethods(unittest.TestCase):
    def test_Local(self):
        """Smoke test: print sample search results (Python 2 print statements)."""
        self.assertEqual(1, 1)
        print Solution().search([3, 5, 1], 3)
        print Solution().search([1], 1)
        print Solution().search([4, 5, 6, 7, 0, 1, 2], 5)
        print
        print SolutionCC150().search([3, 5, 1], 3)
        print SolutionCC150().search([1], 1)
        print SolutionCC150().search([4, 5, 6, 7, 0, 1, 2], 5)
        print SolutionCC150().search([1, 3, 5], 1)
if __name__ == '__main__':
    unittest.main()  # run the smoke test above when executed directly
# Reference explanation and Java implementations, kept as a module-level string.
# fix: two lines inside this string were garbled in the original source.
Java = '''
# Thought:
Explanation
Let's say nums looks like this: [12, 13, 14, 15, 16, 17, 18, 19, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
Because it's not fully sorted, we can't do normal binary search. But here comes the trick:
If target is let's say 14, then we adjust nums to this, where "inf" means infinity:
[12, 13, 14, 15, 16, 17, 18, 19, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf]
If target is let's say 7, then we adjust nums to this:
[-inf, -inf, -inf, -inf, -inf, -inf, -inf, -inf, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
And then we can simply do ordinary binary search.

# 10ms 24.16%
class Solution {
    public int search(int[] nums, int target) {
        int left = 0;
        int right = nums.length - 1;
        while (left <= right) {
            int mid = left + (right - left) / 2;
            if (nums[mid] == target) {
                return mid;
            }
            if (nums[left] <= nums[mid]) { //need to have equal here, otherwise fail,
            dunno why cannot at else part ex: [3,1] and target = 1
                if (nums[left] < target && target <= nums[mid]) { //here all equal also pass
                    right = mid - 1;
                } else {
                    left = mid + 1;
                }
            } else {
                if (nums[mid] < target && target <= nums[right]) { //here all equal also pass
                    left = mid + 1;
                } else {
                    right = mid - 1;
                }
            }
        }
        return -1;
    }
}

Note: unable to use Collections.binarySearch
public class Solution {
    public int search(int[] nums, int target) {
        List<Integer> res = new LinkedList<>();
        for (int n: nums) res.add(n);
        int idx = Collections.binarySearch(res, target);
        return idx < 0 ? -1 : idx;
    }
}
'''
|
mrambausek/PPFem | ppfem/elements/lagrange_elements.py | Python | gpl-3.0 | 3,551 | 0.002816 | # PPFem: An educational finite element code
# Copyright (C) 2015 Matthias Rambausek
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from ppfem.geometry.point import Point
from ppfem.elements.base import ReferenceElement
from ppfem.elements.lagrange_basis import LagrangeBasis
import numpy as np
class LagrangeElement(ReferenceElement):
    """Reference finite element equipped with a nodal (Lagrange) basis."""

    def __init__(self, degree, dimension=1):
        ReferenceElement.__init__(self, degree, dimension)

    def interpolate_function(self, function, mapping=None):
        """
        This implementation shows the characteristic property of Lagrange Elements:
        interpolation is just evaluation at the support points.
        :param function: a callable f(p) where p is a Point (or a coordinate array of size space_dim()) and the result
        is of dimension dimension()
        :param mapping: a Mapping instance to compute the "physical" coordinates of a point in reference space
        """
        if mapping is not None:
            points = mapping.map_points(self.get_support_points())
        else:
            points = self.get_support_points()
        return np.array([function(p.coords()) for p in points])

    def function_value(self, dof_values, point):
        """Evaluate the interpolated function at `point` from nodal dof values."""
        # first array axis corresponds to basis function!
        if self._dimension == 1:
            return np.dot(self.basis_function_values(point).reshape(1, self._n_bases), dof_values)
        else:
            return np.einsum('ijk,ijk->jk', dof_values, self.basis_function_values(point))

    def function_gradient(self, dof_values, point, jacobian_inv=None):
        """Evaluate the gradient of the interpolated function at `point`."""
        # first array axis corresponds to basis function!
        # fix: this line was garbled in the original source
        if self._dimension == 1:
            return np.dot(self.basis_function_gradients(point, jacobian_inv=jacobian_inv).reshape(dof_values.shape).T,
                          dof_values)
        elif self.space_dim() > 1:
            return np.einsum('ijk,ijkl->jkl',
                             dof_values,
                             self.basis_function_gradients(point, jacobian_inv=jacobian_inv))
        elif self.space_dim() == 1:
            return np.einsum('ijk,ijk->jk',
                             dof_values,
                             self.basis_function_gradients(point, jacobian_inv=jacobian_inv))
class LagrangeLine(LagrangeElement):
    """1D Lagrange element on the reference interval [-1, 1]."""
    def __init__(self, degree, dimension=1):
        LagrangeElement.__init__(self, degree, dimension=dimension)
    def _setup_basis(self):
        # one Lagrange basis function per support point; the two endpoint dofs
        # are external, the rest internal
        support_points = self.get_support_points()
        self._n_bases = len(support_points)
        self._n_dofs = self._n_bases * self._dimension
        self._n_internal_dofs = self._n_dofs - 2
        self._basis_functions = [LagrangeBasis(support_points, i, dimension=self._dimension)
                                 for i in range(len(support_points))]
    def get_support_points(self):
        # endpoints first, then degree-1 equally spaced interior points
        # NOTE(review): `i * 2/(n-1)` would truncate under Python 2 integer
        # division — confirm this module runs under Python 3
        n = self._degree + 1
        return [Point(-1), Point(1)] + [Point(-1 + i * 2/(n-1), index=i) for i in range(1, n-1)]
    @staticmethod
    def space_dim():
        return 1
|
lankier/brigantina | runserver.py | Python | gpl-3.0 | 400 | 0.006135 | #!/usr/bin/env | python
# -*- mode: python; coding: utf-8; -*-
import sys, os
# to reach the data files, change the working directory to the one
# that contains this launcher script
dir = os.path.dirname(os.path.abspath(__file__))
# fix: this call was garbled in the original source
os.chdir(dir)
from brigantina import code
# here we go!
code.webapp.run() # stand alone server
|
OssecTN/codejam-2014 | python/magic_trick/__init__.py | Python | gpl-2.0 | 251 | 0.007968 | #!/usr/bin/env python
__version__ = "0.1.0"
__date__ = "2014- | 04-11 1:42:05"
__author__ = "Dhia Abbassi"
__email__ = "dhiaabbassi90@gmail.com"
__license__ = "GPL"
__maintainer__ = ""
__doc__=""" Snip | pet of code that solves <Problem A. Magic Trick> """ |
Hybrid-Cloud/cinder | cinder/volume/drivers/netapp/eseries/library.py | Python | apache-2.0 | 92,586 | 0.000032 | # Copyright (c) 2015 Alex Meade
# Copyright (c) 2015 Rushil Chugh
# Copyright (c) 2015 Navneet Singh
# Copyright (c) 2015 Yogesh Kshirsagar
# Copyright (c) 2015 Jose Porrua
# Copyright (c) 2015 Michael Price
# Copyright (c) 2015 Tom Barron
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import math
import socket
import time
import uuid
from oslo_config import cfg
from oslo_log import log as logging
from oslo_log import versionutils
from oslo_service import loopingcall
from oslo_utils import excutils
from oslo_utils import units
import six
from cinder import exception
from cinder.i18n import _, _LE, _LI, _LW
from cinder import utils as cinder_utils
from cinder.volume.drivers.netapp.eseries import client
from cinder.volume.drivers.netapp.eseries import exception as eseries_exc
from cinder.volume.drivers.netapp.eseries import host_mapper
from cinder.volume.drivers.netapp.eseries import utils
from cinder.volume.drivers.netapp import options as na_opts
from cinder.volume.drivers.netapp import utils as na_utils
from cinder.volume import utils as volume_utils
from cinder.zonemanager import utils as fczm_utils
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
@six.add_metaclass(cinder_utils.TraceWrapperMetaclass)
class NetAppESeriesLibrary(object):
"""Executes commands relating to Volumes."""
DRIVER_NAME = 'NetApp_iSCSI_ESeries'
AUTOSUPPORT_INTERVAL_SECONDS = 3600 # hourly
VERSION = "1.0.0"
REQUIRED_FLAGS = ['netapp_server_hostname', 'netapp_controller_ips',
'netapp_login', 'netapp_password']
SLEEP_SECS = 5
HOST_TYPES = {'factoryDefault': 'FactoryDefault',
'linux_atto': 'LnxTPGSALUA',
'linux_dm_mp': 'LnxALUA',
'linux_mpp_rdac': 'LNX',
'linux_pathmanager': 'LnxTPGSALUA_PM',
'linux_sf': 'LnxTPGSALUA_SF',
'ontap': 'ONTAP_ALUA',
'ontap_rdac': 'ONTAP_RDAC',
'vmware': 'VmwTPGSALUA',
'windows': 'W2KNETNCL',
'windows_atto': 'WinTPGSALUA',
'windows_clustered': 'W2KNETCL',
}
# NOTE(ameade): This maps what is reported by the e-series api to a
# consistent set of values that are reported by all NetApp drivers
# to the cinder scheduler.
SSC_DISK_TYPE_MAPPING = {
'scsi': 'SCSI',
'fibre': 'FCAL',
'sas': 'SAS',
'sata': 'SATA',
'ssd': 'SSD',
}
SSC_RAID_TYPE_MAPPING = {
'raidDiskPool': 'DDP',
'raid0': 'raid0',
'raid1': 'raid1',
# RAID3 is being deprecated and is actually implemented as RAID5
'raid3': 'raid5',
'raid5': 'raid5',
'raid6': 'raid6',
}
READ_CACHE_Q_SPEC = 'netapp:read_cache'
WRITE_CACHE_Q_SPEC = 'netapp:write_cache'
DA_UQ_SPEC = 'netapp_eseries_data_assurance'
FLASH_CACHE_UQ_SPEC = 'netapp_eseries_flash_read_cache'
DISK_TYPE_UQ_SPEC = 'netapp_disk_type'
ENCRYPTION_UQ_SPEC = 'netapp_disk_encryption'
SPINDLE_SPD_UQ_SPEC = 'netapp_eseries_disk_spindle_speed'
RAID_UQ_SPEC = 'netapp_raid_type'
THIN_UQ_SPEC = 'netapp_thin_provisioned'
SSC_UPDATE_INTERVAL = 60 # seconds
SA_COMM_TIMEOUT = 30
WORLDWIDENAME = 'worldWideName'
DEFAULT_HOST_TYPE = 'linux_dm_mp'
DEFAULT_CHAP_USER_NAME = 'eserieschapuser'
# Define name marker string to use in snapshot groups that are for copying
# volumes. This is to differentiate them from ordinary snapshot groups.
SNAPSHOT_VOL_COPY_SUFFIX = 'SGCV'
# Define a name marker string used to identify snapshot volumes that have
# an underlying snapshot that is awaiting deletion.
SNAPSHOT_VOL_DEL_SUFFIX = '_DEL'
# Maximum number of s | napshots per snapshot group
MAX_SNAPSHOT_COUNT = 32
# | Maximum number of snapshot groups
MAX_SNAPSHOT_GROUP_COUNT = 4
RESERVED_SNAPSHOT_GROUP_COUNT = 1
SNAPSHOT_PERSISTENT_STORE_KEY = 'cinder-snapshots'
SNAPSHOT_PERSISTENT_STORE_LOCK = str(uuid.uuid4())
    def __init__(self, driver_name, driver_protocol="iSCSI",
                 configuration=None, **kwargs):
        """Record driver identity and register every E-Series config option group."""
        self.configuration = configuration
        self._app_version = kwargs.pop("app_version", "unknown")
        self.configuration.append_config_values(na_opts.netapp_basicauth_opts)
        self.configuration.append_config_values(
            na_opts.netapp_connection_opts)
        self.configuration.append_config_values(na_opts.netapp_transport_opts)
        self.configuration.append_config_values(na_opts.netapp_eseries_opts)
        self.configuration.append_config_values(na_opts.netapp_san_opts)
        self.lookup_service = fczm_utils.create_lookup_service()
        # fall back to a fixed backend name when none is configured
        self._backend_name = self.configuration.safe_get(
            "volume_backend_name") or "NetApp_ESeries"
        self.driver_name = driver_name
        self.driver_protocol = driver_protocol
        self._stats = {}
        self._ssc_stats = {}
    def do_setup(self, context):
        """Any initialization the volume driver does while starting."""
        self.context = context
        na_utils.check_flags(self.REQUIRED_FLAGS, self.configuration)
        self._client = self._create_rest_client(self.configuration)
        self._check_mode_get_or_register_storage_system()
        self._version_check()
        if self.configuration.netapp_enable_multiattach:
            # multiattach needs a shared host group on the array
            self._ensure_multi_attach_host_group_exists()
    def _create_rest_client(self, configuration):
        """Build a SANtricity WebServices proxy REST client from driver config."""
        port = configuration.netapp_server_port
        scheme = configuration.netapp_transport_type.lower()
        if port is None:
            # default proxy ports per transport scheme
            if scheme == 'http':
                port = 8080
            elif scheme == 'https':
                port = 8443
        return client.RestClient(
            scheme=scheme,
            host=configuration.netapp_server_hostname,
            port=port,
            service_path=configuration.netapp_webservice_path,
            username=configuration.netapp_login,
            password=configuration.netapp_password)
def _version_check(self):
"""Ensure that the minimum version of the REST API is available"""
if not self._client.features.REST_1_4_RELEASE:
min_version = (
self._client.features.REST_1_4_RELEASE.minimum_version)
raise exception.NetAppDriverException(
'This version (%(cur)s of the NetApp SANtricity Webservices '
'Proxy is not supported. Install version %(supp)s or '
'later.' % {'cur': self._client.api_version,
'supp': min_version})
    def _start_periodic_tasks(self):
        """Kick off the recurring SSC refresh and autosupport reporting loops."""
        ssc_periodic_task = loopingcall.FixedIntervalLoopingCall(
            self._update_ssc_info)
        ssc_periodic_task.start(interval=self.SSC_UPDATE_INTERVAL)
        # Start the task that logs autosupport (ASUP) data to the controller
        asup_periodic_task = loopingcall.FixedIntervalLoopingCall(
            self._create_asup, CONF.host)
        asup_periodic_task.start(interval=self.AUTOSUPPORT_INTERVAL_SECONDS,
                                 initial_delay=0)
    def check_for_setup_error(self):
        """Validate host/multipath configuration, then wait for the array."""
        self._check_host_type()
        self._check_multipath()
        # It is important that this be called before any other methods that
        # interact with the storage-system. It blocks until the
        # storage-system comes online.
        self._check_storage_system()
        self._check_pools()
        self._start_periodic_tasks()
def _check_host_type(self):
"""Validate that |
bwinton/svg-magic | magic.py | Python | mpl-2.0 | 14,704 | 0.000272 | #! /usr/bin/env python
# coding=utf-8
import argparse
from clint.textui.colored import red, green, blue
from clint.textui import progress
import copy
import csv
import io
from lxml import etree
import os
import subprocess
import sys
from time import sleep
SPRITESHEET_SVG = io.BytesIO('''<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg version="1.1"
xmlns="http://www.w3.org/2000/svg"
x="0"
y="0">
</svg>''')
EXTRA_CLASSES = '{http://www.w3.org/2000/svg}extra-classes'
EXTRA_THEME = '{http://www.w3.org/2000/svg}extra-theme'
EXTRA_SCALE = '{http://www.w3.org/2000/svg}extra-scale'
EXTRA_TAGS = [EXTRA_CLASSES, EXTRA_THEME, EXTRA_SCALE]
class Usage(Exception):
    """Error carrying a printable message plus the offending data tuple."""
    def __init__(self, msg, data):
        self.msg, self.data = msg, data
def processManifest(args):
    """Read sprites.mf (CSV of "filename, spritesheet") and group images per sheet.

    Raises Usage when the manifest or any referenced image file is missing.
    """
    manifestPath = os.path.join(args.baseDir, 'sprites.mf')
    if not os.path.exists(manifestPath):
        raise Usage('Manifest not found at %s.' %
                    (red(manifestPath, bold=True)),
                    (manifestPath,))
    lineCount = len(open(manifestPath).readlines())
    manifest = csv.DictReader(open(manifestPath), skipinitialspace=True)
    manifest.fieldnames = ['filename', 'spritesheet']
    spritesheets = {}
    for line in progress.bar(manifest,
                             label='Reading Manifest: ',
                             expected_size=lineCount):
        sheet = line['spritesheet']
        image = line['filename']
        imagePath = os.path.join(args.baseDir, image)
        if not os.path.exists(imagePath):
            raise Usage('Image not found at %s from %s, %s.' %
                        (red(imagePath, bold=True),
                         blue(manifestPath, bold=True),
                         blue('line ' + str(manifest.line_num), bold=True)),
                        (imagePath, manifestPath, manifest.line_num))
        # one Spritesheet per sheet name, created lazily
        spritesheets.setdefault(sheet, Spritesheet(sheet)).addImage(image)
    return spritesheets.values()
class Image(object):
    """One SVG icon: parses its tree, collects <use> references and dimensions."""
    def __init__(self, name, path):
        self.name = name.replace('.svg', '')
        self.path = path
        self.hasClass = False
        self.tree = etree.parse(self.path)
        self.parseTree()
    def __str__(self):
        return 'Image<' + self.path + '>'
    def __repr__(self):
        return self.__str__()
    def parseTree(self):
        """Strip <use> elements (keeping their hrefs) and record size of the single child."""
        # Get (and remove) the use statements.
        self.uses = self.tree.findall('{http://www.w3.org/2000/svg}use')
        for use in self.uses:
            use.getparent().remove(use)
        self.uses = [use.attrib['{http://www.w3.org/1999/xlink}href']
                     for use in self.uses]
        # Get the one remaining child;
        self.child = self.tree.getroot().getchildren()
        if len(self.child) != 1:
            raise Usage('More than one child in %s.' %
                        (red(self.path, bold=True),),
                        (self.path,))
        self.child = self.child[0]
        # And the width/height, and maybe the viewbox.
        attrib = self.tree.getroot().attrib
        self.width = int(attrib['width'])
        self.height = int(attrib['height'])
class Spritesheet(object):
    def __init__(self, name, images=None):
        """Collect image names under one spritesheet identifier."""
        self.name = name
        # avoid the mutable-default-argument trap
        if images is None:
            images = []
        self.images = images
        self.isTheme = False
| def __str__(self):
| rv = 'Spritesheet<' + self.name
if self.isTheme:
rv += ':theme'
rv += '>'
return rv
    def __repr__(self):
        # delegate so repr() and str() show the same summary
        return self.__str__()
    def addImage(self, image):
        """Queue an image (by name) for inclusion in this spritesheet."""
        self.images.append(image)
    def loadStyle(self, style):
        """Read a CSS file and wrap its text in a CDATA node for embedding in SVG."""
        data = open(style).read()
        return etree.CDATA('\n' + data + '\n')
    def loadDef(self, filename):
        """Parse an SVG defs file and return its top-level child elements."""
        tree = etree.parse(filename)
        defs = tree.getroot().getchildren()
        return defs
    def getAlternates(self, image):
        """Return the image plus one deep-copied variant per declared extra class."""
        alternates = [image]
        seenClasses = set()
        for use in image.uses:
            for element in self.usemap[use]:
                if element.tag == EXTRA_CLASSES:
                    classes = element.attrib['value'].split()
                    for classname in classes:
                        # each class name produces at most one alternate
                        if classname not in seenClasses:
                            seenClasses.add(classname)
                            alternate = copy.deepcopy(image)
                            alternate.child.attrib['class'] = classname
                            alternate.child.attrib['id'] += '-' + classname
                            alternate.name += '-' + classname
                            alternate.hasClass = True
                            alternates.append(alternate)
        return alternates
    def getStyles(self, uses):
        """Collect unique text/css xml-stylesheet hrefs declared before each use file's root."""
        styles = []
        styleNames = set()
        for use in uses:
            tree = etree.parse(use)
            pi = tree.getroot()
            # walk the processing instructions preceding the root element
            while pi is not None:
                if (isinstance(pi, etree._ProcessingInstruction) and
                        pi.target == 'xml-stylesheet' and
                        pi.attrib['type'] == 'text/css' and
                        pi.attrib['href'] not in styleNames):
                    styles.append(pi.attrib['href'])
                    styleNames.add(pi.attrib['href'])
                pi = pi.getprevious()
        return styles
    def getThemes(self, uses):
        """Map theme name -> stylesheet href from <extra-theme> elements."""
        themes = {}
        for use in uses:
            for element in use:
                if (element.tag == EXTRA_THEME):
                    # <extra-theme name="luna-blue" href="luna-blue.css"/>
                    themes[element.attrib['name']] = element.attrib['href']
        return themes
    def getScales(self, uses):
        """Map scale name -> factor from <extra-scale> elements; '' -> '1' is implied."""
        scales = {'': '1'}
        for use in uses:
            for element in use:
                if (element.tag == EXTRA_SCALE):
                    # <extra-scale name="Retina" value="2"/>
                    scales[element.attrib['name']] = element.attrib['value']
        return scales
    def getVariants(self, variant):
        """Build the base spritesheet for `variant` plus one shallow copy per theme."""
        new = Spritesheet(self.name)
        styles = set()
        uses = set()
        # first pass: collect every <use> reference across all images
        for image in self.images:
            newImage = Image(image, variant.getFile(image))
            uses.update(newImage.uses)
        # print newImage.name, newImage.width, newImage.height
        # Get the stylesheets…
        styles = self.getStyles(variant.getFile(use) for use in uses)
        new.styles = [self.loadStyle(variant.getDefsFile(style))
                      for style in styles]
        new.usemap = {use: self.loadDef(variant.getDefsFile(use))
                      for use in uses}
        new.uses = [new.usemap[use] for use in sorted(uses)]
        # second pass: expand each image into its class alternates
        for image in self.images:
            newImage = Image(image, variant.getFile(image))
            new.images.extend(new.getAlternates(newImage))
        variants = [new]
        new.scales = self.getScales(new.uses)
        new.themes = self.getThemes(new.uses)
        # one themed copy per declared theme, sharing data but with its own styles
        for theme, style in new.themes.items():
            alternate = copy.copy(new)
            alternate.name += '-' + theme
            alternate.isTheme = True
            alternate.scales = copy.copy(new.scales)
            alternate.styles = copy.copy(new.styles)
            style = variant.getDefsFile(style)
            alternate.styles.append(alternate.loadStyle(style))
            variants.append(alternate)
        return variants
def write(self, output, png):
imageExt = 'svg'
if png:
imageExt = 'png'
tree = etree.parse(SPRITESHEET_SVG)
root = tree.getroot()
root.text = '\n\n '
for style in self.styles:
styleElem = etree.Element('style')
styleElem.text = style
styleElem.tail = '\n\n '
root.append(styleElem)
for use in self.uses:
use = [element for element in use if element.tag not in EXTRA_TAGS]
if len(use):
use[-1].tail = '\n\n '
root.extend(use)
height = 0
width = 0
|
denz/geophys | gnss/data/satellite.py | Python | gpl-3.0 | 1,759 | 0.000569 | from gnss.base import Blocks, BlocksRegistry
__all__ = []  # nothing exported yet; the RINEX handler classes below are still commented out
# | class RinexD(Blocks, metaclass=BlocksRegistry):
# '''Hatanaka compressed GNSS observation'''
# @classmethod
# def valid_filetype(self, src):
# return src.endswith('.rinex-d')
# class RinexN(Blocks, metaclass=BlocksRegistry):
# '''GPS navigation file'''
# @classmethod
# def valid_filetype(self, src):
# return src.endswith('.rinex-n')
# class RinexG(Blocks, metaclass=BlocksRegistry):
# ' | ''GLONASS navigation file'''
# @classmethod
# def valid_filetype(self, src):
# return src.endswith('.rinex-g')
# class RinexL(Blocks, metaclass=BlocksRegistry):
# '''Galileo navigation file'''
# @classmethod
# def valid_filetype(self, src):
# return src.endswith('.rinex-l')
# class RinexQ(Blocks, metaclass=BlocksRegistry):
# '''QZSS navigation file'''
# @classmethod
# def valid_filetype(self, src):
# return src.endswith('.rinex-q')
# class RinexP(Blocks, metaclass=BlocksRegistry):
# '''mixed GNSS navigation file'''
# @classmethod
# def valid_filetype(self, src):
# return src.endswith('.rinex-p')
# class RinexM(Blocks, metaclass=BlocksRegistry):
# '''meteorological observation file'''
# @classmethod
# def valid_filetype(self, src):
# return src.endswith('.rinex-m')
# class RinexS(Blocks, metaclass=BlocksRegistry):
# '''GNSS observation summary file'''
# @classmethod
# def valid_filetype(self, src):
# return src.endswith('.rinex-s')
# class Brdc(Blocks, metaclass=BlocksRegistry):
# '''daily GPS or GLONASS broadcast ephemerides'''
# @classmethod
# def valid_filetype(self, src):
# return src.endswith('.brdc') |
androidx/androidx | leanback/leanback/generatef.py | Python | apache-2.0 | 6,367 | 0.007068 | #!/usr/bin/env python
# Copyright (C) 2017 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import re
# Generate the framework (android.app) Fragment variants from the support
# (androidx.fragment.app) sources, rewriting package references and adding
# @Deprecated annotations.  Python 2 script.
print "Generate framework fragment related code for leanback"
cls = ['Base', 'BaseRow', 'Browse', 'Details', 'Error', 'Headers',
       'Playback', 'Rows', 'Search', 'VerticalGrid', 'Branded',
       'GuidedStep', 'Onboarding', 'Video']
for w in cls:
    print "copy {}SupportFragment to {}Fragment".format(w, w)
    file = open('src/main/java/androidx/leanback/app/{}SupportFragment.java'.format(w), 'r')
    content = "// CHECKSTYLE:OFF Generated code\n"
    content = content + "/* This file is auto-generated from {}SupportFragment.java. DO NOT MODIFY. */\n\n".format(w)
    for line in file:
        line = line.replace('IS_FRAMEWORK_FRAGMENT = false', 'IS_FRAMEWORK_FRAGMENT = true');
        # rewrite every sibling class reference, then the androidx imports
        for w2 in cls:
            line = line.replace('{}SupportFragment'.format(w2), '{}Fragment'.format(w2))
        line = line.replace('androidx.fragment.app.FragmentActivity', 'android.app.Activity')
        line = line.replace('androidx.fragment.app.Fragment', 'android.app.Fragment')
        line = line.replace('activity.getSupportFragmentManager()', 'activity.getFragmentManager()')
        line = line.replace('FragmentActivity activity', 'Activity activity')
        line = line.replace('FragmentActivity#onBackPressed', 'Activity#onBackPressed')
        line = line.replace('(FragmentActivity', '(Activity')
        line = line.replace('setEnterTransition(enterTransition)', 'setEnterTransition((android.transition.Transition) enterTransition)');
        line = line.replace('setSharedElementEnterTransition(sharedElementTransition)', 'setSharedElementEnterTransition((android.transition.Transition) sharedElementTransition)');
        line = line.replace('setExitTransition(exitTransition)', 'setExitTransition((android.transition.Transition) exitTransition)');
        line = line.replace('requestPermissions(new', 'PermissionHelper.requestPermissions(SearchFragment.this, new');
        # replace getContext() with FragmentUtil.getContext(XXXFragment.this), but dont match the case "view.getContext()"
        line = re.sub(r'([^\.])getContext\(\)', r'\1FragmentUtil.getContext({}Fragment.this)'.format(w), line);
        content = content + line
    file.close()
    # add deprecated tag to fragment class and inner classes/interfaces
    content = re.sub(r'\*\/\n(@.*\n|)(public |abstract public |abstract |)class', '* @deprecated use {@link ' + w + 'SupportFragment}\n */\n@Deprecated\n\\1\\2class', content)
    content = re.sub(r'\*\/\n  public (static class|interface|final static class|abstract static class)', '* @deprecated use {@link ' + w + 'SupportFragment}\n */\n  @Deprecated\n  public \\1', content)
    outfile = open('src/main/java/androidx/leanback/app/{}Fragment.java'.format(w), 'w')
    outfile.write(content)
    outfile.close()
# Same transformation for VideoSupportFragmentGlueHost -> VideoFragmentGlueHost.
print "copy VideoSupportFragmentGlueHost to VideoFragmentGlueHost"
file = open('src/main/java/androidx/leanback/app/VideoSupportFragmentGlueHost.java', 'r')
content = "// CHECKSTYLE:OFF Generated code\n"
content = content + "/* This file is auto-generated from VideoSupportFragmentGlueHost.java. DO NOT MODIFY. */\n\n"
for line in file:
    line = line.replace('androidx.fragment.app.Fragment', 'android.app.Fragment')
    line = line.replace('VideoSupportFragment', 'VideoFragment')
    line = line.replace('PlaybackSupportFragment', 'PlaybackFragment')
    content = content + line
file.close()
# add deprecated tag to class
content = re.sub(r'\*\/\npublic class', '* @deprecated use {@link VideoSupportFragmentGlueHost}\n */\n@Deprecated\npublic class', content)
outfile = open('src/main/java/androidx/leanback/app/VideoFragmentGlueHost.java', 'w')
outfile.write(content)
outfile.close()
print "copy PlaybackSupportFragmentGlueHost to PlaybackFragmentGlueHost"
file = open('src/main/java/androidx/leanback/app/PlaybackSupportFragmentGlueHost.java', 'r')
content = "// CHECKSTYLE:OFF Generated code\n"
content = content + "/* This file is auto-generated from {}PlaybackSupportFragmentGlueHost.java. DO NOT MODIFY. */\n\n"
for line in file:
line = line.replace('VideoSupportFragment', 'VideoFragment')
line = line.replace('PlaybackSupportFragment', 'PlaybackFragment')
line = line.replace('androidx.fragment.app.Fragment', 'android.app.Fragment')
content = content + line
file.close()
# add deprecated tag to class
content = re.sub(r'\*\/\npublic class', '* @deprecated use {@link PlaybackSupportFragmentGlueHost}\n */\n@Deprecated\npublic class', content)
outfile = open('src/main/java/androidx/leanback/app/PlaybackFragmentGlueHost.java', 'w')
outfile.write(content)
outfile.close()
print "copy DetailsSupportFragmentBackgroundController to DetailsFragmentBackgroundController"
file = open('src/main/java/androidx/leanback/app/DetailsSupportFragmentBackgroundController.java', 'r')
content = "// CHECKSTYLE:OFF Generated code\n"
content = content + "/* This file is auto-generated from {}DetailsSupportFragmentBackgroundController.java. DO NOT MODIFY. */\n\n"
for line in file:
line = line.replace('VideoSupportFragment', 'VideoFragment')
line = line.replace('DetailsSupportFragment', 'DetailsFragment')
line = line.replace('RowsSupportFragment', 'RowsFragment')
line = line.replace('androidx.fragment.app.Fragment', 'android.app.Fragment')
line = line.replace('mFragment.getContext()', 'FragmentUtil.getContext(mFragment)')
content = content + line
file.close()
# add deprecated tag to class
content = re.sub(r'\*\/\npublic class', '* @deprecated use {@link DetailsSupportFragmentBackgroundController}\n */\n@Deprecated\npublic class', content)
outfile = open('src/main/java/androidx/leanback/app/DetailsFragmentBackgroundController.java', 'w')
outfile.wr | ite(content)
outfil | e.close()
|
kenshay/ImageScript | ProgramData/SystemFiles/Python/Lib/site-packages/scipy/odr/odrpack.py | Python | gpl-3.0 | 41,283 | 0.000654 | """
Python wrappers for Orthogonal Distance Regression (ODRPACK).
Notes
=====
* Array formats -- FORTRAN stores its arrays in memory column first, i.e. an
array element A(i, j, k) will be next to A(i+1, j, k). In C and, consequently,
NumPy, arrays are stored row first: A[i, j, k] is next to A[i, j, k+1]. For
efficiency and convenience, the input and output arrays of the fitting
function (and its Jacobians) are passed to FORTRAN without transposition.
Therefore, where the ODRPACK documentation says that the X array is of shape
(N, M), it will be passed to the Python function as an array of shape (M, N).
If M==1, the one-dimensional case, then nothing matters; if M>1, then your
Python functions will be dealing with arrays that are indexed in reverse of
the ODRPACK documentation. No real biggie, but watch out for your indexing of
the Jacobians: the i,j'th elements (@f_i/@x_j) evaluated at the n'th
observation will be returned as jacd[j, i, n]. Except for the Jacobians, it
really is easier to deal with x[0] and x[1] than x[:,0] and x[:,1]. Of course,
you can always use the transpose() function from scipy explicitly.
* Examples -- See the accompanying file test/test.py for examples of how to set
up fits of your own. Some are taken from the User's Guide; some are from
other sources.
* Models -- Some common models are instantiated in the accompanying module
models.py . Contributions are welcome.
Credits
=======
* Thanks to Arnold Moene and Gerard Vermeulen for fixing some killer bugs.
Robert Kern
robert.kern@gmail.com
"""
from __future__ import division, print_function, absolute_import
import numpy
from warnings import warn
from scipy.odr import __odrpack
__all__ = ['odr', 'OdrWarning', 'OdrError', 'OdrStop',
'Data', 'RealData', 'Model', 'Output', 'ODR',
'odr_error', 'odr_stop']
odr = __odrpack.odr
class OdrWarning(UserWarning):
    """Warning category for problematic input data.

    Emitted when the data handed to ODR is likely to cause trouble once it
    reaches the underlying 'odr' routine; the user should be aware of it.
    """
    pass
class OdrError(Exception):
    """Raised by `scipy.odr` when an error occurs during fitting."""
    pass
class OdrStop(Exception):
    """Raise inside an objective function to tell `scipy.odr` to stop fitting."""
    pass
# Backwards compatibility
odr_error = OdrError
odr_stop = OdrStop
__odrpack._set_exceptions(OdrError, OdrStop)
def _conv(obj, dtype=None):
""" Convert an object to the preferred form for input to the odr routine.
"""
if obj is None:
return obj
else:
if dtype is None:
obj = numpy.asarray(obj)
else:
obj = numpy.asarray(obj, dtype)
if obj.shape == ():
# Scalar.
return obj.dtype.type(obj)
else:
return obj
def _report_error(info):
""" Interprets the return code of the odr routine.
Parameters
----------
info : int
The return code of the odr routine.
Returns
-------
problems : list(str)
A list of messages about why the odr() routine stopped.
"""
stopreason = ('Blank',
'Sum of squares convergence',
'Parameter convergence',
'Both sum of squares and parameter convergence',
'Iteration limit reached')[info % 5]
if info >= 5:
# questionable results or fatal error
I = (info//10000 % 10,
info//1000 % 10,
info//100 % 10,
info//10 % 10,
info % 10)
problems = []
if I[0] == 0:
if I[1] != 0:
problems.append('Derivatives possibly not correct')
if I[2] != 0:
problems.append('Error occurred in callback')
if I[3] != 0:
problems.append('Problem is not full rank at solution')
problems.append(stopreason)
elif I[0] == 1:
if I[1] != 0:
| problems.appe | nd('N < 1')
if I[2] != 0:
problems.append('M < 1')
if I[3] != 0:
problems.append('NP < 1 or NP > N')
if I[4] != 0:
problems.append('NQ < 1')
elif I[0] == 2:
if I[1] != 0:
problems.append('LDY and/or LDX incorrect')
if I[2] != 0:
problems.append('LDWE, LD2WE, LDWD, and/or LD2WD incorrect')
if I[3] != 0:
problems.append('LDIFX, LDSTPD, and/or LDSCLD incorrect')
if I[4] != 0:
problems.append('LWORK and/or LIWORK too small')
elif I[0] == 3:
if I[1] != 0:
problems.append('STPB and/or STPD incorrect')
if I[2] != 0:
problems.append('SCLB and/or SCLD incorrect')
if I[3] != 0:
problems.append('WE incorrect')
if I[4] != 0:
problems.append('WD incorrect')
elif I[0] == 4:
problems.append('Error in derivatives')
elif I[0] == 5:
problems.append('Error occurred in callback')
elif I[0] == 6:
problems.append('Numerical error detected')
return problems
else:
return [stopreason]
class Data(object):
"""
The data to fit.
Parameters
----------
x : array_like
Observed data for the independent variable of the regression
y : array_like, optional
If array-like, observed data for the dependent variable of the
regression. A scalar input implies that the model to be used on
the data is implicit.
we : array_like, optional
If `we` is a scalar, then that value is used for all data points (and
all dimensions of the response variable).
If `we` is a rank-1 array of length q (the dimensionality of the
response variable), then this vector is the diagonal of the covariant
weighting matrix for all data points.
If `we` is a rank-1 array of length n (the number of data points), then
the i'th element is the weight for the i'th response variable
observation (single-dimensional only).
If `we` is a rank-2 array of shape (q, q), then this is the full
covariant weighting matrix broadcast to each observation.
If `we` is a rank-2 array of shape (q, n), then `we[:,i]` is the
diagonal of the covariant weighting matrix for the i'th observation.
If `we` is a rank-3 array of shape (q, q, n), then `we[:,:,i]` is the
full specification of the covariant weighting matrix for each
observation.
If the fit is implicit, then only a positive scalar value is used.
wd : array_like, optional
If `wd` is a scalar, then that value is used for all data points
(and all dimensions of the input variable). If `wd` = 0, then the
covariant weighting matrix for each observation is set to the identity
matrix (so each dimension of each observation has the same weight).
If `wd` is a rank-1 array of length m (the dimensionality of the input
variable), then this vector is the diagonal of the covariant weighting
matrix for all data points.
If `wd` is a rank-1 array of length n (the number of data points), then
the i'th element is the weight for the i'th input variable observation
(single-dimensional only).
If `wd` is a rank-2 array of shape (m, m), then this is the full
covariant weighting matrix broadcast to each observation.
If `wd` is a rank-2 array of shape (m, n), then `wd[:,i]` is the
diagonal of the covariant weighting matrix for the i'th observation.
If `wd` is a rank-3 array of shape (m, m, n), then `wd[:,:,i]` is the
full specification of the covariant weighting matrix for each
observation.
fix : array_like of ints, optional
The `fix` argument is the same as ifixx in the class ODR. It is |
joaduo/mepinta | core/python_core/mepinta/testing/plugins_testing/ModuleAutoTesterUtil.py | Python | gpl-3.0 | 990 | 0.00101 | # -*- coding: utf-8 -*-
'''
Mepinta
Copyright (c) 2011-2012, Joaquin G. Duo
This file is part of Mepinta.
Mepinta is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Mepinta is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Mepinta. If not, see <http://www.gnu.org/licenses/>.
'''
from mepinta.testing.plugins_testing.base import ModuleAutoTesterBase
class ModuleAutoTesterUtil(ModuleAutoTesterBase):
    """Auto-tester specialization; adds no behavior beyond ModuleAutoTesterBase."""
    pass
def testModule():
    """Smoke test: building the default context must not raise."""
    from getDefaultContext import getDefaultContext
    context = getDefaultContext()
if __name__ == "__main__":
testModule()
|
redhat-openstack/ironic | ironic/common/keystone.py | Python | apache-2.0 | 5,538 | 0 | # coding=utf-8
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import exceptions as ksexception
from oslo_concurrency import lockutils
from oslo_config import cfg
from six.moves.urllib import parse
from ironic.common import exception
from ironic.common.i18n import _
CONF = cfg.CONF
keystone_opts = [
cfg.StrOpt('region_name',
help=_('The region used for getting endpoints of OpenStack'
'services.')),
]
CONF.register_opts(keystone_opts, group='keystone')
CONF.import_group('keystone_authtoken', 'keystonemiddleware.auth_token')
_KS_CLIENT = None
def _is_apiv3(auth_url, auth_version):
"""Checks if V3 version of API is being used or not.
This method inspects auth_url and auth_version, and checks whether V3
version of the API is being used or not.
:param auth_url: a http or https url to be inspected (like
'http://127.0.0.1:9898/').
:param auth_version: a string containing the version (like 'v2', 'v3.0')
:returns: True if V3 of the API is being used.
"""
return auth_version == 'v3.0' or '/v3' in parse.urlparse(auth_url).path
def _get_ksclient(token=None):
    """Build a keystone client from [keystone_authtoken] configuration.

    :param token: optional pre-existing auth token; when given, the client
        authenticates with it instead of the admin credentials.
    :raises: KeystoneFailure if the API endpoint is unset or authorization
        fails; KeystoneUnauthorized on bad credentials.
    """
    auth_url = CONF.keystone_authtoken.auth_uri
    if not auth_url:
        raise exception.KeystoneFailure(_('Keystone API endpoint is missing'))
    auth_version = CONF.keystone_authtoken.auth_version
    api_v3 = _is_apiv3(auth_url, auth_version)
    # Import lazily so only the needed API version's client is loaded.
    if api_v3:
        from keystoneclient.v3 import client
    else:
        from keystoneclient.v2_0 import client
    auth_url = get_keystone_url(auth_url, auth_version)
    try:
        if token:
            return client.Client(token=token, auth_url=auth_url)
        else:
            # No token: authenticate with the configured admin credentials
            # through the cached, lock-protected helper.
            params = {'username': CONF.keystone_authtoken.admin_user,
                      'password': CONF.keystone_authtoken.admin_password,
                      'tenant_name': CONF.keystone_authtoken.admin_tenant_name,
                      'region_name': CONF.keystone.region_name,
                      'auth_url': auth_url}
            return _get_ksclient_from_conf(client, **params)
    except ksexception.Unauthorized:
        raise exception.KeystoneUnauthorized()
    except ksexception.AuthorizationFailure as err:
        raise exception.KeystoneFailure(_('Could not authorize in Keystone:'
                                          ' %s') % err)
@lockutils.synchronized('keystone_client', 'ironic-')
def _get_ksclient_from_conf(client, **params):
    """Return the cached admin keystone client, re-creating it near expiry.

    :param client: keystoneclient module (v2_0 or v3) providing Client.
    :param params: keyword arguments forwarded to client.Client().
    """
    global _KS_CLIENT
    # NOTE(yuriyz): use Keystone client default gap, to determine whether the
    # given token is about to expire
    if _KS_CLIENT is None or _KS_CLIENT.auth_ref.will_expire_soon():
        _KS_CLIENT = client.Client(**params)
    return _KS_CLIENT
def get_keystone_url(auth_url, auth_version):
    """Gives an http/https url to contact keystone.

    Given an auth_url and auth_version, this method generates the url in
    which keystone can be reached.

    :param auth_url: a http or https url to be inspected (like
        'http://127.0.0.1:9898/').
    :param auth_version: a string containing the version (like v2, v3.0, etc)
    :returns: a string containing the keystone url
    """
    api_v3 = _is_apiv3(auth_url, auth_version)
    # fixed: this assignment was garbled in the source ("a | pi_version")
    api_version = 'v3' if api_v3 else 'v2.0'
    # NOTE(lucasagomes): Get rid of the trailing '/' otherwise urljoin()
    # fails to override the version in the URL
    return parse.urljoin(auth_url.rstrip('/'), api_version)
def get_service_url(service_type='baremetal', endpoint_type='internal'):
    """Wrapper for get service url from keystone service catalog.

    Given a service_type and an endpoint_type, this method queries keystone
    service catalog and provides the url for the desired endpoint.

    :param service_type: the keystone service for which url is required.
    :param endpoint_type: the type of endpoint for the service.
    :returns: an http/https url for the desired endpoint.
    """
    # fixed: the parameter name in the signature was garbled in the source
    # ("endpoint | _type").
    ksclient = _get_ksclient()
    if not ksclient.has_service_catalog():
        raise exception.KeystoneFailure(_('No Keystone service catalog '
                                          'loaded'))
    try:
        endpoint = ksclient.service_catalog.url_for(
            service_type=service_type,
            endpoint_type=endpoint_type,
            region_name=CONF.keystone.region_name)
    except ksexception.EndpointNotFound:
        raise exception.CatalogNotFound(service_type=service_type,
                                        endpoint_type=endpoint_type)
    return endpoint
def get_admin_auth_token():
    """Get an admin auth_token from the Keystone.

    :returns: the auth token obtained with the configured admin credentials.
    """
    ksclient = _get_ksclient()
    return ksclient.auth_token
def token_expires_soon(token, duration=None):
    """Determines if token expiration is about to occur.

    :param token: keystone auth token to check.
    :param duration: time interval in seconds
    :returns: boolean : true if expiration is within the given duration
    """
    ksclient = _get_ksclient(token=token)
    return ksclient.auth_ref.will_expire_soon(stale_duration=duration)
|
destijl/grr | grr/client/stdlib.py | Python | apache-2.0 | 8,367 | 0.045417 | #!/usr/bin/env python
"""This file imports the Python std lib so it can be used by components."""
# pylint: disable=g-import-not-at-top, unused-import, using-constant-test
if False:
import BaseHTTPServer
import CGIHTTPServer
import ConfigParser
import Cookie
import DocXMLRPCServer
import HTMLParser
import MimeWriter
import Queue
import SimpleHTTPServer
import SimpleXMLRPCServer
import SocketServer
import StringIO
import UserDict
import UserList
import UserString
import _LWPCookieJar
import _abcoll
# https://github.com/pyinstaller/pyinstaller/issues/1425
import _cffi_backend
import _osx_support
import _pyio
import _strptime
import _sysconfigdata
import _threading_local
import _weakrefset
import abc
import aifc
import anydbm
import argparse
import ast
import asynchat
import asyncore
import atexit
import audiodev
import base64
import b | db
import binhex
import bisect
import bsddb.db
import bsddb.dbobj
import bsddb.dbrecio
import bsddb.dbshelve
import bsddb.dbtables
import bsddb.dbutils
import cProfile
import calendar
import cgi
import cgitb
import chunk
import | cmd
import code
import codecs
import codeop
import collections
import colorsys
import commands
import compileall
import compiler.ast
import compiler.consts
import compiler.future
import compiler.misc
import compiler.pyassem
import compiler.pycodegen
import compiler.symbols
import compiler.syntax
import compiler.transformer
import compiler.visitor
import contextlib
import cookielib
import copy
import copy_reg
import csv
import ctypes._endian
import ctypes.util
import curses.ascii
import curses.has_key
import curses.panel
import curses.textpad
import curses.wrapper
import dbhash
import decimal
import difflib
import dircache
import dis
import distutils.archive_util
import distutils.bcppcompiler
import distutils.ccompiler
import distutils.cmd
import distutils.command.bdist
import distutils.command.bdist_dumb
import distutils.command.bdist_rpm
import distutils.command.bdist_wininst
import distutils.command.build
import distutils.command.build_clib
import distutils.command.build_ext
import distutils.command.build_py
import distutils.command.build_scripts
import distutils.command.check
import distutils.command.clean
import distutils.command.config
import distutils.command.install
import distutils.command.install_data
import distutils.command.install_egg_info
import distutils.command.install_headers
import distutils.command.install_lib
import distutils.command.install_scripts
import distutils.command.register
import distutils.command.sdist
import distutils.command.upload
import distutils.config
import distutils.core
import distutils.cygwinccompiler
import distutils.debug
import distutils.dep_util
import distutils.dir_util
import distutils.dist
import distutils.emxccompiler
import distutils.errors
import distutils.extension
import distutils.fancy_getopt
import distutils.file_util
import distutils.filelist
import distutils.log
import distutils.spawn
import distutils.sysconfig
import distutils.text_file
import distutils.unixccompiler
import distutils.util
import distutils.version
import distutils.versionpredicate
import doctest
import dumbdbm
import dummy_thread
import dummy_threading
import email._parseaddr
import email.base64mime
import email.charset
import email.encoders
import email.errors
import email.feedparser
import email.generator
import email.header
import email.iterators
import email.message
import email.mime.application
import email.mime.audio
import email.mime.base
import email.mime.image
import email.mime.message
import email.mime.multipart
import email.mime.nonmultipart
import email.mime.text
import email.parser
import email.quoprimime
import email.utils
import filecmp
import fileinput
import fnmatch
import formatter
import fpformat
import fractions
import ftplib
import functools
import genericpath
import getopt
import getpass
import gettext
import glob
import gzip
import hashlib
import heapq
import hmac
import hotshot.log
import hotshot.stats
import hotshot.stones
import htmlentitydefs
import htmllib
import httplib
import ihooks
import imaplib
import imghdr
import imputil
import inspect
import io
import json.decoder
import json.encoder
import json.scanner
import json.tool
import keyword
import linecache
import locale
import logging.config
import logging.handlers
import macpath
import macurl2path
import mailbox
import mailcap
import markupbase
import md5
import mhlib
import mimetools
import mimetypes
import mimify
import modulefinder
import multifile
import multiprocessing.connection
import multiprocessing.dummy.connection
import multiprocessing.forking
import multiprocessing.heap
import multiprocessing.managers
import multiprocessing.pool
import multiprocessing.process
import multiprocessing.queues
import multiprocessing.reduction
import multiprocessing.sharedctypes
import multiprocessing.synchronize
import multiprocessing.util
import mutex
import netrc
import new
import nntplib
import ntpath
import nturl2path
import numbers
import opcode
import optparse
import os
import os2emxpath
import pdb
import pickle
import pickletools
import pipes
import pkgutil
import platform
import plistlib
import popen2
import poplib
import posixfile
import posixpath
import pprint
import profile
import pstats
import pty
import py_compile
import pyclbr
import pydoc
import pydoc_data.topics
import quopri
import random
import re
import rfc822
import rlcompleter
import robotparser
import runpy
import sched
import sets
import sgmllib
import sha
import shelve
import shlex
import shutil
import site
import smtpd
import smtplib
import sndhdr
import socket
import sqlite3.dbapi2
import sqlite3.dump
import sre
import sre_compile
import sre_constants
import sre_parse
import ssl
import stat
import statvfs
import string
import stringold
import stringprep
import struct
import subprocess
import sunau
import sunaudio
import symbol
import symtable
import sysconfig
import tabnanny
import tarfile
import telnetlib
import tempfile
import test.pystone
import test.regrtest
import test.test_support
import textwrap
import this
import threading
import timeit
import toaiff
import token
import tokenize
import trace
import traceback
import tty
import types
import urllib
import urllib2
import urlparse
import user
import uu
import uuid
import warnings
import wave
import weakref
import whichdb
import wsgiref.handlers
import wsgiref.headers
import wsgiref.simple_server
import wsgiref.util
import wsgiref.validate
import xdrlib
import xml.dom.NodeFilter
import xml.dom.domreg
import xml.dom.expatbuilder
import xml.dom.minicompat
import xml.dom.minidom
import xml.dom.pulldom
import xml.dom.xmlbuilder
import xml.etree.ElementInclude
import xml.etree.ElementPath
import xml.etree.ElementTree
import xml.etree.cElementTree
import xml.parsers.expat
import xml.sax._exceptions
import xml.sax.expatreader
import xml.sax.handler
import xml.sax.saxutils
import xml.sax.xmlreader
import xmllib
import xmlrpclib
import zipfile
# lib-dynload
import audioop
import _bsddb
import bz2
import _codecs_cn
import _codecs_hk
import _codecs_iso2022
import _codecs_jp
import _codecs_kr
import _codecs_tw
import crypt
import _csv
import _ctypes_test
import _ctypes
import _curses_panel
import _curses
import datetime
import dbm
import _elementtree
import fpectl
import future_builtins
import gdbm
import _hashlib
import _hotshot
import _json
import linuxaudiodev
import _lsprof
import mmap
import _multibytecodec
import _multiprocessing
|
foursquare/commons-old | src/python/twitter/common/process/__init__.py | Python | apache-2.0 | 1,910 | 0.004712 | __author__ = 'Brian Wickman'
from process_provider_ps import ProcessProvider_PS
from process_provider_procfs import ProcessProvider_Procfs
class ProcessProviderFactory(object):
  """
  A factory for producing platform-appropriate ProcessProviders.

  Typical use-cases:

    Import
      >>> from twitter.common.process import ProcessProviderFactory
      >>> ps = ProcessProviderFactory.get()

    Run a collection of all pids
      >>> ps.collect_all()

    Get a ProcessHandle to the init process
      >>> init = ps.get_handle(1)
      >>> init
      <twitter.common.process.process_handle_ps.ProcessHandlePs object at 0x1004ad950>

    Get stats
      >>> init.cpu_time()
      7980.0600000000004
      >>> init.user()
      'root'
      >>> init.wall_time()
      6485509.0
      >>> init.pid()
      1
      >>> init.ppid()
      0

    Refresh stats
      >>> init.refresh()
      >>> init.cpu_time()
      7982.9700000000003

    Introspect the process tree
      >>> list(ps.children_of(init.pid()))
      [10, 11, 12, 13, 14, 15, 16, 17, 26, 32, 37, 38, 39, 40, 42, 43, 45,
      51, 59, 73, 108, 140, 153, 157, 162, 166, 552, 1712, 1968, 38897,
      58862, 63321, 64513, 66458, 68598, 78610, 85633, 91019, 97271]

    Aggregations
      >>> sum(map(lambda pid: ps.get_handle(pid).cpu_time(), ps.children_of(init.pid())))
      228574.40999999995

    Collect data from a subset of processes
      >>> ps.collect_set(ps.children_of(init.pid()))

    Re-evaluate
      >>> sum(map(lambda pid: ps.get_handle(pid).cpu_time(), ps.children_of(init.pid())))
      228642.19999999998
  """
  # Ordered by preference: procfs is tried before falling back to ps.
  PROVIDERS = [
    ProcessProvider_Procfs,
    ProcessProvider_PS
  ]
  @staticmethod
  def get():
    """
    Return a platform-specific ProcessProvider.
    """
    for provider in ProcessProviderFactory.PROVIDERS:
      if provider._platform_compatible():
        return provider()
|
tpltnt/SimpleCV | SimpleCV/ImageClass.py | Python | bsd-3-clause | 517,130 | 0.0112 | from __future__ import print_function
from __future__ import absolute_import
# Load required libraries
from SimpleCV.base import *
from SimpleCV.Color import *
from SimpleCV.LineScan import *
from numpy import int32
from numpy import uint8
import cv2
from .EXIF import *
if not init_options_handler.headless:
import pygame as pg
import scipy.ndimage as ndimage
import scipy.stats.stats as sss # for auto white balance
import scipy.cluster.vq as scv
import scipy.linalg as nla # for linear algebra / least squares
import math
import copy # for deep copy
try:
basestring
except NameError: # Python 3
basestring = (str, bytes)
class ColorSpace:
    """
    **SUMMARY**

    The colorspace class is used to encapsulate the color space of a given
    image.  This class acts like a C/C++ style enumerated type: the values
    are plain integer class attributes (UNKNOWN through YCrCb).

    See: http://stackoverflow.com/questions/2122706/detect-color-space-with-opencv
    """
    UNKNOWN = 0
    BGR = 1
    GRAY = 2
    RGB = 3
    HLS = 4
    HSV = 5
    XYZ = 6
    YCrCb = 7
class ImageSet(list):
"""
**SUMMARY**
This is an abstract class for keeping a list of images. It has a few
advantages in that you can use it to auto load data sets from a directory
or the net.
Keep in mind it inherits from a list too, so all the functionality a
normal python list has this will too.
**EXAMPLES**
>>> imgs = ImageSet()
>>> imgs.download("ninjas")
>>> imgs.show(ninjas)
or you can load a directory path:
>>> imgs = ImageSet('/path/to/imgs/')
>>> imgs.show()
This will download and show a bunch of random ninjas. If you want to
save all those images locally then just use:
>>> imgs.save()
You can also load up the sample images that come with simplecv as:
>>> imgs = ImageSet('samples')
    >>> imgs.filelist
>>> logo = imgs.find('simplecv.png')
**TO DO**
    Eventually this should allow us to pull image urls / paths from csv files.
    The method should also allow us to associate an arbitrary bunch of data with each
image, and on load/save pickle that data or write it to a CSV file.
"""
filelist = None
def __init__(self, directory=None):
if not directory:
return
if isinstance(directory, list):
if isinstance(directory[0], Image):
super(ImageSet, self).__init__(directory)
elif isinstance(directory[0], str) or isinstance(directory[0], unicode):
super(ImageSet, self).__init__(map(Image, directory))
elif directory.lower() == 'samples' or directory.lower() == 'sample':
pth = LAUNCH_PATH
pth = os.path.realpath(pth)
directory = os.path.join(pth, 'sampleimages')
self.load(directory)
else:
self.load(directory)
    def download(self, tag=None, number=10, size='thumb'):
        """
        **SUMMARY**

        This function downloads images from Google Image search based
        on the tag you provide. The number is the number of images you
        want to have in the list. Valid values for size are 'thumb', 'small',
        'medium', 'large' or a tuple of exact dimensions i.e. (640,480).
        Note that 'thumb' is exceptionally faster than others.

        .. Warning::
          This requires the python library Beautiful Soup to be installed
          http://www.crummy.com/software/BeautifulSoup/

        **PARAMETERS**

        * *tag* - A string of tag values you would like to download.
        * *number* - An integer of the number of images to try and download.
        * *size* - the size of the images to download. Valid options a tuple
          of the exact size or a string of the following approximate sizes:

          * thumb ~ less than 128x128
          * small ~ approximately less than 640x480 but larger than 128x128
          * medium ~  approximately less than 1024x768 but larger than 640x480.
          * large ~ > 1024x768

        **RETURNS**

        Nothing - but caches local copy of images.

        **EXAMPLE**

        >>> imgs = ImageSet()
        >>> imgs.download("ninjas")
        >>> imgs.show(ninjas)
        """
        try:
            from BeautifulSoup import BeautifulSoup
        except:
            print("You need to install Beatutiul Soup to use this function")
            print("to install you can use:")
            print("easy_install beautifulsoup")
            return
        INVALID_SIZE_MSG = """I don't understand what size images you want.
          Valid options: 'thumb', 'small', 'medium', 'large'
          or a tuple of exact dimensions i.e. (640,480)."""
        if isinstance(size, basestring):
            size = size.lower()
            if size == 'thumb':
                size_param = ''
            elif size == 'small':
                size_param = '&tbs=isz:s'
            elif size == 'medium':
                size_param = '&tbs=isz:m'
            elif size == 'large':
                size_param = '&tbs=isz:l'
            else:
                print(INVALID_SIZE_MSG)
                return None
        elif type(size) == tuple:
            width, height = size
            size_param = '&tbs=isz:ex,iszw:' + str(width) + ',iszh:' + str(height)
        else:
            print(INVALID_SIZE_MSG)
            return None
        # Used to extract imgurl parameter value from a URL
        imgurl_re = re.compile('(?<=(&|\?)imgurl=)[^&]*((?=&)|$)')
        add_set = ImageSet()
        candidate_count = 0
        # NOTE(review): candidate_count is never advanced inside this loop;
        # if a results page yields fewer than `number` usable images the same
        # page is fetched again -- confirm whether an increment is missing.
        while len(add_set) < number:
            opener = urllib2.build_opener()
            opener.addheaders = [('User-agent', 'Mozilla/5.0')]
            url = ("http://www.google.com/search?tbm=isch&q=" + urllib2.quote(tag) +
                   size_param + "&start=" + str(candidate_count))
            page = opener.open(url)
            soup = BeautifulSoup(page)
            img_urls = []
            # Gets URLs of the thumbnail images
            if size == 'thumb':
                imgs = soup.findAll('img')
                for img in imgs:
                    dl_url = str(dict(img.attrs)['src'])
                    img_urls.append(dl_url)
            # Gets the direct image URLs
            else:
                for link_tag in soup.findAll('a', {'href': re.compile('imgurl=')}):
                    dirty_url = link_tag.get('href')  # URL to an image as given by Google Images
                    dl_url = str(re.search(imgurl_re, dirty_url).group())  # the direct URL to the image
                    img_urls.append(dl_url)
            for dl_url in img_urls:
                try:
                    add_img = Image(dl_url, verbose=False)
                    # Don't know a better way to check if the image was actually returned
                    if add_img.height != 0 and add_img.width != 0:
                        add_set.append(add_img)
                except:
                    # do nothing
                    None
                if len(add_set) >= number:
                    break
        self.extend(add_set)
def upload(self, dest, api_key=None, api_secret=None, verbose=True):
"""
**SUMMARY**
Uploads all the images to imgur or flickr or dropbox. In verbose mode URL values are printed.
**PARAMETERS**
* *api_key* - a string of the API key.
* *api_secret* - (required only for flickr and dropbox ) a string of the API secret.
* *verbose* - If verbose is true all values are printed to the screen
**RETURNS**
if uploading is successful
- Imgur return the original image URL on success and None if it fails.
- flickr returns True on success, else returns False.
- dropbox returns True on success.
**EXAMPLE**
TO upload image to imgur::
>>> imgset = ImageSet("/home/user/Desktop")
>>> result = imgset.upload( 'imgur',"MY_API_KEY1234567890" )
>>> print "Uploaded To: " + result[0]
To upload image to flickr::
>>> imgset.upload('flickr','api_key','api_secret')
>>> imgset.upload('flickr') #Once the api keys and secret keys are cached.
|
popazerty/dvbapp2-gui | lib/python/Plugins/Extensions/EGAMIPermanentClock/plugin.py | Python | gpl-2.0 | 5,326 | 0.024972 | ##
## Permanent Clock
## by AliAbdul
##
from Components.ActionMap import ActionMap
from Components.config import config, ConfigInteger, ConfigSubsection, ConfigYesNo
from Components.MenuList import MenuList
from enigma import ePoint, eTimer, getDesktop
from os import environ
from Plugins.Plugin import PluginDescriptor
from Screens.Screen import Screen
from Tools.Directories import resolveFilename, SCOPE_LANGUAGE, SCOPE_PLUGINS
from EGAMI.EGAMI_skins import EGPermanentClock_Skin
##############################################################################
config.plugins.PermanentClock = ConfigSubsection()
config.plugins.PermanentClock.enabled = ConfigYesNo(default=False)
config.plugins.PermanentClock.position_x = ConfigInteger(default=590)
config.plugins.PermanentClock.position_y = ConfigInteger(default=35)
##############################################################################
class PermanentClockScreen(Screen):
	"""Dialog that renders the clock; repositioned on every show."""
	def __init__(self, session):
		Screen.__init__(self, session)
		self.skin = EGPermanentClock_Skin
		self.onShow.append(self.movePosition)
	def movePosition(self):
		# Place the clock at the user-configured coordinates.
		if self.instance:
			self.instance.move(ePoint(config.plugins.PermanentClock.position_x.value, config.plugins.PermanentClock.position_y.value))
##############################################################################
class PermanentClock():
	"""Owns the permanent clock dialog and applies the enabled/disabled state."""
	def __init__(self):
		self.dialog = None
	def gotSession(self, session):
		# Create the clock dialog once a GUI session exists, then apply the
		# configured visibility.
		self.dialog = session.instantiateDialog(PermanentClockScreen)
		self.showHide()
	def changeVisibility(self):
		# Toggle and persist the enabled flag (replaces a verbose if/else).
		config.plugins.PermanentClock.enabled.value = not config.plugins.PermanentClock.enabled.value
		config.plugins.PermanentClock.enabled.save()
		self.showHide()
	def showHide(self):
		if config.plugins.PermanentClock.enabled.value:
			self.dialog.show()
		else:
			self.dialog.hide()
pClock = PermanentClock()
##############################################################################
class PermanentClockPositioner(Screen):
	"""Lets the user move the clock with the cursor keys.

	OK saves the new position; EXIT restores the previously saved one.
	The four direction handlers shared near-identical 10px clamping code;
	that logic is factored into _decrease()/_increase().
	"""
	def __init__(self, session):
		Screen.__init__(self, session)
		self.skin = EGPermanentClock_Skin
		self["actions"] = ActionMap(["WizardActions"],
		{
			"left": self.left,
			"up": self.up,
			"right": self.right,
			"down": self.down,
			"ok": self.ok,
			"back": self.exit
		}, -1)
		desktop = getDesktop(0)
		self.desktopWidth = desktop.size().width()
		self.desktopHeight = desktop.size().height()
		self.moveTimer = eTimer()
		self.moveTimer.callback.append(self.movePosition)
		self.moveTimer.start(100, 1)
	def movePosition(self):
		# Re-apply the configured position every 100 ms so key presses are
		# reflected immediately.
		self.instance.move(ePoint(config.plugins.PermanentClock.position_x.value, config.plugins.PermanentClock.position_y.value))
		self.moveTimer.start(100, 1)
	def _decrease(self, element):
		# Step 10px towards the origin, never below 0.
		element.value = max(element.value - 10, 0)
	def _increase(self, element, limit):
		# Step 10px away from the origin, never past the desktop edge.
		element.value = min(element.value + 10, limit)
	def left(self):
		self._decrease(config.plugins.PermanentClock.position_x)
	def up(self):
		self._decrease(config.plugins.PermanentClock.position_y)
	def right(self):
		self._increase(config.plugins.PermanentClock.position_x, self.desktopWidth)
	def down(self):
		self._increase(config.plugins.PermanentClock.position_y, self.desktopHeight)
	def ok(self):
		# Persist the chosen coordinates.
		config.plugins.PermanentClock.position_x.save()
		config.plugins.PermanentClock.position_y.save()
		self.close()
	def exit(self):
		# Revert to the last saved coordinates.
		config.plugins.PermanentClock.position_x.cancel()
		config.plugins.PermanentClock.position_y.cancel()
		self.close()
##############################################################################
class PermanentClockMenu(Screen):
    """Setup menu: toggle the permanent clock on/off or reposition it."""
    skin = """
    <screen position="center,center" size="420,105" title="%s">
        <widget name="list" position="10,10" size="400,85" />
    </screen>""" % _("Permanent Clock")

    def __init__(self, session):
        Screen.__init__(self, session)
        self.session = session
        self["list"] = MenuList([])
        self["actions"] = ActionMap(["OkCancelActions"], {"ok": self.okClicked, "cancel": self.close}, -1)
        self.onLayoutFinish.append(self.showMenu)

    def showMenu(self):
        # Rebuild the menu so the toggle entry reflects the current state.
        entries = []
        if config.plugins.PermanentClock.enabled.value:
            entries.append(_("Deactivate permanent clock"))
        else:
            entries.append(_("Activate permanent clock"))
        entries.append(_("Change permanent clock position"))
        self["list"].setList(entries)

    def okClicked(self):
        sel = self["list"].getCurrent()
        # Lazily bind the clock singleton to this session on first use.
        if pClock.dialog is None:
            pClock.gotSession(self.session)
        if sel == _("Deactivate permanent clock") or sel == _("Activate permanent clock"):
            pClock.changeVisibility()
            self.showMenu()
        else:
            # Hide the live clock while the positioner screen is open.
            pClock.dialog.hide()
            self.session.openWithCallback(self.positionerCallback, PermanentClockPositioner)

    def positionerCallback(self, callback=None):
        # Restore the clock's visibility after the positioner closes.
        pClock.showHide()
##############################################################################
def sessionstart(reason, **kwargs):
    """Plugin hook: hand the new GUI session to the clock singleton.

    # NOTE(review): assumes reason == 0 means "session starting" per the
    # enigma2 plugin convention — other reasons are ignored.
    """
    if reason == 0:
        pClock.gotSession(kwargs["session"])
def Plugins(**kwargs):
    """Declare the plugin to enigma2: run sessionstart() at session start."""
    return [
        PluginDescriptor(where=[PluginDescriptor.WHERE_SESSIONSTART], fnc=sessionstart)]
|
DigitalPublishingToolkit/Society-of-the-Query-Reader | scripts/gather_essays.py | Python | gpl-3.0 | 470 | 0.002128 | import os, re, shutil
# Flatten essays/<name>_def/<file>.markdown into essays/<name>.markdown,
# preserving history by using `git mv` instead of a plain copy.
for (base, _, files) in os.walk("essays"):
    for f in files:
        if f.endswith(".markdown"):
            fp = os.path.join(base, f)
            _, np = os.path.split(base)
            # Strip the trailing "_def" marker from the directory name.
            np = re.sub(r"_def$", "", np)
            np = os.path.join("essays", np + ".markdown")
            cmd = 'git mv "{0}" "{1}"'.format(fp, np)
            print(cmd)
            os.system(cmd)
|
glennyonemitsu/funkybomb | website/handlers/docs/integrations.py | Python | apache-2.0 | 634 | 0 | from funkybomb import Template, Text
from application.util import route
from templates import documentation
from templ | ates.util import template
@route('/docs/integrations')
@template(documentation.tmpl)
async def docs_integrations_home(req):
    """Integrations documentation landing page (placeholder content)."""
    tmpl = Template()
    # NOTE(review): in funkybomb `node + text` appends to the tree as a side
    # effect of the expression — confirm against the library docs.
    tmpl.p + 'Coming soon.'
    return {
        'content': tmpl,
        'headline': Text('Integrations')
    }
@route('/docs/integrations/flask')
@template(documentation.tmpl)
async def docs_integrations_flask(req):
    """Flask integration documentation page (placeholder content)."""
    tmpl = Template()
    tmpl.p + 'Coming soon.'
    return {
        'content': tmpl,
        'headline': Text('Integrating with Flask')
    }
|
mutability/kafka-python | kafka/client.py | Python | apache-2.0 | 21,917 | 0.001414 | import collections
import copy
import functools
import logging
import time
import kafka.common
from kafka.common import (TopicAndPartition, BrokerMetadata,
ConnectionError, FailedPayloadsError,
KafkaTimeoutError, KafkaUnavailableError,
LeaderNotAvailableError, UnknownTopicOrPartitionError,
NotLeaderForPartitionError, ReplicaNotAvailableError)
from kafka.conn import collect_hosts, KafkaConnection, DEFAULT_SOCKET_TIMEOUT_SECONDS
from kafka.protocol import KafkaProtocol
from kafka.util import kafka_bytestring
log = logging.getLogger(__name__)
class KafkaClient(object):
CLIENT_ID = b'kafka-python'
# NOTE: The timeout given to the client should always be greater than the
# one passed to SimpleConsumer.get_message(), otherwise you can get a
# socket timeout.
def __init__(self, hosts, client_id=CLIENT_ID,
             timeout=DEFAULT_SOCKET_TIMEOUT_SECONDS,
             correlation_id=0):
    """Bootstrap a Kafka client by loading metadata from `hosts`.

    hosts: broker host:port specification (any format collect_hosts accepts).
    client_id: opaque identifier sent with every request.
    timeout: per-socket timeout in seconds.
    correlation_id: starting value of the request correlation counter.
    """
    # We need one connection to bootstrap
    self.client_id = kafka_bytestring(client_id)
    self.timeout = timeout
    self.hosts = collect_hosts(hosts)
    self.correlation_id = correlation_id

    # create connections only when we need them
    self.conns = {}
    self.brokers = {}            # broker_id -> BrokerMetadata
    self.topics_to_brokers = {}  # TopicAndPartition -> BrokerMetadata
    self.topic_partitions = {}   # topic -> partition -> PartitionMetadata
    self.load_metadata_for_topics()  # bootstrap with all metadata
##################
# Private API #
##################
def _get_conn(self, host, port):
    """Get or create a (cached) connection to a broker at host:port."""
    host_key = (host, port)
    if host_key not in self.conns:
        # Connections are created lazily and cached for reuse.
        self.conns[host_key] = KafkaConnection(
            host,
            port,
            timeout=self.timeout
        )

    return self.conns[host_key]
def _get_leader_for_partition(self, topic, partition):
    """
    Returns the leader for a partition or None if the partition exists
    but has no leader.

    UnknownTopicOrPartitionError will be raised if the topic or partition
    is not part of the metadata.

    LeaderNotAvailableError is raised if server has metadata, but there is
    no current leader
    """
    key = TopicAndPartition(topic, partition)

    # Use cached metadata if it is there
    if self.topics_to_brokers.get(key) is not None:
        return self.topics_to_brokers[key]

    # Otherwise refresh metadata

    # If topic does not already exist, this will raise
    # UnknownTopicOrPartitionError if not auto-creating
    # LeaderNotAvailableError otherwise until partitions are created
    self.load_metadata_for_topics(topic)

    # If the partition doesn't actually exist, raise
    if partition not in self.topic_partitions.get(topic, []):
        raise UnknownTopicOrPartitionError(key)

    # If there's no leader for the partition, raise
    meta = self.topic_partitions[topic][partition]
    if meta.leader == -1:
        raise LeaderNotAvailableError(meta)

    # Otherwise return the BrokerMetadata
    return self.brokers[meta.leader]
def _next_id(self):
    """Generate a new correlation id."""
    # modulo to keep the counter within a signed int32 for the wire protocol
    self.correlation_id = (self.correlation_id + 1) % 2**31
    return self.correlation_id
def _send_broker_unaware_request(self, payloads, encoder_fn, decoder_fn):
    """
    Attempt to send a broker-agnostic request to one of the available
    brokers. Keep trying until you succeed.
    """
    for (host, port) in self.hosts:
        requestId = self._next_id()
        log.debug('Request %s: %s', requestId, payloads)
        try:
            conn = self._get_conn(host, port)
            request = encoder_fn(client_id=self.client_id,
                                 correlation_id=requestId,
                                 payloads=payloads)

            conn.send(requestId, request)
            response = conn.recv(requestId)
            decoded = decoder_fn(response)
            log.debug('Response %s: %s', requestId, decoded)
            return decoded

        except Exception:
            # Broad catch is deliberate: any failure on this broker should
            # be logged and the next candidate host tried.
            log.exception('Error sending request [%s] to server %s:%s, '
                          'trying next server', requestId, host, port)

    raise KafkaUnavailableError('All servers failed to process request')
def _send_broker_aware_request(self, payloads, encoder_fn, decoder_fn):
"""
Group a list of request payloads by topic+partition and send them to
the leader broker for that partition using the supplied encode/decode
functions
Arguments:
payloads: list of object-like entities with a topic (str) and
partition (int) attribute; payloads with duplicate topic-partitions
are not supported.
encode_fn | : a method to encode the list of payloads to a request body,
must accept client_id, correlation_id, and payloads as
keyword arguments
decode_fn: a method to decode a response body into response objects.
The response objects must be object-like and have topic
and partition attribut | es
Returns:
List of response objects in the same order as the supplied payloads
"""
# encoders / decoders do not maintain ordering currently
# so we need to keep this so we can rebuild order before returning
original_ordering = [(p.topic, p.partition) for p in payloads]
# Group the requests by topic+partition
brokers_for_payloads = []
payloads_by_broker = collections.defaultdict(list)
responses = {}
for payload in payloads:
try:
leader = self._get_leader_for_partition(payload.topic,
payload.partition)
payloads_by_broker[leader].append(payload)
brokers_for_payloads.append(leader)
except KafkaUnavailableError as e:
log.warning('KafkaUnavailableError attempting to send request '
'on topic %s partition %d', payload.topic, payload.partition)
topic_partition = (payload.topic, payload.partition)
responses[topic_partition] = FailedPayloadsError(payload)
# For each broker, send the list of request payloads
# and collect the responses and errors
broker_failures = []
for broker, payloads in payloads_by_broker.items():
requestId = self._next_id()
log.debug('Request %s to %s: %s', requestId, broker, payloads)
request = encoder_fn(client_id=self.client_id,
correlation_id=requestId, payloads=payloads)
# Send the request, recv the response
try:
conn = self._get_conn(broker.host.decode('utf-8'), broker.port)
conn.send(requestId, request)
except ConnectionError as e:
broker_failures.append(broker)
log.warning('ConnectionError attempting to send request %s '
'to server %s: %s', requestId, broker, e)
for payload in payloads:
topic_partition = (payload.topic, payload.partition)
responses[topic_partition] = FailedPayloadsError(payload)
# No exception, try to get response
else:
# decoder_fn=None signal that the server is expected to not
# send a response. This probably only applies to
# ProduceRequest w/ acks = 0
if decoder_fn is None:
log.debug('Request %s does not expect a response '
'(skipping conn.recv)', |
inventree/InvenTree | InvenTree/part/test_migrations.py | Python | mit | 1,487 | 0 | """
Unit tests for the part model database migrations
"""
from django_test_migrations.contrib.unittest_case import MigratorTestCase
from InvenTree import helpers
class TestForwardMigrations(MigratorTestCase):
    """
    Test entire schema migration sequence for the part app
    """

    migrate_from = ('part', helpers.getOldestMigrationFile('part'))
    migrate_to = ('part', helpers.getNewestMigrationFile('part'))

    def prepare(self):
        """
        Create initial data against the pre-migration schema.
        """

        Part = self.old_state.apps.get_model('part', 'part')

        Part.objects.create(name='A', description='My part A')
        Part.objects.create(name='B', description='My part B')
        Part.objects.create(name='C', description='My part C')
        Part.objects.create(name='D', description='My part D')
        Part.objects.create(name='E', description='My part E')

        # Extract one part object to investigate
        p = Part.objects.all().last()

        # Initially some fields are not present
        with self.assertRaises(AttributeError):
            print(p.has_variants)

        with self.assertRaises(AttributeError):
            print(p.is_template)

    def test_models_exist(self):
        """All parts survive the migration and gain the new template fields."""
        Part = self.new_state.apps.get_model('part', 'part')

        self.assertEqual(Part.objects.count(), 5)

        # The new fields must now be writable on every instance.
        for part in Part.objects.all():
            part.is_template = True
            part.save()
            part.is_template = False
            part.save()
|
the-blue-alliance/the-blue-alliance | src/backend/common/queries/tests/district_history_query_test.py | Python | mit | 1,069 | 0 | from backend.common.models.district import District
from backend.common.models.keys import DistrictAbbreviation, Year
from backend.common.queries.district_query import DistrictHistoryQuery
def preseed_district(year: Year, abbrev: DistrictAbbreviation) -> None:
    """Store a minimal District entity (id '<year><abbrev>') for tests."""
    d = District(
        id=f"{year}{abbrev}",
        year=year,
        abbreviation=abbrev,
    )
    d.put()
def test_no_districts() -> None:
    # With nothing stored, the history query yields an empty list.
    districts = DistrictHistoryQuery(abbreviation="ne").fetch()
    assert districts == []
def test_district_history() -> None:
    # Only the two "ne" seasons should match; "fim" is unrelated.
    preseed_district(2019, "fim")
    preseed_district(2019, "ne")
    preseed_district(2018, "ne")

    districts = DistrictHistoryQuery(abbreviation="ne").fetch()
    assert len(districts) == 2
def test_district_history_across_rename() -> None:
    # "mar" was renamed to "fma"; querying either alias returns both seasons.
    preseed_district(2019, "fma")
    preseed_district(2018, "mar")

    districts_fma = DistrictHistoryQuery(abbreviation="fma").fetch()
    districts_mar = DistrictHistoryQuery(abbreviation="mar").fetch()
    assert len(districts_fma) == 2
    assert len(districts_mar) == 2
|
erigones/esdc-ce | gui/dc/dns/forms.py | Python | apache-2.0 | 10,134 | 0.004638 | from operator import and_
from functools import reduce
from django import forms
from django.db.models import Q
from django.utils.six import PY3
from django.utils.translation import ugettext_lazy as _
from api.dc.domain.views import | dc_domain
from api.dns.domain.views import dns_domain
from api.dns.record.views import dns_record_list, dns_record
from api.vm.utils import get_owners
from gui.forms import SerializerForm
from gui.fields import ArrayField
from gui.widgets import NumberInput
from pdns.models import Domain, Record
TEXT_INPUT_ATTRS = {'class': 'input-transparent narrow', 'required': 'required'}
SELECT_ATTRS = {'class': 'narrow input-select2'}
# Python 2/3 shim: `long` does not exist on Python 3, where int is unbounded.
if PY3:
    t_long = int
else:
    t_long = long  # noqa: F821
c | lass DcDomainForm(SerializerForm):
"""
Create or remove DC<->DNS Domain link by calling dc_domain.
"""
_api_call = dc_domain
name = forms.ChoiceField(label=_('Domain'), required=True,
widget=forms.Select(attrs={'class': 'input-select2 narrow disable_created2'}))
def __init__(self, request, domains, *args, **kwargs):
super(DcDomainForm, self).__init__(request, None, *args, **kwargs)
self.fields['name'].choices = domains.values_list('name', 'name')
def _final_data(self, data=None):
return {}
class AdminDomainForm(SerializerForm):
    """
    Create DNS domain by calling dns_domain.
    """
    _api_call = dns_domain

    dc_bound = forms.BooleanField(label=_('DC-bound?'), required=False,
                                  widget=forms.CheckboxInput(attrs={'class': 'normal-check'}))
    name = forms.CharField(label=_('Name'), max_length=255, required=True,
                           widget=forms.TextInput(attrs={'class': 'input-transparent narrow disable_created',
                                                         'required': 'required', 'pattern': '[A-Za-z0-9._-]+'}))
    owner = forms.ChoiceField(label=_('Owner'), required=False,
                              widget=forms.Select(attrs=SELECT_ATTRS))
    access = forms.TypedChoiceField(label=_('Access'), required=False, coerce=int, choices=Domain.ACCESS,
                                    widget=forms.Select(attrs=SELECT_ATTRS))
    type = forms.ChoiceField(label=_('Type'), required=False, choices=Domain.TYPE_MASTER,
                             widget=forms.Select(attrs=SELECT_ATTRS),
                             help_text=_('PowerDNS domain type. '
                                         'MASTER - use DNS protocol messages to communicate changes '
                                         'with slaves. NATIVE - use database replication '
                                         'between master DNS server and slave DNS servers.'))
    desc = forms.CharField(label=_('Description'), max_length=128, required=False,
                           widget=forms.TextInput(attrs={'class': 'input-transparent wide', 'required': ''}))
    tsig_keys = forms.CharField(label=_('TSIG Key(s)'), max_length=1000, required=False,
                                widget=forms.TextInput(attrs={'class': 'input-transparent', 'required': ''}),
                                help_text=_('TSIG DNS keys for external zone transfers. Zone transfers to '
                                            'external DNS slaves will only be allowed using this key. '
                                            'For more info on how to generate the key see Danube Cloud docs.'
                                            ))

    def __init__(self, request, domain, *args, **kwargs):
        super(AdminDomainForm, self).__init__(request, domain, *args, **kwargs)
        self.fields['owner'].choices = get_owners(request).values_list('username', 'username')

        # Only staff users may change the DC-bound flag.
        if not request.user.is_staff:
            self.fields['dc_bound'].widget.attrs['disabled'] = 'disabled'

    def _initial_data(self, request, obj):
        return obj.web_data

    def _final_data(self, data=None):
        data = super(AdminDomainForm, self)._final_data(data=data)

        if self.action == 'create':  # Add dc parameter when doing POST (required by api.db.utils.get_virt_object)
            data['dc'] = self._request.dc.name

        return data
class DnsRecordFilterForm(forms.Form):
    """
    Filter DNS records for a domain.
    """
    all = forms.BooleanField(widget=forms.HiddenInput(attrs={'class': 'always-include-navigation'}), required=False)
    domain = forms.ChoiceField(label=_('Domain'), required=False,
                               widget=forms.Select(attrs={'class': 'fill-up input-navigation select-transparent '
                                                                   'always-include-navigation'}))
    type = forms.ChoiceField(label=_('Type'), required=False, choices=(('', _('Type (all)')),) + Record.TYPE_USED,
                             widget=forms.Select(attrs={'class': 'fill-up input-navigation select-transparent'}))
    name = forms.CharField(label=_('Name'), required=False,
                           widget=forms.TextInput(attrs={'class': 'fill-up input-navigation input-transparent',
                                                         'placeholder': _('Search by name')}))
    content = forms.CharField(label=_('Content'), required=False,
                              widget=forms.TextInput(attrs={'class': 'fill-up input-navigation input-transparent',
                                                            'placeholder': _('Search by content')}))
    changed_since = forms.DateField(label=_('Changed since'), required=False, input_formats=('%Y-%m-%d',),
                                    widget=forms.DateInput(format='%Y-%m-%d',
                                                           attrs={'placeholder': _('Changed since'),
                                                                  'class': 'fill-up input-navigation '
                                                                           'input-transparent input-date'}))

    def __init__(self, request, data, _all=False, **kwargs):
        super(DnsRecordFilterForm, self).__init__(data, **kwargs)
        domains = Domain.objects.order_by('name')
        user, dc = request.user, request.dc

        # Deleted-records view may also show internal domains.
        if request.GET.get('deleted', False):
            domains = domains.exclude(access=Domain.INTERNAL)
        else:
            domains = domains.exclude(access__in=Domain.INVISIBLE)

        if user.is_staff and _all:
            domain_choices = [(d.name, d.name) for d in domains]
        else:
            # Non-staff users only see domains attached to their DC or owned
            # by them.
            dc_domain_ids = list(dc.domaindc_set.values_list('domain_id', flat=True))
            domains = domains.filter(Q(id__in=dc_domain_ids) | Q(user=user.id))
            domain_choices = [(d.name, d.name) for d in domains
                              if (user.is_staff or d.user == user.id or d.dc_bound == dc.id)]

        self.fields['domain'].choices = domain_choices

    def get_filters(self):
        """Combine the cleaned filter fields into one Q object (or None)."""
        data = self.cleaned_data
        query = []

        _type = data.get('type')
        if _type:
            query.append(Q(type=_type))

        name = data.get('name')
        if name:
            query.append(Q(name__icontains=name))

        content = data.get('content')
        if content:
            query.append(Q(content__icontains=content))

        changed_since = data.get('changed_since')
        if changed_since:
            # change_date is stored as a unix timestamp string.
            query.append(Q(change_date__gte=changed_since.strftime('%s')))

        if query:
            return reduce(and_, query)
        else:
            return None
class DnsRecordForm(SerializerForm):
"""
Create, update or delete network DNS record.
"""
_ip = None
_api_call = dns_record
template = 'gui/dc/domain_record_form.html'
id = forms.IntegerField(label=_('ID'), required=True, widget=forms.HiddenInput())
name = forms.CharField(label=_('Name'), required=True,
help_text=_('The full URI the DNS server should pick up on.'),
widget=forms.TextInput(attrs=TEXT_INPUT_ATTRS))
content = forms.CharField(label=_('Content'), required=False,
# help_text=_('The answer of the DNS query.'),
|
bryantdo/PiPID | pypid/backend/__init__.py | Python | gpl-3.0 | 6,272 | 0.000478 | # Copyright (C) 2011-2012 W. Trevor King <wking@tremily.us>
#
# This file is part of pypid.
#
# pypid is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# pypid is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# pypid. If not, see <http://www.gnu.org/licenses/>.
"""Assorted backends for interfacing with your particular hardware.
"""
def _import_by_name(modname):
    """Import a module by its dotted name and return the module object.

    >>> mod = _import_by_name('pypid.backend.melcor')
    >>> 'MelcorBackend' in dir(mod)
    True
    >>> _import_by_name('pypid.backend.highly_unlikely')
    Traceback (most recent call last):
      ...
    ImportError: No module named highly_unlikely
    """
    module = __import__(modname)
    # __import__('a.b.c') returns package 'a'; walk down to the leaf module.
    components = modname.split('.')
    for comp in components[1:]:
        module = getattr(module, comp)
    return module
def get_backend(name):
    """Return the Backend subclass defined in the pypid.backend.<name> module.

    Raises ValueError if the named module defines no Backend subclass.
    """
    n = '%s.%s' % (__name__, name)
    mod = _import_by_name(n)
    for attr in dir(mod):
        obj = getattr(mod, attr)
        try:
            # `is not` (identity): we want strict subclasses, not Backend itself.
            if obj is not Backend and issubclass(obj, Backend):
                return obj
        except TypeError:
            # dir() entries that are not classes make issubclass() raise.
            pass
    raise ValueError(name)
class Backend (object):
    """Process-control backend

    There are several common forms for a PID control formula.  For the
    purpose of setting heating and cooling gains (`.get_*_gains()` and
    `.set_*_gains()`), we'll use the standard form::

      MV(t) = K_p ( e(t) + 1/T_i \int_0^t e(\tau) d\tau + T_d de(t)/dt )

    where `e(t) = SP - PV` is the error function, MV is the
    manipulated variable, SP is the setpoint, and PV is the process
    variable.

    In this formulation, the parameter units will be:

    * K_p: MV-units/PV-units (e.g. amp/K for a thermoelectric
      controller).  Don't confuse this `proportional gain` with the
      `process gain` used in `TestBackend`.
    * T_i, T_d: time (e.g. seconds)
    """
    pv_units = 'PV-units'
    mv_units = 'MV-units'

    def __init__(self):
        # Cached maximum manipulated variable; None until set_max_mv().
        self._max_mv = None

    def cleanup(self):
        "Release resources and disconnect from any hardware."
        pass

    def get_pv(self):
        "Return the current process variable in PV-units"
        raise NotImplementedError()

    def get_ambient_pv(self):
        "Return the ambient (bath) status in PV-units"
        raise NotImplementedError()

    def set_max_mv(self, max):
        "Set the max manipulated variable in MV-units"
        raise NotImplementedError()

    # NOTE(review): the trailing 't' looks like a typo for get_max_mv();
    # the name is kept for backward compatibility with existing callers.
    def get_max_mvt(self):
        "Get the max manipulated variable MV-units"
        raise NotImplementedError()

    def get_mv(self):
        """Return the calculated manipulated varaible in MV-units

        The returned current is not the actual MV, but the MV that the
        controller calculates it should generate.  For example, if the
        voltage required to generate an MV current exceeds the
        controller's max voltage, then the physical current will be
        less than the value returned here.
        """
        raise NotImplementedError()

    def get_modes(self):
        "Return a list of control modes supported by this backend"
        raise NotImplementedError()

    def get_mode(self):
        "Return the current control mode"
        raise NotImplementedError()

    def set_mode(self, mode):
        "Set the current control mode"
        raise NotImplementedError()

    def dump_configuration(self):
        "Serialize the backend's configuration (backend-specific format)."
        raise NotImplementedError()

    def restore_configuration(self):
        "Restore a previously dumped configuration."
        raise NotImplementedError()
class ManualMixin (object):
    """Mixin for backends that support direct (manual) MV control."""

    def set_mv(self, current):
        "Set the desired manipulated variable in MV-units"
        raise NotImplementedError()
class PIDMixin (object):
    """Mixin declaring the PID-control interface backends may implement."""

    def set_setpoint(self, setpoint):
        "Set the process variable setpoint in PV-units"
        raise NotImplementedError()

    # NOTE(review): the `setpoint` parameter looks like a copy-paste leftover
    # from set_setpoint(); kept so existing callers are not broken.
    def get_setpoint(self, setpoint):
        "Get the process variable setpoint in PV-units"
        raise NotImplementedError()

    def get_down_gains(self):
        "Return the (cooling) PID gains as implemented by the backend."
        raise NotImplementedError()

    def set_down_gains(self, proportional=None, integral=None,
                       derivative=None):
        "Set the (cooling) PID gains; None leaves a gain unchanged."
        raise NotImplementedError()

    def get_up_gains(self):
        "Return the (heating) PID gains as implemented by the backend."
        raise NotImplementedError()

    def set_up_gains(self, proportional=None, integral=None, derivative=None):
        "Set the (heating) PID gains; None leaves a gain unchanged."
        raise NotImplementedError()

    def get_feedback_terms(self):
        """Experimental
        """
        raise NotImplementedError()

    def clear_integral_term(self):
        """Reset the integral feedback turn (removing integrator windup)

        Because the proportional term provides no control signal when
        the system exactly matches the setpoint, a P-only algorithm
        will tend to "droop" off the setpoint.  The equlibrium
        position is one where the droop-generated P term balances the
        systems temperature leakage.  To correct for this, we add the
        integral feedback term, which adjusts the control signal to
        minimize long-term differences between the output and setpoint.

        One issue with the integral term is "integral windup".  When
        the signal spends a significant time away from the setpoint
        (e.g. during a long ramp up to operating temperature), the
        integral term can grow very large, causing overshoot once the
        output reaches the setpoint.  To allow our controller to avoid
        this, this method manually clears the intergal term for the
        backend.
        """
        raise NotImplementedError()
class TemperatureMixin (object):
    """Fahrenheit/Celsius conversion helpers for temperature backends."""

    @staticmethod
    def _convert_F_to_C(F):
        """Convert degrees Fahrenheit to degrees Celsius."""
        return (F - 32)/1.8

    @staticmethod
    def _convert_C_to_F(C):
        """Convert degrees Celsius to degrees Fahrenheit."""
        return C*1.8 + 32
|
OsirisSPS/osiris-sps | client/data/extensions/148B613D055759C619D5F4EFD9FDB978387E97CB/scripts/portals/peers.py | Python | gpl-3.0 | 3,117 | 0.048444 | import osiris
import os
import osiris.events
class Page(osiris.IPortalPage):
    """Portal page listing known peers and allowing manual peer insertion."""

    def __init__(self, session):
        osiris.IPortalPage.__init__(self, session)
        # Ajax request when an explicit action ("act") parameter is present.
        self.ajax = (session.request.getUrlParam("act") != "")

    def getPageName(self):
        return "portal.pages.peers"

    def isMcpModeRequired(self):
        return True

    def isIsisAllowed(self):
        return False

    def onLoad(self):
        osiris.IPortalPage.onLoad(self)

        self.act = self.session.request.getUrlParam("act")
        if(self.act == ""):
            self.act = "home"

        self.document = osiris.XMLDocument()
        self.root = self.document.create(self.act)

        template = osiris.HtmlXSLControl()
        template.stylesheet = self.loadStylesheet(os.path.join(os.path.dirname(__file__), "peers.xsl"))
        template.document = self.document

        self.document.root.setAttributeString("page_url", self.request.rawUrl)

        if(self.act == "home"):
            self.addPeerIP = osiris.HtmlTextBox()
            self.addPeerIP.id = "addPeerIP"
            self.addPeerIP.css = "os_input_full"
            template.addChildParam(self.addPeerIP)

            self.addPeerPort = osiris.HtmlTextBox()
            self.addPeerPort.id = "addPeerPort"
            self.addPeerPort.css = "os_input_full"
            template.addChildParam(self.addPeerPort)

            self.addPeerCommand = osiris.IdeButton(self.getText("common.actions.add"))
            self.addPeerCommand.id = "addPeerCommand"
            osiris.events.connect(self.addPeerCommand.eventClick, self.onAddPeer)
            template.addChildParam(self.addPeerCommand)

        #if(self.act == "your"):
        #
        #	client = osiris.Engine.instance().createHttpClient()
        #
        #	url = "check.php?port=" + str(osiris.Options.instance().getServerPort()) + "&output=xml";
        #	if(osiris.Options.instance().getOptionBool("p2p.enable") == False):
        #		url += "&notest";
        #	client.perform(osiris.HttpUrl(osiris.Options.instance().getIsisLink(url)))
        #	self.document.parseBuffer(client.response.content.content)
        #	self.document.root.setAttributeBool("p2p_enabled",osiris.Options.instance().getOptionBool("p2p.enable"))

        if(self.ajax):
            self.controls.add(template)
        else:
            self.getArea(osiris.pageAreaContent).controls.add(template)

    def onPreRender(self):
        osiris.IPortalPage.onPreRender(self)

        if(self.act == "home"):
            # Export the current peer list into the page's XML document.
            self.portal.peersManager.exportXML(self.document.root)

    def onAddPeer(self, args):
        """Validate the submitted IP/port and store the peer."""
        ip = self.addPeerIP.value
        port = self.addPeerPort.value

        if(ip == ""):
            self.showError(self.getText("portal.pages.peers.error.invalidIp"))
            return

        if(port == ""):
            self.showError(self.getText("portal.pages.peers.error.invalidPort"))
            return

        endpoint = osiris.IPAddress()
        if(endpoint.setEndpoint(ip, int(port)) == False):
            self.showError(self.getText("portal.pages.peers.error.invalidIp"))
            return

        if(self.portal.getPeersManager().savePeer(endpoint, True, "manual")):
            self.showMessage(self.getText("portal.pages.peers.message.peerInserted"))
        else:
            self.showError(self.getText("portal.pages.peers.message.error.cannotInsertPeer"))
            return
|
lucasoldaini/dict2csv | dict2csv.py | Python | mit | 3,606 | 0 | import csv
from StringIO import StringIO
from math import ceil
from collections import Mapping, Sequence
def __expand_container(cont, i, j, empty_sym=''):
    """ Expand, if possible, the list of list cont of size (h, k) to a list
        of lists of size (i, j). If the expansion is successful, newly
        created elements are filled with data empty_sym.
    """
    for ln in cont:
        # expand horizontally
        if len(ln) < j:
            ln.extend([empty_sym for k in range((j - len(ln)))])
    if len(cont) < i:
        # expand vertically
        cont.extend([[empty_sym for k in range(j)]
                     for h in range((i - len(cont)))])
def __recursive_insert_data(di, data_cont, col_index):
    """ Recursively insert data into data_cont (list of list)
        while visiting the data container di (either a dictionary-like
        container or a list-like container) using DFS.
        The position of data_cont in which the data is insert is
        col_index; if data_cont is not big enough to accommodate the
        data, it will be automatically expanded.
    """
    if not(isinstance(di, Mapping)) and not(isinstance(di, Sequence)):
        # reached the data, back up a position to insert it in!
        return col_index

    new_col_index = col_index

    # assign progressive index names starting from 0 if di
    # is a list-like object
    di_iter = (di.iteritems() if isinstance(di, Mapping) else enumerate(di))

    for k, v in di_iter:
        # recursively insert data for the sublist of di
        new_col_index = __recursive_insert_data(v, data_cont, new_col_index)

    if new_col_index == col_index:
        # previous iteration has reached the data, better dump!
        __expand_container(data_cont, len(di), col_index + 1)
        for i, elem in enumerate(di):
            data_cont[i][col_index] = elem
        return (col_index + 1)
    else:
        # di contains multiple subheaders, so no dumping
        return new_col_index
def __recursive_build_header(name_and_di, heads_cont, left, depth):
    """ Recursively detect headers in di. Headers are collected in
        the container heads_cont.
        The container is automatically expanded if needed.
    """
    # Python-3-compatible replacement for the py2 tuple-parameter syntax;
    # callers still pass a single (name, di) tuple.
    name, di = name_and_di

    # Leaf check: stop when di is neither dict-like nor list-like (this
    # mirrors the guard in __recursive_insert_data; `or` here would bail
    # out for every input and produce no headers at all).
    if not(isinstance(di, Mapping)) and not(isinstance(di, Sequence)):
        return left

    right = left
    di_iter = (di.iteritems() if isinstance(di, Mapping) else enumerate(di))

    for k, v in di_iter:
        right = __recursive_build_header((k, v), heads_cont, right, depth + 1)

    if left == right:
        # No child advanced the cursor: place this header and move right.
        __expand_container(heads_cont, depth + 1, right + 1)
        heads_cont[depth][right] = name
        right += 1
    elif name is not None:
        # Centre the header above the span covered by its children.
        pos = left + (int(ceil(float(right - left) / 2)) - 1)
        heads_cont[depth][pos] = name
    return right
def dict2csv(di, csv_kwargs=None):
    """ Input: a dictionary [of dictionaries]* containing data
        (optional) arguments to control layout of csv file
        Output: a string ready to be written as csv file
    """
    # collect data
    data_cont = []
    __recursive_insert_data(di, data_cont, 0)

    # format headers; drop the synthetic root row added for name=None
    heads_cont = []
    __recursive_build_header((None, di), heads_cont, 0, 0)
    heads_cont = heads_cont[1:]

    # prepare output file
    outstr = StringIO()
    if csv_kwargs is None:
        csv_kwargs = {}
    wr = csv.writer(outstr, **csv_kwargs)

    # write data
    wr.writerows(heads_cont)
    wr.writerows(data_cont)

    # rewind and return data
    outstr.seek(0)
    return outstr.read()
|
idf-archive/OneLinerPython | Fibonacci.py | Python | apache-2.0 | 101 | 0.029703 | __autho | r__ = 'Danyang'
fib = lambda n : redu | ce(lambda x, n: [x[1], x[0]+x[1]], xrange(n), [0, 1])[0] |
WQuanfeng/wagtail | wagtail/wagtailadmin/apps.py | Python | bsd-3-clause | 172 | 0 | from django | .apps import AppConfig
class WagtailAdminAppConfig(AppConfig):
    """Django app configuration for the Wagtail admin application."""
    name = 'wagtail.wagtailadmin'
    label = 'wagtailadmin'
    verbose_name = "Wagtail admin"
|
raphaottoni/arglex | arglex/macros.py | Python | gpl-3.0 | 1,719 | 0.006399 | #Macros commonly used
_be = "(be|is|am|are|were|was|been|being)"
_intensadj1 = "( absolutely| absurdly| resoundingly | amazingly | awfully | extremely | completely | highly | incredibly | perfectly | quite | really | strikingly | surprisingly | terribly | totally | unbelievably | hugely | unnaturally | unusually | utterly | | very | tremendously | spectacularly)"
_intensadv1= "{ absolutely| absurdly| resoundingly| ama | zingly| awfully| extremely| completely| highly| incredibly| perfectly| quite| really| strikingly| surprisingly| terribly| totally| unbelievably| hugely| unnaturally| unusually| utterly| very| tremendously| spectacularly}"
_intensadj1 = "( absolute| extreme| incredible| perfect| phenomenal| spectacular| huge| major| tremendous| complete| considerable| real| terrible| total| unbelievable| utter| great| resounding)"
_have = "(have|has|had|having)"
_GONNA = "( am going to| are going to| is going to| am gonna| are gonna| is gonna)"
_GONNANEG= "( am not going to| are not going to| is not going to| am not gonna| are not gonna| is not gonna| ain\'t gonna| isn\'t gonna| aren\'t gonna)"
_GONNACL = "( i\'m going to| they\'re going to| she\'s going to| it\'s going to| we\'re going to| i\'m gonna| you\'re gonna| i\'m gonna| you\'re gonna| he\'s gonna| she\'s gonna| it\'s gonna| we\'re gonna| they\'re gonna| that\'s gonnar)"
_GONNANEGCL = "( i\'m not going to| they\'re not going to| she\'s not going to| it\'s not going to| we\'re not going to| i\'m not gonna| you\'re not gonna| he\'s not gonna| she\'s not gonna| it\'s not gonna| we\'re not gonna| they\'re not gonna| that\'s not gonna)"
_pronsubj = "(i| you| he| she| it| we| they)"
_emo1v= "(like| adore| want| prefer| love| enjoy)"
|
conwetlab/ckanext-datarequests | ckanext/datarequests/tests/test_helpers.py | Python | agpl-3.0 | 4,934 | 0.003446 | # -*- coding: utf-8 -*-
# Copyright (c) 2015 CoNWeT Lab., Universidad Politécnica de Madrid
# This file is part of CKAN Data Requests Extension.
# CKAN Data Requests Extension is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# CKAN Data Requests Extension is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with CKAN Data Requests Extension. If not, see <http://www.gnu.org/licenses/>.
import ckanext.datarequests.helpers as helpers
import unittest
from mock import MagicMock, patch
class HelpersTest(unittest.TestCase):
    """Unit tests for ckanext.datarequests.helpers.

    All external collaborators (toolkit, model, db, template context) are
    patched out in setUp so each helper is exercised in isolation.
    """

    def setUp(self):
        self.tk_patch = patch('ckanext.datarequests.helpers.tk')
        self.tk_patch.start()

        self.model_patch = patch('ckanext.datarequests.helpers.model')
        self.model_patch.start()

        self.db_patch = patch('ckanext.datarequests.helpers.db')
        self.db_patch.start()

        self.c_patch = patch('ckanext.datarequests.helpers.c')
        self.c = self.c_patch.start()

    def tearDown(self):
        self.tk_patch.stop()
        self.model_patch.stop()
        self.db_patch.stop()
        self.c_patch.stop()

    def test_get_comments_number(self):
        """get_comments_number initialises the DB and returns the count."""
        # Mocking
        n_comments = 3
        helpers.db.Comment.get_comment_datarequests_number.return_value = n_comments

        # Call the function
        datarequest_id = 'example_uuidv4'
        result = helpers.get_comments_number(datarequest_id)

        # Assertions (assertEqual: assertEquals is a deprecated alias)
        helpers.db.init_db.assert_called_once_with(helpers.model)
        helpers.db.Comment.get_comment_datarequests_number.assert_called_once_with(datarequest_id=datarequest_id)
        self.assertEqual(result, n_comments)

    def test_get_comments_badge(self):
        """get_comments_badge renders the badge snippet with the count."""
        # Mocking
        n_comments = 3
        helpers.db.Comment.get_comment_datarequests_number.return_value = n_comments

        # Call the function
        datarequest_id = 'example_uuidv4'
        result = helpers.get_comments_badge(datarequest_id)

        # Assertions
        helpers.db.init_db.assert_called_once_with(helpers.model)
        helpers.db.Comment.get_comment_datarequests_number.assert_called_once_with(datarequest_id=datarequest_id)
        self.assertEqual(result, helpers.tk.render_snippet.return_value)
        helpers.tk.render_snippet.assert_called_once_with('datarequests/snippets/badge.html',
                                                          {'comments_count': n_comments})

    def test_get_open_datarequests_number(self):
        """get_open_datarequests_number proxies the DB count."""
        # Mocking
        n_datarequests = 3
        helpers.db.DataRequest.get_open_datarequests_number.return_value = n_datarequests

        # Call the function
        result = helpers.get_open_datarequests_number()

        # Assertions
        helpers.db.init_db.assert_called_once_with(helpers.model)
        helpers.db.DataRequest.get_open_datarequests_number.assert_called_once_with()
        self.assertEqual(result, n_datarequests)

    def test_get_open_datarequests_badge_true(self):
        """With show=True the open-requests badge snippet is rendered."""
        # Mocking
        n_datarequests = 3
        helpers.db.DataRequest.get_open_datarequests_number.return_value = n_datarequests

        # Call the function
        result = helpers.get_open_datarequests_badge(True)

        # Assertions
        helpers.db.init_db.assert_called_once_with(helpers.model)
        helpers.db.DataRequest.get_open_datarequests_number.assert_called_once_with()
        self.assertEqual(result, helpers.tk.render_snippet.return_value)
        helpers.tk.render_snippet.assert_called_once_with('datarequests/snippets/badge.html',
                                                          {'comments_count': n_datarequests})

    def test_get_open_datarequests_badge_false(self):
        """With show=False no badge is rendered at all."""
        self.assertEqual(helpers.get_open_datarequests_badge(False), '')

    def test_is_following_datarequest_true(self):
        """A non-empty follower list means the current user follows."""
        follower = MagicMock()
        datarequest_id = 'example_id'
        helpers.db.DataRequestFollower.get.return_value = [follower]

        self.assertTrue(helpers.is_following_datarequest(datarequest_id))
        helpers.db.DataRequestFollower.get.assert_called_once_with(datarequest_id=datarequest_id, user_id=self.c.userobj.id)

    def test_is_following_datarequest_false(self):
        """An empty follower list means the current user does not follow."""
        datarequest_id = 'example_id'
        helpers.db.DataRequestFollower.get.return_value = []

        self.assertFalse(helpers.is_following_datarequest(datarequest_id))
        helpers.db.DataRequestFollower.get.assert_called_once_with(datarequest_id=datarequest_id, user_id=self.c.userobj.id)
|
asdf1011/bdec | bdec/choice.py | Python | lgpl-3.0 | 3,099 | 0.00355 | # Copyright (C) 2008-2011 Henry Ludemann
#
# This file is part of the bdec decoder library.
#
# The bdec decoder library is free software; you can redistribute it
# and/or modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# The bdec decoder library is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, see
# <http://www.gnu.org/licenses/>.
#
# This file incorporates work covered by the following copyright and
# permission notice:
#
# Copyright (c) 2010, PRESENSE Technologies GmbH
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the PRESENSE Technologies GmbH nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL PRESENSE Technologies GmbH BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import bdec.data as dt
import bdec.entry
class Choice(bdec.entry.Entry):
    """
    An entry that can be one of many entries.

    The first child entry to decode correctly will be used.
    """
    def __init__(self, name, children, length=None):
        # Repaired an extraction-garbled token here ("__init_ _"); plain
        # explicit base-class initialisation as elsewhere in this codebase.
        bdec.entry.Entry.__init__(self, name, length, children)

    def _range(self, ignore_entries):
        # A choice's possible length is bounded by the extremes of its
        # children's ranges.
        minimum = min(child.entry.range(ignore_entries).min for child in self.children)
        maximum = max(child.entry.range(ignore_entries).max for child in self.children)
        return bdec.entry.Range(minimum, maximum)
|
glimix/limix-genetics | limix_genetics/_colors.py | Python | mit | 259 | 0.003861 | from bokeh.palettes import brewer
def get_colors(colors, labels):
    """Map each label to a color from the 11-class Spectral palette.

    :param colors: optional pre-seeded ``{label: color}`` dict (or None);
        entries already present are kept (previously they were clobbered,
        which defeated the parameter).
    :param labels: iterable of labels that need a color.
    :return: the (possibly newly created) ``colors`` dict.
    """
    from itertools import cycle

    if colors is None:
        colors = dict()
    # cycle() wraps around so more than 11 labels no longer raises
    # StopIteration (colors repeat instead).
    palette = cycle(brewer['Spectral'][11])
    for label in labels:
        if label not in colors:
            colors[label] = next(palette)
    return colors
|
dabo02/Upwork_Tom_VideoShowroom | Back-End.py | Python | gpl-3.0 | 4,500 | 0.001556 | from flask import Flask, render_template, request, redirect, url_for, flash, jsonify, Response
from celery import Celery
from werkzeug.utils import secure_filename
from VideoPlayer import VideoPlayer
from subprocess import Popen
import os
app = Flask(__name__)

# Toggle between a desktop development box (local=True) and the Raspberry
# Pi deployment (local=False).
local = False

if local:
    UPLOAD_FOLDER = '/home/dabo02/Desktop/Projects/Side_Projects/Upwork_Tom_VideoShowroom/static/video/'
else:
    UPLOAD_FOLDER='/home/pi/Desktop/Upwork_Tom_VideoShowroom/static/video/'
    import RPi.GPIO as GPIO
    GPIO.setmode(GPIO.BCM)
    # Pin 23: magnetic door switch (input, pulled down).
    # Pin 24: showroom light (output).
    GPIO.setup(23, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
    GPIO.setup(24, GPIO.OUT)

app.config['CELERY_BROKER_URL'] = 'amqp://'
app.config['CELERY_RESULT_BACKEND'] = 'amqp://'
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER

celery = Celery(app.name, broker=app.config['CELERY_BROKER_URL'])
celery.conf.update(app.config)

# Only audio/video container formats the player handles.
ALLOWED_EXTENSIONS = set(['mp3', 'mp4'])

# Module-level state shared between the Flask views and the celery task.
# NOTE(review): when local is True, GPIO is never imported yet several views
# below still reference it — presumably the local mode is only used for UI
# work; confirm before relying on it.
light_state = False
exit_flag = False  # NOTE(review): appears unused in this file
current_video = None
preview_video = ''
def allowed_file(filename):
    """Return True when *filename* carries an extension listed in
    ALLOWED_EXTENSIONS (case-insensitive)."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1].lower()
    return extension in ALLOWED_EXTENSIONS
def check_for_current():
    """Ensure the module-global ``current_video`` points at some upload.

    Falls back to the alphabetically first file in UPLOAD_FOLDER.  An empty
    upload folder previously raised IndexError; now the global is simply
    left unset (callers should check it before playing).
    """
    global current_video
    if not current_video:
        # sorted(): os.listdir order is arbitrary, make the fallback stable.
        list_of_videos = sorted(os.listdir(UPLOAD_FOLDER))
        if list_of_videos:
            current_video = list_of_videos[0]
@celery.task
def main_routine():
    """Background loop: poll the door switch on pin 23 and start/stop
    playback and the light accordingly."""
    player = VideoPlayer()
    while True:
        door_open = GPIO.input(23)
        if not door_open:
            # Door closed: light on, playback off.
            GPIO.output(24, 1)
            player.stop_video()
            continue
        if player.video_is_playing:
            continue
        # Door open and player idle: light off, start the configured video.
        GPIO.output(24, 0)
        check_for_current()
        player.set_video(UPLOAD_FOLDER + current_video)
        player.play_video()
@app.route('/')
def dashboard():
    # Main page: list every uploaded video, mark the currently selected one,
    # pass along the optional preview choice and the light state.
    video_list = os.listdir(UPLOAD_FOLDER)
    video_info = {}
    videos = []
    global current_video
    global preview_video
    global light_state
    preview = ''
    for v in video_list:
        if current_video:
            # NOTE(review): substring match — 'a.mp4' also matches inside
            # 'ba.mp4'; an equality test is probably intended. Confirm
            # against how current_video is set before changing.
            if current_video in v:
                current = True
            else:
                current = False
        else:
            current = False
        if preview_video:
            # Same substring caveat as above.
            if preview_video in v:
                preview = v
        name = v.rsplit('.', 1)[0]
        video_info = {'name': name, 'id': v, 'current': current}
        videos.append(video_info)
    return render_template('index.html', videos=videos, preview=preview, light_state=light_state)
@app.route('/upload_video', methods=['POST'])
def upload_video():
    """Accept an mp3/mp4 upload and store it in UPLOAD_FOLDER.

    NOTE(review): flash() requires ``app.secret_key`` to be set and none is
    configured in this file — confirm the key is set elsewhere.
    """
    if 'video' not in request.files:
        flash('No file part')
        return redirect(url_for('dashboard'))
    file = request.files['video']
    if file.filename == '':
        flash('No selected file')
        return redirect(url_for('dashboard'))
    if file and allowed_file(file.filename):
        filename = secure_filename(file.filename)
        filepath = os.path.join(app.config['UPLOAD_FOLDER'], filename)
        file.save(filepath)
    else:
        # Previously this branch fell through returning None, which Flask
        # turns into an HTTP 500; report the problem and redirect instead.
        flash('File type not allowed')
    return redirect(url_for('dashboard'))
@app.route('/remove_video/<id>', methods=['GET'])
def remove_video(id):
    """Delete an uploaded video by filename.

    The original prepended UPLOAD_FOLDER and then joined with the upload
    folder again (redundant), and passed the raw URL component to
    os.remove, allowing path traversal (e.g. ``..%2F..``). Sanitize first.
    """
    filename = secure_filename(id)
    os.remove(os.path.join(app.config['UPLOAD_FOLDER'], filename))
    return redirect(url_for('dashboard'))
@app.route('/update_video/<id>', methods=['GET'])
def change_current_video(id):
    """Select *id* as the video played when the door opens."""
    global current_video
    current_video = id
    return redirect(url_for('dashboard'))
@app.route('/preview_video/<id>', methods=['GET'])
def preview_current_video(id):
    # Remember which uploaded video the dashboard should embed as a preview.
    global preview_video
    preview_video = id
    return redirect(url_for('dashboard'))
@app.route('/light_state/<state>', methods=['GET'])
def light_state(state):
    """Turn the showroom light on (state == 'True') or off (anything else).

    Fixed: ``state in 'True'`` was a substring test, so 'T', 'rue', etc.
    (and even an empty string) switched the light on; use equality.

    NOTE(review): this view function shadows the module-level boolean
    ``light_state`` that dashboard() reads — after the first request the
    module attribute becomes a bool while routing keeps working via Flask's
    stored reference. Renaming one of the two (plus the endpoint references
    in the template) would be cleaner; confirm before changing.
    """
    global light_state
    if state == 'True':
        GPIO.output(24, 1)
        light_state = True
        return redirect(url_for('dashboard'))
    GPIO.output(24, 0)
    light_state = False
    return redirect(url_for('dashboard'))
@app.route('/start')
def start_loop():
    """Kick off the background door-polling celery task."""
    main_routine.apply_async()
    return redirect(url_for('dashboard'))
@app.route('/reboot')
def reboot_pi():
    """Release the GPIO pins and reboot the Pi."""
    GPIO.cleanup()
    # Argument list with the default shell=False: no shell process, no
    # shell-injection surface.
    Popen(['reboot'])
    return '<div><h1>Rebooting Pi.....</h1></div>'
@app.route('/shutdown')
def shutdown_pi():
    """Release the GPIO pins and power the Pi down."""
    GPIO.cleanup()
    # Argument list with the default shell=False instead of a shell string.
    Popen(['shutdown', '-h', 'now'])
    return '<div><h1>Shutting Down Pi.....</h1></div>'
if __name__ == '__main__':
    # Development server; the Pi deployment listens on all interfaces.
    if local:
        app.run(host='localhost', port=3000)
    else:
        app.run(host='0.0.0.0', port=3500)
|
callowayproject/django-concepts | example/settings.py | Python | apache-2.0 | 4,239 | 0.001887 | # Django settings for example project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
import os, sys
APP = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
PROJ_ROOT = os.path.abspath(os.path.dirname(__file__))
sys.path.append(APP)
ADMINS = (
# ('Your Name', 'your_email@domain.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'dev.db', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = os.path.abspath(os.path.join(PROJ_ROOT, 'media', 'uploads'))
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = '/uploads/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = os.path.abspath(os.path.join(PROJ_ROOT, 'media', 'static'))
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/static/admin/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'g2_39yupn*6j4p*cg2%w643jiq-1n_annua*%i8+rq0dx9p=$n'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'example.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
os.path.join(PROJ_ROOT, 'templates'),
)
# Repaired an extraction-garbled entry ('django.contr ib.contenttypes').
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Project apps.
    'concepts',
    'simpleapp',
)
|
CristhGunners/Photon | manage.py | Python | mit | 249 | 0 | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Repaired an extraction-garbled token ("os.env iron").
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "photon.settings")

    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
|
vanabo/mattress | src/products/migrations/0021_auto_20170228_1211.py | Python | mit | 395 | 0 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Rename Product.categories to Product.category.

    Repaired extraction-garbled tokens ("migratio ns", "o ld_name").
    """

    dependencies = [
        ('products', '0020_auto_20170227_1345'),
    ]

    operations = [
        migrations.RenameField(
            model_name='product',
            old_name='categories',
            new_name='category',
        ),
    ]
|
mcfletch/django-assets | django_assets/management/commands/assets.py | Python | bsd-2-clause | 4,988 | 0.001804 | """Manage assets.
Usage:
./manage.py assets build
Build all known assets; this requires tracking to be enabled: Only
assets that have previously been built and tracked are
considered "known".
./manage.py assets build --parse-templates
Try to find as many of the project's templates (hopefully all), and
check them for the use of assets. Build all the assets discovered in
this way. If tracking is enabled, the tracking database will be
replaced by the newly found assets.
./manage.py assets watch
Like ``build``, but continues to watch for changes, and builds assets
right away. Useful for cases where building takes some time.
"""
import argparse
import sys
from os import path
import logging
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from webassets.script import (CommandError as AssetCommandError,
GenericArgparseImplementation)
from django_assets.env import get_env, autoload
from django_assets.loaders import get_django_template_dirs, DjangoLoader
from django_assets.manifest import DjangoManifest # noqa: enables the --manifest django option
class Command(BaseCommand):
    """Expose the webassets command line (build/watch/…) as a Django
    management command (``./manage.py assets …``)."""

    help = 'Manage assets.'
    requires_system_checks = False

    def add_arguments(self, parser):
        # Repaired an extraction-garbled help string here.
        parser.add_argument('--parse-templates', action='store_true',
            help='Search project templates to find bundles. You need '
                 'this if you directly define your bundles in templates.')
        # Collect all unrecognised arguments so they can be passed through
        # to the webassets script runner.
        parser.add_argument('args', nargs=argparse.REMAINDER)

    def handle(self, *args, **options):
        """Configure logging, optionally discover bundles in templates,
        then delegate to the generic webassets CLI implementation."""
        # ``args`` holds all options Django did not parse itself;
        # ``options`` those the command declared.

        # Create log
        log = logging.getLogger('django-assets')
        # Map Django's verbosity (0-2) onto logging levels; .get() clamps
        # anything higher to DEBUG instead of raising KeyError as before.
        verbosity = int(options.get('verbosity', 1))
        log.setLevel({0: logging.WARNING, 1: logging.INFO}.get(verbosity, logging.DEBUG))
        log.addHandler(logging.StreamHandler())

        # If the user requested it, search for bundles defined in templates
        if options.get('parse_templates'):
            log.info('Searching templates...')
            # Container bundles are excluded: by their very nature they only
            # reference other bundles which are already registered.
            get_env().add(*[b for b in self.load_from_templates()
                            if not b.is_container])

        if len(get_env()) == 0:
            log.info("No asset bundles were found. "
                     "If you are defining assets directly within your "
                     "templates, you want to use the --parse-templates "
                     "option.")
            return

        prog = "%s assets" % path.basename(sys.argv[0])
        impl = GenericArgparseImplementation(
            env=get_env(), log=log, no_global_options=True, prog=prog)
        try:
            # The webassets script runner returns None on success (mapped to
            # zero) or a return code on build failure (raised as a Django
            # CommandError).
            retval = impl.run_with_argv(args) or 0
            if retval != 0:
                raise CommandError('The webassets build script exited with '
                                   'a non-zero exit code (%d).' % retval)
        except AssetCommandError as e:
            raise CommandError(e)

    def load_from_templates(self):
        """Collect bundles defined in Django templates, plus those in
        Jinja2 templates when jinja2 (and optionally coffin) is installed."""
        # Using the Django loader
        bundles = DjangoLoader().load_bundles()

        # Using the Jinja loader, if available
        try:
            import jinja2
        except ImportError:
            pass
        else:
            from webassets.ext.jinja2 import Jinja2Loader, AssetsExtension

            jinja2_envs = []
            # Prepare a Jinja2 environment we can later use for parsing.
            # If not specified by the user, put in there at least our own
            # extension, which we will need most definitely to achieve
            # anything.
            _jinja2_extensions = getattr(settings, 'ASSETS_JINJA2_EXTENSIONS', False)
            if not _jinja2_extensions:
                _jinja2_extensions = [AssetsExtension.identifier]
            jinja2_envs.append(jinja2.Environment(extensions=_jinja2_extensions))

            try:
                from coffin.common import get_env as get_coffin_env
            except ImportError:
                pass
            else:
                jinja2_envs.append(get_coffin_env())

            bundles.extend(Jinja2Loader(get_env(),
                                        get_django_template_dirs(),
                                        jinja2_envs).load_bundles())

        return bundles
|
mrniranjan/python-scripts | reboot/system/system4.py | Python | gpl-2.0 | 288 | 0.003472 | import os
import select
# Read "data" in 10-byte chunks, polling with a 2-second select() timeout.
fd = os.open("data", os.O_RDONLY)
try:
    while True:
        # select() takes *lists* of descriptors; the original passed the
        # bare int fd, which raises TypeError. Also modernised the
        # Python-2 print statements.
        reads, _, _ = select.select([fd], [], [], 2.0)
        if reads:
            chunk = os.read(reads[0], 10)
            if chunk:
                print("-> ", chunk)
            else:
                break  # EOF
        else:
            print("timeout")
finally:
    # The original leaked the descriptor.
    os.close(fd)
|
damianbaran/inz | popup/publikacja.py | Python | gpl-3.0 | 16,876 | 0.03421 | # -*- coding: utf-8 -*-
################################################
## Aplikacja wspomagajaca tworzenie bazy publikacji naukowych wpsółpracujaca z Google Scholar
## Copyright (C) 2013 Damian Baran
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
################################################
import wx
import os
import wx.xrc
import modules.baz.cDatabase as cDatabase
import linecache
###########################################################################
## Class PubDialog
###########################################################################
## Dokumentacja dla klasy
#
# Klasa zawiera widok z zarzadzaniem publikacjami
class PubDialog ( wx.Dialog ):
## Konstruktor
def __init__( self ):
wx.Dialog.__init__ ( self, None, id = wx.ID_ANY, title = u"Zarządzanie Publikacjami", pos = wx.DefaultPosition, size = wx.Size( 450,430 ), style = wx.DEFAULT_DIALOG_STYLE )
self.session = cDatabase.connectDatabase()
self.listType = []
self.getType()
ico = wx.Icon('icon/pub.ico', wx.BITMAP_TYPE_ICO)
self.SetIcon(ico)
self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize )
bSizer1 = wx.BoxSizer( wx.VERTICAL )
bSizer28 = wx.BoxSizer( wx.VERTICAL )
bSizer21 = wx.BoxSizer( wx.VERTICAL )
self.m_staticText1 = wx.StaticText( self, wx.ID_ANY, u"Dodawanie Publikacji", wx.DefaultPosition, wx.DefaultSize, wx.ALIGN_CENTRE|wx.ST_NO_AUTORESIZE )
self.m_staticText1.Wrap( -1 )
bSizer21.Add( self.m_staticText1, 0, wx.EXPAND|wx.ALL, 5 )
bSizer28.Add( bSizer21, 0, wx.EXPAND|wx.ALIGN_CENTER_HORIZONTAL, 5 )
bSizer1.Add( bSizer28, 0, wx.EXPAND, 5 )
bSizer26 = wx.BoxSizer( wx.HORIZONTAL )
bSizer15 = wx.BoxSizer( wx.VERTICAL )
bSizer3 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText2 = wx.StaticText( self, wx.ID_ANY, u"Tytuł:", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText2.Wrap( -1 )
bSizer3.Add( self.m_staticText2, 1, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 )
self.m_textCtrl2 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 145,-1 ), 0 )
bSizer3.Add( self.m_textCtrl2, 0, wx.BOTTOM|wx.RIGHT|wx.LEFT, 5 )
bSizer15.Add( bSizer3, 0, wx.EXPAND|wx.ALIGN_CENTER_HORIZONTAL, 5 )
bSizer5 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText4 = wx.StaticText( self, wx.ID_ANY, u"Autorzy:", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText4.Wrap( -1 )
bSizer5.Add( self.m_staticText4, 1, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 )
self.m_textCtrl4 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 145,-1 ), 0 )
bSizer5.Add( self.m_textCtrl4, 0, wx.BOTTOM|wx.RIGHT|wx.LEFT, 5 )
bSizer15.Add( bSizer5, 0, wx.EXPAND, 5 )
bSizer4 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText3 = wx.StaticText( self, wx.ID_ANY, u"Cytowania:", w | x.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText3.Wrap( -1 )
bSizer4.Add( self.m_staticText3, 1, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 )
self.m_textCtrl3 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.Defa | ultPosition, wx.Size( 145,-1 ), 0 )
bSizer4.Add( self.m_textCtrl3, 0, wx.BOTTOM|wx.RIGHT|wx.LEFT, 5 )
bSizer15.Add( bSizer4, 0, wx.ALIGN_CENTER_HORIZONTAL|wx.EXPAND, 5 )
bSizer6 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText5 = wx.StaticText( self, wx.ID_ANY, u"Typ:", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText5.Wrap( -1 )
bSizer6.Add( self.m_staticText5, 1, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 )
m_choice1Choices = self.listType
self.m_choice1 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 145,-1 ), m_choice1Choices, 0 )
self.m_choice1.SetSelection( 0 )
bSizer6.Add( self.m_choice1, 0, wx.BOTTOM|wx.RIGHT|wx.LEFT, 5 )
bSizer15.Add( bSizer6, 0, wx.EXPAND, 5 )
bSizer7 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText6 = wx.StaticText( self, wx.ID_ANY, u"Rok:", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText6.Wrap( -1 )
bSizer7.Add( self.m_staticText6, 1, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 )
self.m_textCtrl5 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 145,-1 ), 0 )
bSizer7.Add( self.m_textCtrl5, 0, wx.BOTTOM|wx.RIGHT|wx.LEFT, 5 )
bSizer15.Add( bSizer7, 0, wx.EXPAND, 5 )
bSizer8 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText7 = wx.StaticText( self, wx.ID_ANY, u"DOI:", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText7.Wrap( -1 )
bSizer8.Add( self.m_staticText7, 1, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 )
self.m_textCtrl6 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 145,-1 ), 0 )
bSizer8.Add( self.m_textCtrl6, 0, wx.BOTTOM|wx.RIGHT|wx.LEFT, 5 )
bSizer15.Add( bSizer8, 0, wx.EXPAND, 5 )
bSizer29 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText9 = wx.StaticText( self, wx.ID_ANY, u"Inny klucz:", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText9.Wrap( -1 )
bSizer29.Add( self.m_staticText9, 1, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 )
self.m_textCtrl7 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 145,-1 ), 0 )
bSizer29.Add( self.m_textCtrl7, 0, wx.BOTTOM|wx.RIGHT|wx.LEFT, 5 )
bSizer15.Add( bSizer29, 0, wx.EXPAND, 5 )
bSizer9 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText8 = wx.StaticText( self, wx.ID_ANY, u"Wydawca:", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText8.Wrap( -1 )
bSizer9.Add( self.m_staticText8, 1, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 )
m_choice2Choices = cDatabase.getJournalName(self.session)
self.m_choice2 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 145,-1 ), m_choice2Choices, 0 )
bSizer9.Add( self.m_choice2, 0, wx.BOTTOM|wx.RIGHT|wx.LEFT, 5 )
bSizer15.Add( bSizer9, 0, wx.EXPAND, 5 )
bSizer17 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText10 = wx.StaticText( self, wx.ID_ANY, u"Źródło:", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText10.Wrap( -1 )
bSizer17.Add( self.m_staticText10, 1, wx.ALL, 5 )
self.m_textCtrl71 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 145,-1 ), 0 )
bSizer17.Add( self.m_textCtrl71, 0, wx.BOTTOM|wx.RIGHT|wx.LEFT, 5 )
bSizer15.Add( bSizer17, 1, wx.EXPAND, 5 )
bSizer18 = wx.BoxSizer( wx.HORIZONTAL )
self.m_staticText99 = wx.StaticText( self, wx.ID_ANY, u"LMCP:", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText99.Wrap( -1 )
bSizer18.Add( self.m_staticText99, 1, wx.ALL, 5 )
self.m_textCtrl99 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 145,-1 ), 0 )
|
tmtowtdi/MontyLacuna | gui/ORIGships/lib/gui/ui_about.py | Python | mit | 2,866 | 0.003838 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui/about.ui'
#
# Created: Thu Jun 4 15:32:51 2015
# by: pyside-uic 0.2.15 running on PySide 1.2.2
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_About(object):
    """UI definition for the About dialog.

    NOTE: generated by pyside-uic from ui/about.ui (see the header warning);
    regenerate from the .ui file rather than editing by hand. This copy only
    repairs extraction-garbled lines.
    """

    def setupUi(self, About):
        About.setObjectName("About")
        About.resize(400, 300)
        self.verticalLayout_2 = QtGui.QVBoxLayout(About)
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        self.verticalLayout = QtGui.QVBoxLayout()
        self.verticalLayout.setObjectName("verticalLayout")
        self.lbl_appname = QtGui.QLabel(About)
        self.lbl_appname.setObjectName("lbl_appname")
        self.verticalLayout.addWidget(self.lbl_appname)
        spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.verticalLayout.addItem(spacerItem)
        self.lbl_pyside_ver = QtGui.QLabel(About)
        self.lbl_pyside_ver.setObjectName("lbl_pyside_ver")
        self.verticalLayout.addWidget(self.lbl_pyside_ver)
        self.lbl_qt_ver = QtGui.QLabel(About)
        self.lbl_qt_ver.setObjectName("lbl_qt_ver")
        self.verticalLayout.addWidget(self.lbl_qt_ver)
        spacerItem1 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.verticalLayout.addItem(spacerItem1)
        self.lbl_copyright = QtGui.QLabel(About)
        self.lbl_copyright.setObjectName("lbl_copyright")
        self.verticalLayout.addWidget(self.lbl_copyright)
        self.verticalLayout_2.addLayout(self.verticalLayout)
        self.buttonBox = QtGui.QDialogButtonBox(About)
        self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
        self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Ok)
        self.buttonBox.setObjectName("buttonBox")
        self.verticalLayout_2.addWidget(self.buttonBox)

        self.retranslateUi(About)
        QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL("accepted()"), About.accept)
        QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL("rejected()"), About.reject)
        QtCore.QMetaObject.connectSlotsByName(About)

    def retranslateUi(self, About):
        About.setWindowTitle(QtGui.QApplication.translate("About", "Dialog", None, QtGui.QApplication.UnicodeUTF8))
        self.lbl_appname.setText(QtGui.QApplication.translate("About", "App Name and Version", None, QtGui.QApplication.UnicodeUTF8))
        self.lbl_pyside_ver.setText(QtGui.QApplication.translate("About", "PySide Version", None, QtGui.QApplication.UnicodeUTF8))
        self.lbl_qt_ver.setText(QtGui.QApplication.translate("About", "QtCore Version", None, QtGui.QApplication.UnicodeUTF8))
        self.lbl_copyright.setText(QtGui.QApplication.translate("About", "Copyright", None, QtGui.QApplication.UnicodeUTF8))
|
todaychi/hue | apps/impala/src/impala/dashboard_api.py | Python | apache-2.0 | 22,801 | 0.01 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import json
import numbers
import re
import time
from datetime import datetime, timedelta
from itertools import groupby
from django.utils.html import escape
from notebook.models import make_notebook
from notebook.connectors.base import get_api, OperationTimeout
from dashboard.models import Collection2, augment_response
LOG = logging.getLogger(__name__)

# Row limit applied to the generated facet queries (see SQLApi.query).
LIMIT = 100
class MockRequest():
  """Minimal stand-in for a Django request object; carries only the user."""
  def __init__(self, user):
    self.user = user
# To Split in Impala, DBMS..
# To inherit from DashboardApi
class SQLApi():
def __init__(self, user, engine):
self.user = user
self.engine = engine
self.async = engine == 'hive' or engine == 'impala'
  def query(self, dashboard, query, facet=None):
    """Translate a dashboard query (and optional facet) into SQL, submit it
    through a transient notebook, and return the execution response.

    :param dashboard: dashboard/collection definition; provides the
        ``database.table`` name, engine and field list.
    :param query: dict with ``qs`` (query strings) and filter definitions.
    :param facet: optional widget definition; its ``type`` ('nested',
        'function', or none) drives the shape of the generated SQL.
    """
    database, table = self._get_database_table_names(dashboard['name'])
    # '_root_:*' is the "no real query yet" placeholder: short-circuit with
    # an empty, Solr-shaped result without hitting the backend.
    if query['qs'] == [{'q': '_root_:*'}]:
      return {'response': {'numFound': 0}}
    filters = [q['q'] for q in query['qs'] if q['q']]
    filters.extend(self._get_fq(dashboard, query, facet))
    timeFilter = self._get_time_filter_query(dashboard, query)
    if timeFilter:
      filters.append(timeFilter)
    if facet:
      if facet['type'] == 'nested':
        # Multi-dimension facet: build the SELECT list by walking the
        # sub-facets in reverse, inserting each dimension's select
        # expression and one aggregate (or COUNT(*)) for the innermost
        # dimension encountered.
        fields_dimensions = [self._get_dimension_field(f)['name'] for f in self._get_dimension_fields(facet)]
        last_dimension_seen = False
        fields = []
        for f in reversed(facet['properties']['facets']):
          if f['aggregate']['function'] == 'count':
            if not last_dimension_seen:
              fields.insert(0, 'COUNT(*) AS Count')
              last_dimension_seen = True
            fields.insert(0, self._get_dimension_field(f)['select'])
          else:
            if not last_dimension_seen:
              fields.insert(0, self._get_aggregate_function(f))
        if not last_dimension_seen:
          fields.insert(0, 'COUNT(*) as Count')
        fields.insert(0, self._get_dimension_field(facet)['select'])
        sql = '''SELECT %(fields)s
        FROM %(database)s.%(table)s
        %(filters)s
        GROUP BY %(fields_dimensions)s
        ORDER BY %(order_by)s
        LIMIT %(limit)s''' % {
            'database': database,
            'table': table,
            'fields': ', '.join(fields),
            'fields_dimensions': ', '.join(fields_dimensions),
            'order_by': ', '.join([self._get_dimension_field(f)['order_by'] for f in self._get_dimension_fields(facet)]),
            'filters': self._convert_filters_to_where(filters),
            'limit': LIMIT
        }
      elif facet['type'] == 'function': # 1 dim only now
        sql = '''SELECT %(fields)s
        FROM %(database)s.%(table)s
        %(filters)s''' % {
            'database': database,
            'table': table,
            'fields': self._get_aggregate_function(facet),
            'filters': self._convert_filters_to_where(filters),
        }
    else:
      # No facet: plain result-grid SELECT of the dashboard's field list.
      fields = Collection2.get_field_list(dashboard)
      sql = "SELECT %(fields)s FROM `%(database)s`.`%(table)s`" % {
        'database': database,
        'table': table,
        'fields': ', '.join(['`%s`' % f if f != '*' else '*' for f in fields])
      }
      if filters:
        sql += ' ' + self._convert_filters_to_where(filters)
      sql += ' LIMIT %s' % LIMIT
    editor = make_notebook(
        name='Execute and watch',
        editor_type=dashboard['engine'],
        statement=sql,
        database=database,
        status='ready-execute',
        skip_historify=True
    )
    response = editor.execute(MockRequest(self.user))
    # Synchronous engines return rows immediately; convert them in place.
    if 'handle' in response and response['handle'].get('sync'):
      response['result'] = self._convert_result(response['result'], dashboard, facet, query)
    return response
def fetch_result(self, dashboard, query, facet):
notebook = {}
snippet = facet['queryResult']
start_over = True # TODO
result = get_api(MockRequest(self.user), snippet).fetch_result(
notebook,
snippet,
dashboard['template']['rows'],
start_over=start_over
)
return self._convert_result(result, dashboard, facet, query)
def datasets(self, show_all=False):
snippet = {'type': self.engine}
return [table['name'] for table in get_api(MockRequest(self.user), snippet).autocomplete(snippet, database='default')['tables_meta']]
def fields(self, dashboard):
database, table = self._get_database_table_names(dashboard)
snippet = {'type': self.engine}
table_metadata = get_api(MockRequest(self.user), snippet).autocomplete(snippet, database, table)
return {
'schema': {
'fields':
dict([(col['name'], {
'name': str(escape(col['name'])),
'type': str(col['type']),
'uniqueKey': col.get('primary_key') == 'true',
# 'dynamicBase': False,
'indexed': False,
'stored': True,
'required': col.get('primary_key') == 'true'
})
for col in table_metadata['extended_columns']]
)
}
}
def schema_fields(self, collection):
return {
'fields': [f for f in self.fields(collection)['schema']['fields'].itervalues()]
}
def luke(self, collection):
fields = self.schema_fields(collection)
return {'fields': Collection2._make_luke_from_schema_fields(fields)}
def stats(self, dataset, fields, query, facet):
database, table = self._get_database_table_names(dataset)
# TODO: check column stats to go faster
sql = "SELECT MIN(`%(field)s`), MAX(`%(field)s`) FROM `%(database)s`.`%(table)s`" % {
'field': fields[0],
'database': database,
'table': table
}
result = self._sync_execute(sql, database)
if result:
stats = list(result['data'])
min_value, max_value = stats[0]
if not isinstance(min_value, numbers.Number):
min_value = min_value.replace(' ', 'T') + 'Z'
max_value = max_value.replace(' ', 'T') + 'Z'
return {
'stats': {
'stats_fields': {
fields[0]: {
| 'min': min_value,
'max': max_value,
}
}
}
}
def get(self, dashboard, doc_id):
database, table = | self._get_database_table_names(dashboard['name'])
field = self._get_field(dashboard, dashboard['idField'])
quotes = '' if self._is_number(field['type']) else "'"
sql = "SELECT * FROM `%(database)s`.`%(table)s` WHERE `%(idField)s` = %(quotes)s%(doc_id)s%(quotes)s" % {
'database': database,
'table': table,
'idField': dashboard['idField'], # Only 1 PK currently,
'doc_id': doc_id,
'quotes': quotes
}
result = self._sync_execute(sql, database)
if result:
cols = [col['name'] for col in result['meta']]
rows = list(result['data']) # No escape_rows
doc_data = dict([(header, cell) for row in rows for header, cell in zip(cols, row)])
else:
doc_data = {}
return {
"doc": doc_data
}
def _sync_execute(self, sql, database):
editor = make_notebook(
name='Execute and watch',
editor_type=self.engine,
statement=sql,
database=database,
status='ready-execute',
skip_historify=True
# async=False
)
request = MockRequest(self.user)
mock_notebook = {}
snippet = {'type': self.engine}
response = editor.execute(request)
if |
visionegg/visionegg | demo/color_grating.py | Python | lgpl-2.1 | 2,726 | 0.023844 | #!/usr/bin/env python
"""Colored sine wave grating in circular mask"""
############################
# Import various modules #
############################
import VisionEgg
VisionEgg.start_default_logging(); VisionEgg.watch_exceptions()
from VisionEgg.Core import *
from VisionEgg.FlowControl import Presentation, FunctionController
from VisionEgg.Gratings import SinGrating2D
from VisionEgg.Textures import Mask2D
from math import *
#####################################
# Initialize OpenGL window/screen #
#####################################
screen = get_default_screen()
######################################
# Create sinusoidal grating object #
######################################
mask = Mask2D(function='circle', # also supports 'circle'
radius_parameter=100, # sigma for gaussian, radius for circle (units: num_samples)
num_samples=(256,256)) # this many texture elements in mask (covers whole size specified below)
# NOTE: I am not a color scientist, and I am not familiar with the
# needs of color scientists. Color interpolation is currently done in
# RGB space, but I assume there are other interpolation methods that
# people may want. Please submit any suggestions.
stimulus = SinGrating2D(color1 = (0.5, 0.25, 0.5), # RGB (alpha ignored if given)
color2 = (1.0, 0.5, 0.1), # RGB (alpha ignored if given)
contrast = 0.2,
| pedestal = 0.1,
mask = mask, # optional
position = ( screen.size[0]/2.0, screen.size[1]/2.0 ),
anchor = 'center',
size = ( 300.0 , 300.0 ),
spatial_freq = 20.0 / screen.size[0], # units of cycles/pixel
temporal_freq_hz = | 1.0,
orientation = 270.0 )
def pedestal_func(t, temporal_freq_hz=0.2):
    """Return the grating pedestal (mean luminance) at time ``t``.

    The pedestal oscillates sinusoidally in [0.1, 0.9] around 0.5; combined
    with the stimulus contrast of 0.2 this keeps the total luminance range
    within [0.0, 1.0].

    Parameters
    ----------
    t : float
        Elapsed time in seconds.
    temporal_freq_hz : float, optional
        Oscillation frequency in Hz. Defaults to 0.2, the value that was
        previously hard-coded, so existing callers are unaffected.

    Returns
    -------
    float
        Pedestal value in [0.1, 0.9].
    """
    return 0.4 * sin(t * 2 * pi * temporal_freq_hz) + 0.5
###############################################################
# Create viewport - intermediary between stimuli and screen #
###############################################################
viewport = Viewport( screen=screen, stimuli=[stimulus] )
########################################
# Create presentation object and go! #
########################################
p = Presentation(go_duration=(10.0,'seconds'),viewports=[viewport])
p.add_controller(stimulus,'pedestal',FunctionController(during_go_func=pedestal_func))
p.go()
|
mscuthbert/abjad | abjad/tools/pitchtools/test/test_pitchtools_Octave_from_pitch_name.py | Python | gpl-3.0 | 267 | 0.003745 | # -*- encoding: utf-8 -*-
from abjad import *
|
def test_pitchtools_Octave_from_pitch_name_01():
assert pitchtools.Octave.from_pitch_name("cs'") == 4
assert pitchtools.Octave.from_pitch_name('cs') == 3
| assert pitchtools.Octave.from_pitch_name('cs,') == 2 |
Elico-Corp/odoo_OCB | addons/l10n_fr_certification/__init__.py | Python | agpl-3.0 | 1,524 | 0.004593 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import models
from openerp import api, SUPERUSER_ID
def _setup_inalterability(cr, registry):
env = api.Environment(cr, SUPERUSER_ID, {})
# enable ping for this module
env['publisher_warranty.contract'].update_notification(cron_mode=True)
# make sure account_cancel is not usable at the same time as l10n_fr
# FORWARD PORT NOTICE
# In master as of March 2017, RCO-ODOO coded an exclusive field on modules to flag incompatibility
wanted_states = ['installed', 'to upgrade', 'to install']
account_cancel_module = env['ir.module.module'].search([('name', '=', 'account_cancel')], limit=1)
if account_cancel_module and account_cancel_module.state in wanted_states:
views_xml_id = env['ir.model.data'].search([('module', '=', 'account_cancel'), ('model', '=', 'ir.ui.view')])
ir_views = env['ir.ui.view'].browse([v.res_id for v in views_xml_id])
| for cancel_view in ir_views:
cancel_view.write({'active': False})
fr_c | ompanies = env['res.company'].search([('partner_id.country_id.code', '=', 'FR')])
if fr_companies:
# create the securisation sequence per company
fr_companies._create_secure_sequence()
#reset the update_posted field on journals
journals = env['account.journal'].search([('company_id', 'in', fr_companies.ids)])
for journal in journals:
journal.write({'update_posted': False})
|
pombredanne/voc | tests/builtins/test_abs.py | Python | bsd-3-clause | 566 | 0 | from .. utils import TranspileTestCase, BuiltinFuncti | onTestCase
class AbsTests(TranspileTestCase):
    """Checks that transpiled code matches CPython for abs() edge cases."""
    def test_abs_not_implemented(self):
        """abs() on an object without __abs__ must raise TypeError."""
        self.assertCodeExecution("""
            class NotAbsLike:
                pass
            x = NotAbsLike()
            try:
                print(abs(x))
            except TypeError as err:
                print(err)
            """)
class BuiltinAbsFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["abs"]
not_implemented = [
'test_class',
| 'test_frozenset',
]
|
davidyezsetz/kuma | vendor/packages/Werkzeug/docs/conf.py | Python | mpl-2.0 | 6,395 | 0.005786 | # -*- coding: utf-8 -*-
#
# Werkzeug documentation build configuration file, created by
# sphinx-quickstart on Fri Jan 16 23:10:43 2009.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If your extensions are in another directory, add it here. If the directory
# is relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
sys.path.append(os.path.abspath('.'))
# General configuration
# ---------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx',
'werkzeugext']
# Add any paths that c | ontain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the p | roject.
project = u'Werkzeug'
copyright = u'2009, The Werkzeug Team'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
import re
from werkzeug import __version__ as release
if 'dev' in release:
release = release[:release.find('dev') + 3]
version = re.match(r'\d+\.\d+(?:\.\d+)?', release).group()
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'tango'
# Options for HTML output
# -----------------------
# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
html_style = 'default.css'
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'Werkzeugdoc'
# Options for LaTeX output
# ------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
latex_documents = [
('index', 'Werkzeug.tex', ur'Werkzeug Documentation',
ur'The Werkzeug Team', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
'http://docs.python.org/dev': None,
'http://beta.pylonshq.com/docs/en/0.9.7': None,
'http://www.sqlalchemy.org/docs/05': None
}
|
savoirfairelinux/account-financial-tools | account_cancel_invoice_check_voucher/account_invoice.py | Python | agpl-3.0 | 2,758 | 0.001813 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2012 Camptocamp (http://www.camptocamp.com)
# All Right Reserved
#
# Author : Vincent Renaville (Camptocamp)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later ver | sion.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GN | U Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.tools.translate import _
from openerp.osv import osv, orm
class account_invoice(orm.Model):
    """Prevent cancelling invoices already imported in a bank statement."""
    _inherit = "account.invoice"

    def action_cancel(self, cr, uid, ids, context=None):
        """Refuse cancellation when the invoice's accounting move is linked,
        through a voucher, to a bank statement line; otherwise defer to the
        standard cancellation.

        :raises osv.except_osv: if a bank statement references the invoice
        """
        invoices = self.read(cr, uid, ids, ['move_id', 'payment_ids'])
        for invoice in invoices:
            if invoice['move_id']:
                # The invoice has a posted move: look for a bank statement
                # line whose voucher pays one of this move's lines.
                cr.execute("""SELECT abs.name AS statement_name,
                              abs.date AS statement_date,
                              absl.name
                              FROM account_bank_statement_line AS absl
                              INNER JOIN account_bank_statement AS abs
                              ON absl.statement_id = abs.id
                              WHERE EXISTS (SELECT 1
                              FROM account_voucher_line JOIN account_move_line ON
                              (account_voucher_line.move_line_id = account_move_line.id)
                              WHERE voucher_id=absl.voucher_id
                              AND account_move_line.move_id = %s )""",
                           (invoice['move_id'][0],))
                statement_lines = cr.dictfetchone()
                if statement_lines:
                    # FIX: translate the template first, then interpolate.
                    # Previously _() wrapped the already-formatted string, so
                    # the msgid could never match the translation catalog.
                    # (The 'statment' typo is kept: it is the existing msgid.)
                    raise osv.except_osv(
                        _('Error!'),
                        _('Invoice already imported in bank statment (%s) at %s on line %s')
                        % (statement_lines['statement_name'],
                           statement_lines['statement_date'],
                           statement_lines['name'],))
        return super(account_invoice, self).action_cancel(cr, uid, ids, context=context)
|
espenak/enkel | testsuite/unittest_tpl.py | Python | gpl-2.0 | 1,046 | 0.005736 | # This file is part of the Enkel web programming library.
#
# Copyright (C) 2007 Espen Angell Kristiansen (espen@wsgi.net)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOS | E. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc. | , 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from unittest import TestCase
from enkel.wansgli.testhelpers import unit_case_suite, run_suite
class Test(TestCase):
def suite():
return unit_case_suite(Test)
if __name__ == '__main__':
run_suite(suite())
|
stackforge/blazar | blazar/opts.py | Python | apache-2.0 | 1,861 | 0 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
imp | ort itertools
import blazar.api.v2.app
import blazar.api.v2.controllers
import blazar.cmd.api
import blazar.config
import blazar.db.base
import blazar.db.migration.cli
import blazar.manager
import blazar.manager.service
import blazar.notification.notifier
import blazar.plugins.oshosts.host_plugin
import blazar.utils.openstack.keystone
import blazar.utils.openstack.nova
def list_opts():
| return [
('DEFAULT',
itertools.chain(
blazar.api.v2.app.auth_opts,
blazar.cmd.api.opts,
blazar.config.api_opts,
blazar.config.cli_opts,
blazar.config.lease_opts,
blazar.config.os_opts,
blazar.db.base.db_driver_opts,
blazar.db.migration.cli.command_opts,
blazar.utils.openstack.keystone.opts,
blazar.utils.openstack.keystone.keystone_opts)),
('api', blazar.api.v2.controllers.api_opts),
('manager', itertools.chain(blazar.manager.opts,
blazar.manager.service.manager_opts)),
('notifications', blazar.notification.notifier.notification_opts),
('nova', blazar.utils.openstack.nova.nova_opts),
(blazar.plugins.oshosts.RESOURCE_TYPE,
blazar.plugins.oshosts.host_plugin.plugin_opts),
]
|
jss-emr/openerp-7-src | openerp/addons/account/wizard/account_unreconcile.py | Python | agpl-3.0 | 2,140 | 0.002336 | # -*- coding: utf-8 -*-
################################################################ | ##############
#
# OpenERP, Open Source Manag | ement Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
class account_unreconcile(osv.osv_memory):
    """Wizard: remove reconciliation from the selected account move lines."""
    _name = "account.unreconcile"
    _description = "Account Unreconcile"
    def trans_unrec(self, cr, uid, ids, context=None):
        """Unreconcile the move lines given in ``context['active_ids']``
        (the client's current selection), then close the wizard window."""
        obj_move_line = self.pool.get('account.move.line')
        if context is None:
            context = {}
        if context.get('active_ids', False):
            obj_move_line._remove_move_reconcile(cr, uid, context['active_ids'], context=context)
        return {'type': 'ir.actions.act_window_close'}
account_unreconcile()
class account_unreconcile_reconcile(osv.osv_memory):
    """Wizard: delete the selected reconciliation records."""
    _name = "account.unreconcile.reconcile"
    _description = "Account Unreconcile Reconcile"
    def trans_unrec_reconcile(self, cr, uid, ids, context=None):
        """Unlink the reconciliation records given in
        ``context['active_ids']``, then close the wizard window."""
        obj_move_reconcile = self.pool.get('account.move.reconcile')
        if context is None:
            context = {}
        # FIX: use .get() so a call without 'active_ids' does not raise
        # KeyError (consistent with account_unreconcile.trans_unrec, which
        # already treats a missing selection as a no-op).
        rec_ids = context.get('active_ids')
        if rec_ids:
            obj_move_reconcile.unlink(cr, uid, rec_ids, context=context)
        return {'type': 'ir.actions.act_window_close'}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
lekshmideepu/nest-simulator | pynest/nest/lib/hl_api_simulation.py | Python | gpl-2.0 | 15,584 | 0.001091 | # -*- coding: utf-8 -*-
#
# hl_api_simulation.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Functions for simulation control
"""
from contextlib import contextmanager
import warnings
from ..ll_api import *
from .hl_api_helper import *
from .hl_api_parallel_computing import Rank
__all__ = [
'Cleanup',
'DisableStructuralPlasticity',
'EnableStructuralPlasticity',
'GetKernelStatus',
'Install',
'Prepare',
'ResetKernel',
'Run',
'RunManager',
'SetKernelStatus',
'Simulate',
]
@check_stack
def Simulate(t):
    """Simulate the network for `t` milliseconds.

    Parameters
    ----------
    t : float
        Time to simulate in ms

    See Also
    --------
    RunManager
    """
    # Push the duration (coerced to float) on the SLI stack, then invoke
    # the kernel's `ms Simulate` command.
    sps(float(t))
    sr('ms Simulate')
@check_stack
def Run(t):
    """Simulate the network for `t` milliseconds.

    Parameters
    ----------
    t : float
        Time to simulate in ms

    Notes
    -----
    Call between `Prepare` and `Cleanup` calls, or within a
    ``with RunManager`` clause. A single ``Simulate(t)`` is equivalent to::

        Prepare()
        for _ in range(m):
            Run(t / m)
        Cleanup()

    `Prepare` must be called before `Run` to calibrate the system, and
    `Cleanup` must be called after `Run` to close files, cleanup handles, and
    so on. After `Cleanup`, `Prepare` can and must be called before more `Run`
    calls.

    Be careful about modifying the network or neurons between `Prepare` and
    `Cleanup` calls. In particular, do not call `Create`, `Connect`, or
    `SetKernelStatus`. Calling `SetStatus` to change membrane potential `V_m`
    of neurons or synaptic weights (but not delays!) will in most cases work
    as expected, while changing membrane or synaptic time constants will not
    work correctly. If in doubt, assume that changes may cause undefined
    behavior and check these thoroughly.

    See Also
    --------
    Prepare, Cleanup, RunManager, Simulate
    """
    # Push the duration (coerced to float) on the SLI stack, then invoke
    # the kernel's `ms Run` command.
    sps(float(t))
    sr('ms Run')
@check_stack
def Prepare():
    """Calibrate the system before a `Run` call. Not needed for `Simulate`.

    Call before the first `Run` call, or before calling `Run` after changing
    the system, calling `SetStatus` or `Cleanup`.

    See Also
    --------
    Run, Cleanup
    """
    sr('Prepare')
@check_stack
def Cleanup():
    """Clean up resources after a `Run` call. Not needed for `Simulate`.

    Closes state for a series of runs, such as flushing and closing files.
    A `Prepare` is needed after a `Cleanup` before any more calls to `Run`.

    See Also
    --------
    Run, Prepare
    """
    sr('Cleanup')
@contextmanager
def RunManager():
    """Context manager for `Run`.

    Calls `Prepare` before a series of `Run` calls, and guarantees `Cleanup`
    at the end, even if the body raises. E.g.::

        with RunManager():
            for _ in range(10):
                Run(100)
        # extract results

    Notes
    -----
    Be careful about modifying the network or neurons inside the `RunManager`
    context. In particular, do not call `Create`, `Connect`, or
    `SetKernelStatus`. Calling `SetStatus` to change membrane potential `V_m`
    of neurons or synaptic weights (but not delays!) will in most cases work
    as expected, while changing membrane or synaptic time constants will not
    work correctly. If in doubt, assume that changes may cause undefined
    behavior and check these thoroughly.

    See Also
    --------
    Prepare, Run, Cleanup, Simulate
    """
    Prepare()
    try:
        yield
    finally:
        Cleanup()
@check_stack
def ResetKernel():
    """Reset the simulation kernel.

    This will destroy the network as well as all custom models created with
    :py:func:`.CopyModel`. Calling this function is equivalent to restarting
    NEST.

    In particular,

    * all network nodes
    * all connections
    * all user-defined neuron and synapse models

    are deleted, and

    * time
    * random generators

    are reset. The only exception is that dynamically loaded modules are not
    unloaded. This may change in a future version of NEST.
    """
    sr('ResetKernel')
@check_stack
def SetKernelStatus(params):
r"""Set parameters for the simulation kernel.
Parameters
----------
params : dict
Dictionary of parameters to set.
**Note**
All NEST kernel parameters are described below, grouped by topic.
Some of them only provide information about the kernel status and
cannot be set by the user. These are marked as *read only* and can
be accessed using ``GetKernelStatus``.
**Time and resolution**
Parameters
----------
resolution : float, default: 0.1
The resolution of the simulation (in ms)
time : float
The current simulation time (in ms)
to_do : int, read only
The number of steps yet to be simulated
max_delay : float, default: 0.1
The maximum delay in the network
min_delay : float, default: 0.1
The minimum delay in the network
ms_per_tic : float, default: 0.001
The number of milliseconds per tic
tics_per_ms : float, default: 1000.0
The number of tics per millisecond
tics_per_step : int, default: 100
The number of tics per simulation time step
T_max : float, read only
The largest representable time value
T_min : float, read only
The smallest representable time value
**Random number generators**
Parameters
----------
rng_types : list, read only
Names of random number generator types available.
Types: "Philox_32", "Philox_64", "Threefry_32", "Threefry_64", "mt19937", "mt19937_64"
rng_type : str, default: mt19937_64
Name of random number generator type used by NEST.
rng_seed : int, default: 143202461
Seed value used as base for seeding NEST random number generators
(:math:`1 \leq s \leq 2^{32}-1`).
**Parallel processing**
Parameters
----------
total_num_virtual_procs : int, default: 1
The total number of virtual processes
local_num_threads : int, default: 1
The local number of threads
num_processes : int, read only
The number of MPI processes
off_grid_spiking : bool, read only
Whether to transmit precise spike times in MPI communication
**MPI buffers**
Parameters
----------
adaptive_spike_buffers : bool, default: True
Whether MPI buffers for communication of spikes resize on the fly
adaptive_target_buffers : bool, default: True
Whether MPI buffers for communication of connections resize on the fly
buffer_size_secondary_events : int, read only
Size of MPI buffers for communicating secondary events (in bytes, per
MPI rank, for developers)
buffer_size_spike_data : int, default: 2
Total size of MPI buffer for communication of spikes
buffer_size_target_data : int, default: 2
Total size of MPI buffer for communication of connections
growth_factor_buffer_spike_data : float, default: 1.5
If MPI buffers for communication of spikes resize on the fly, grow
them by this factor each round
growth_factor_buffer_target_data : float, default: 1.5
If MPI buffers for communication of connections resize on the fly, grow
them by this factor each round
max_buffer_size_spike_data : int, default: 8388608
Maximal size of MPI buffers for communication of spikes.
max_buffer_size_target_data : int, default: 16777216
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.