repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
skarphed/skarphed | admin/src/skarphedadmin/gui/skarphed/Role.py | Python | agpl-3.0 | 4,713 | 0.01719 | #!/usr/bin/python
#-*- coding: utf-8 -*-
###########################################################
# © 2011 Daniel 'grindhold' Brendle and Team
#
# This file is part of Skarphed.
#
# Skarphed is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License
# as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later
# version.
#
# Skarphed is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with Skarphed.
# If not, see http://www.gnu.org/licenses/.
###########################################################
import pygtk
pygtk.require("2.0")
import gtk
from GenericObject import ObjectPageAbstract
from GenericObject import PageFrame
from GenericObject import FrameLabel
from skarphedadmin.gui import IconStock
from skarphedadmin.glue.lng import _
class RolePage(ObjectPageAbstract):
def __init__(self,parent,role):
ObjectPageAbstract.__init__(self,parent,role)
self.roleId = role.getLocalId()
role.fetchPermissions()
self.headline = gtk.Label()
self.pack_start(self.headline,False)
self.info = PageFrame(self,_("Information"), IconStock.ROLE)
self.infobox = gtk.VBox()
self.info.add(self.infobox)
self.pack_start(self.info,False)
self.perm = PageFrame(self,_("Permissions"), IconStock.PERMISSION)
self.permbox = gtk.Table(1,2,False)
self.permbox.set_row_spacings(10)
self.permbox.set_col_spacings(10)
self.permbox.set_border_width(10)
self.perm_permlabel = FrameLabel(self,_("Please choose the Permissions you want to assign to the user here:"), IconStock.PERMISSION)
self.perm_permlistview = gtk.TreeView()
self.perm_permlist = gtk.ListStore(int, str,str)
self.perm_permlistview.set_model(self.perm_permlist)
self.perm_permlist_col_checkbox = gtk.TreeViewColumn('')
self.perm_permlist_col_identifier = gtk.TreeViewColumn(_('Permission Identifier'))
self.perm_permlist_col_name = gtk.TreeViewColumn(_('Permission Name'))
self.perm_permlistview.append_column(self.perm_permlist_col_checkbox)
self.perm_permlistview.append_column(self.perm_permlist_col_identifier)
self.perm_per | mlistview.append_column(self.perm_permlist_col_name)
self.perm_permlist_renderer_checkbox= gtk.CellRendererToggle()
self.perm_permlist_renderer_identifier = gtk.CellRendererText()
self.perm_permlist_renderer_name = gtk.CellRendererText()
self.perm_permlist_col_checkbox.pack_start(self.perm_permlist_renderer_checkbox)
self.perm_permlist_col_identifier.pack_start(self.perm_permlist_renderer_ident | ifier)
self.perm_permlist_col_name.pack_start(self.perm_permlist_renderer_name)
self.perm_permlist_col_checkbox.add_attribute(self.perm_permlist_renderer_checkbox,'active',0)
self.perm_permlist_col_identifier.add_attribute(self.perm_permlist_renderer_identifier,'text',1)
self.perm_permlist_col_name.add_attribute(self.perm_permlist_renderer_name,'text',2)
self.perm_permlist_renderer_checkbox.set_activatable(True)
self.perm_permlist_renderer_checkbox.connect("toggled",self.toggledRight)
self.permbox.attach(self.perm_permlabel,0,1,0,1)
self.permbox.attach(self.perm_permlistview,0,1,1,2)
self.perm.add(self.permbox)
self.pack_start(self.perm,False)
self.show_all()
self.render()
def render(self):
role = self.getMyObject()
if not role:
return
self.headline.set_markup(_("<b>Edit Role: "+role.getName()+"</b>"))
if role.permissiondata is not None:
self.perm_permlist.clear()
for permission in role.permissiondata:
self.perm_permlist.append((int(permission['granted']),str(permission['right']),''))
def toggledRight(self,renderer = None, path = None):
rowiter = self.perm_permlist.get_iter(path)
perm = self.perm_permlist.get_value(rowiter,1)
val = 1-self.perm_permlist.get_value(rowiter,0)
role = self.getApplication().getLocalObjectById(self.roleId)
if val == 1:
role.assignPermission(perm)
else:
role.removePermission(perm)
|
pcu4dros/pandora-core | workspace/lib/python3.5/site-packages/sqlalchemy/testing/suite/test_types.py | Python | mit | 24,316 | 0.000082 | # coding: utf-8
from .. import fixtures, config
from ..assertions import eq_
from ..config import requirements
from sqlalchemy import Integer, Unicode, UnicodeText, select
from sqlalchemy import Date, DateTime, Time, MetaData, String, \
Text, Numeric, Float, literal, Boolean, cast, null, JSON, and_
from ..schema import Table, Column
from ... import testing
import decimal
import datetime
from ...util import u
from ... import util
class _LiteralRoundTripFixture(object):
@testing.provide_metadata
def _literal_round_trip(self, type_, input_, output, filter_=None):
"""test literal rendering """
# for literal, we test the literal render in an INSERT
# into a typed column. we can then SELECT it back as its
# official type; ideally we'd be able to use CAST here
# but MySQL in particular can't CAST fully
t = Table('t', self.metadata, Column('x', type_))
t.create()
for value in input_:
ins = t.insert().values(x=literal(value)).compile(
dialect=testing.db.dialect,
compile_kwargs=dict(literal_binds=True)
)
testing.db.execute(ins)
for row in t.select().execute():
value = row[0]
if filter_ is not None:
value = filter_(value)
assert value in output
class _UnicodeFixture(_LiteralRoundTripFixture):
__requires__ = 'unicode_data',
data = u("Alors vous imaginez ma surprise, au lever du jour, "
"quand une drôle de petite voix m’a réveillé. Elle "
"disait: « S’il vous plaît… dessine-moi un mouton! »")
@classmethod
def define_tables(cls, metadata):
Table('unicode_table', me | tadata,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('unicode_data', cls.datatype),
)
def test_round_trip(self):
unicode_table = self.tables.unicode_table
config.db.execu | te(
unicode_table.insert(),
{
'unicode_data': self.data,
}
)
row = config.db.execute(
select([
unicode_table.c.unicode_data,
])
).first()
eq_(
row,
(self.data, )
)
assert isinstance(row[0], util.text_type)
def test_round_trip_executemany(self):
unicode_table = self.tables.unicode_table
config.db.execute(
unicode_table.insert(),
[
{
'unicode_data': self.data,
}
for i in range(3)
]
)
rows = config.db.execute(
select([
unicode_table.c.unicode_data,
])
).fetchall()
eq_(
rows,
[(self.data, ) for i in range(3)]
)
for row in rows:
assert isinstance(row[0], util.text_type)
def _test_empty_strings(self):
unicode_table = self.tables.unicode_table
config.db.execute(
unicode_table.insert(),
{"unicode_data": u('')}
)
row = config.db.execute(
select([unicode_table.c.unicode_data])
).first()
eq_(row, (u(''),))
def test_literal(self):
self._literal_round_trip(self.datatype, [self.data], [self.data])
class UnicodeVarcharTest(_UnicodeFixture, fixtures.TablesTest):
__requires__ = 'unicode_data',
__backend__ = True
datatype = Unicode(255)
@requirements.empty_strings_varchar
def test_empty_strings_varchar(self):
self._test_empty_strings()
class UnicodeTextTest(_UnicodeFixture, fixtures.TablesTest):
__requires__ = 'unicode_data', 'text_type'
__backend__ = True
datatype = UnicodeText()
@requirements.empty_strings_text
def test_empty_strings_text(self):
self._test_empty_strings()
class TextTest(_LiteralRoundTripFixture, fixtures.TablesTest):
__requires__ = 'text_type',
__backend__ = True
@classmethod
def define_tables(cls, metadata):
Table('text_table', metadata,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('text_data', Text),
)
def test_text_roundtrip(self):
text_table = self.tables.text_table
config.db.execute(
text_table.insert(),
{"text_data": 'some text'}
)
row = config.db.execute(
select([text_table.c.text_data])
).first()
eq_(row, ('some text',))
def test_text_empty_strings(self):
text_table = self.tables.text_table
config.db.execute(
text_table.insert(),
{"text_data": ''}
)
row = config.db.execute(
select([text_table.c.text_data])
).first()
eq_(row, ('',))
def test_literal(self):
self._literal_round_trip(Text, ["some text"], ["some text"])
def test_literal_quoting(self):
data = '''some 'text' hey "hi there" that's text'''
self._literal_round_trip(Text, [data], [data])
def test_literal_backslashes(self):
data = r'backslash one \ backslash two \\ end'
self._literal_round_trip(Text, [data], [data])
class StringTest(_LiteralRoundTripFixture, fixtures.TestBase):
__backend__ = True
@requirements.unbounded_varchar
def test_nolength_string(self):
metadata = MetaData()
foo = Table('foo', metadata,
Column('one', String)
)
foo.create(config.db)
foo.drop(config.db)
def test_literal(self):
self._literal_round_trip(String(40), ["some text"], ["some text"])
def test_literal_quoting(self):
data = '''some 'text' hey "hi there" that's text'''
self._literal_round_trip(String(40), [data], [data])
def test_literal_backslashes(self):
data = r'backslash one \ backslash two \\ end'
self._literal_round_trip(String(40), [data], [data])
class _DateFixture(_LiteralRoundTripFixture):
compare = None
@classmethod
def define_tables(cls, metadata):
Table('date_table', metadata,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('date_data', cls.datatype),
)
def test_round_trip(self):
date_table = self.tables.date_table
config.db.execute(
date_table.insert(),
{'date_data': self.data}
)
row = config.db.execute(
select([
date_table.c.date_data,
])
).first()
compare = self.compare or self.data
eq_(row,
(compare, ))
assert isinstance(row[0], type(compare))
def test_null(self):
date_table = self.tables.date_table
config.db.execute(
date_table.insert(),
{'date_data': None}
)
row = config.db.execute(
select([
date_table.c.date_data,
])
).first()
eq_(row, (None,))
@testing.requires.datetime_literals
def test_literal(self):
compare = self.compare or self.data
self._literal_round_trip(self.datatype, [self.data], [compare])
class DateTimeTest(_DateFixture, fixtures.TablesTest):
__requires__ = 'datetime',
__backend__ = True
datatype = DateTime
data = datetime.datetime(2012, 10, 15, 12, 57, 18)
class DateTimeMicrosecondsTest(_DateFixture, fixtures.TablesTest):
__requires__ = 'datetime_microseconds',
__backend__ = True
datatype = DateTime
data = datetime.datetime(2012, 10, 15, 12, 57, 18, 396)
class TimeTest(_DateFixture, fixtures.TablesTest):
__requires__ = 'time',
__backend__ = True
datatype = Time
data = datetime.time(12, 57, 18)
class TimeMicrosecondsTest(_DateFixture, fixtures.TablesTest):
__requires__ = 'time_microseconds',
__backend__ = True
datatype = Time
|
amboutin/GCP | monitoring/api/v3/api-client/list_resources_test.py | Python | apache-2.0 | 2,187 | 0 | #!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Integration test for list_env.py
GOOGLE_APPLICATION_CREDENTIALS must be set to | a Service Account for a project
that has enabled the Monitoring API.
Currently the TEST_PROJECT_ID is hard-coded to run using the project created
for this test, but it could be changed to a different project.
"""
import os
import re
from gcp.testing.flaky import flaky
import googleapiclient.discovery
import pytest
import list_resources
PROJECT = os.environ['GCLOUD_PROJECT']
METRIC = 'compute.googleapis.com/instance/cpu/usage_time'
@pytest.fixture(scope='mo | dule')
def client():
return googleapiclient.discovery.build('monitoring', 'v3')
@flaky
def test_list_monitored_resources(client, capsys):
PROJECT_RESOURCE = "projects/{}".format(PROJECT)
list_resources.list_monitored_resource_descriptors(
client, PROJECT_RESOURCE)
stdout, _ = capsys.readouterr()
regex = re.compile(
'An application running', re.I)
assert regex.search(stdout) is not None
@flaky
def test_list_metrics(client, capsys):
PROJECT_RESOURCE = "projects/{}".format(PROJECT)
list_resources.list_metric_descriptors(
client, PROJECT_RESOURCE, METRIC)
stdout, _ = capsys.readouterr()
regex = re.compile(
u'Delta CPU', re.I)
assert regex.search(stdout) is not None
@flaky
def test_list_timeseries(client, capsys):
PROJECT_RESOURCE = "projects/{}".format(PROJECT)
list_resources.list_timeseries(
client, PROJECT_RESOURCE, METRIC)
stdout, _ = capsys.readouterr()
regex = re.compile(u'list_timeseries response:\n', re.I)
assert regex.search(stdout) is not None
|
msbeta/apollo | modules/tools/ota/update_client.py | Python | apache-2.0 | 2,270 | 0 | #!/usr/bin/env python
###############################################################################
# Copyright 2017 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in complian | ce with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################### | ############################################################
import requests
import os
import sys
import urllib3
from ConfigParser import ConfigParser
from modules.data.proto.static_info_pb2 import VehicleInfo
import common.proto_utils as proto_utils
def update():
# setup server url
config = ConfigParser()
CONFIG_FILE = os.path.join(os.path.dirname(__file__), 'config.ini')
config.read(CONFIG_FILE)
ip = config.get('Host', 'ip')
port = config.get('Host', 'port')
url = 'https://' + ip + ':' + port + '/update'
# setup car info
vehicle_info = VehicleInfo()
VEHICLE_INFO_FILE = os.path.join(
os.path.dirname(__file__), 'vehicle_info.pb.txt')
try:
proto_utils.get_pb_from_text_file(VEHICLE_INFO_FILE, vehicle_info)
except IOError:
print "vehicle_info.pb.txt cannot be open file."
exit()
brand = VehicleInfo.Brand.Name(vehicle_info.brand)
model = VehicleInfo.Model.Name(vehicle_info.model)
vin = vehicle_info.license.vin
car_info = {
"car_type": brand + "." + model,
"tag": sys.argv[1],
"vin": vin,
}
urllib3.disable_warnings()
CERT_FILE = os.path.join(os.path.dirname(__file__), 'ota.cert')
r = requests.post(url, json=car_info, verify=CERT_FILE)
if r.status_code == 200:
print "Update successfully."
sys.exit(0)
elif r.status_code == 400:
print "Invalid Request."
else:
print "Cannot connect to server."
sys.exit(1)
if __name__ == "__main__":
update()
|
t1g0r/ramey | src/backend/command/CommandHandler.py | Python | gpl-3.0 | 1,966 | 0.037131 | import sys,os
from pymongo import MongoClient
import importlib
import SwitchHandler
import CameraHandler
import SensorHandler
from pprint import pprint
class CommandHandler(object):
"""docstring for CommandHandler"""
def __init__(self, dbconn, command):
super(CommandHandler, self).__init__()
self.dbconn = dbconn
self.command = command
self.params = {}
self.params["callback"] = command["sendmessage"] #callback
self.params["callback2"] = command["sendphoto"] #callback2
self.params["command"] = self.command
self.AppConfig = self.command["AppConfig"]
self.callback = self.params["callback"]
self.callback2 = self.params["callback2"]
# self.callbacks = [self.callback,self.callback2]
def execute(self):
commandstr = self.command["message"][1:]
if " " in commandstr:
commandstr = commandstr[:commandstr.find(" ")]
# print "Command : '%s'" % commandstr
print "get from db"
| cCommand = self.dbconn.commandmapper.find({"commandkey":commandstr}).limit(1)
print "get db selesai"
#if commmand is not fou | nd, then send response
if cCommand.count() > 0:
cCommand = cCommand[0]
self.callback(self.command["account_id"],"hii %s, you just sent command name : '%s' and this is callback!" % (self.command["fullname"],cCommand["commandname"]))
try:
#execute command
#get package
self.modules = cCommand["class_ref"].split(".")
#fill params
self.params["class"] = self.modules[0]
self.params["method"] = self.modules[1]
self.params["id"] = cCommand["_id"]
# module = sys.modules[self.modules[0]]
# pprint(module)
module = eval(self.modules[0])
#get class
class_ = getattr(module, self.modules[0])
#init
instance = class_(self.dbconn,self.params)
#exec
instance.execute()
except Exception, e:
self.callback(self.command["account_id"],"Unhandled Command [%s]" % e)
raise e
else:
self.callback(self.command["account_id"],"Unknown Command.") |
maliciamrg/s3cmd | S3/ConnMan.py | Python | gpl-2.0 | 6,833 | 0.00439 | # -*- coding: utf-8 -*-
## Amazon S3 manager
## Author: Michal Ludvig <michal@logix.cz>
## http://www.logix.cz/michal
## License: GPL Version 2
## Copyright: TGRMN Software and contributors
import sys
import httplib
import ssl
from threading import Semaphore
from logging import debug
from Config import Config
from Exceptions import ParameterError
if not 'CertificateError ' in ssl.__dict__:
class CertificateError(Exception):
pass
ssl.CertificateError = CertificateError
__all__ = [ "ConnMan" ]
class http_connection(object):
context = None
context_set = False
@staticmethod
def _ssl_verified_context(cafile):
context = None
try:
context = ssl.create_default_context(cafile=cafile)
except AttributeError: # no ssl.create_default_context
pass
return context
@staticmethod
def _ssl_context():
if http_connection.context_set:
return http_connection.context
cfg = Config()
cafile = cfg.ca_certs_file
if cafile == "":
cafile = None
debug(u"Using ca_certs_file %s" % cafile)
context = http_connection._ssl_verified_context(cafile)
if context and not cfg.check_ssl_certificate:
context.check_hostname = False
debug(u'Disabling hostname checking')
http_connection.context = context
http_connection.context_set = True
return context
def match_hostname_aws(self, cert, e):
"""
Wildcard matching for *.s3.amazonaws.com and similar per region.
Per http://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html:
"We recommend that all bucket names comply with DNS naming conventions."
Per http://docs.aws.amazon.com/AmazonS3/latest/dev/VirtualHosting.html:
"When using virtual hosted-style buckets with SSL, the SSL
wild card certificate only matches buckets that do not contain
periods. To work around this, use HTTP or write your own
certificate verification logic."
Therefore, we need a custom validation routine that allows
mybucket.example.com.s3.amazonaws.com to be considered a valid
hostname for the *.s3.amazonaws.com wildcard cert, and for the
region-specific *.s3-[region].amazonaws.com wildcard cert.
"""
debug(u'checking SSL subjectAltName against amazonaws.com')
san = cert.get('subjectAltName', ())
for key, value in san:
if key == 'DNS':
if value.startswith('*.s3') and value.endswith('.amazonaws.com') and self.c.host.endswith('.amazonaws.com'):
return
raise e
def match_hostname(self):
cert = self.c.sock.getpeercert()
try:
ssl.match_hostname(cert, self.c.host)
except AttributeError: # old ssl module doesn't have this function
return
except ValueError: # empty SSL cert means underlying SSL library didn't validate it, we don't either.
return
except ssl.CertificateError, e:
self.match_hostname_aws(cert, e)
@staticmethod
def _https_connection(hostname, port=None):
try:
context = http_connection._ssl_context()
# S3's wildcart certificate doesn't work with DNS-style named buckets.
if hostname.endswith('.amazonaws.com') and context:
# this merely delays running the hostname check until
# after the connection is made and we get control
# back. We then run the same check, relaxed for S3's
# wildcard certificates.
context.check_hostname = Fals | e
conn = httplib.HTTPSConnection(hostname, port, context=context)
except TypeError:
conn = httplib.HTTPSConnection(hostname, port)
return conn
def __init__(self, id, hostname, ssl, cfg):
self.ssl = ssl
self.id = id
self.counter = 0
if not ssl:
if cfg.proxy_ | host != "":
self.c = httplib.HTTPConnection(cfg.proxy_host, cfg.proxy_port)
debug(u'proxied HTTPConnection(%s, %s)' % (cfg.proxy_host, cfg.proxy_port))
else:
self.c = httplib.HTTPConnection(hostname)
debug(u'non-proxied HTTPConnection(%s)' % hostname)
else:
if cfg.proxy_host != "":
self.c = http_connection._https_connection(cfg.proxy_host, cfg.proxy_port)
self.c.set_tunnel(hostname)
debug(u'proxied HTTPSConnection(%s, %s)' % (cfg.proxy_host, cfg.proxy_port))
debug(u'tunnel to %s' % hostname)
else:
self.c = http_connection._https_connection(hostname)
debug(u'non-proxied HTTPSConnection(%s)' % hostname)
class ConnMan(object):
conn_pool_sem = Semaphore()
conn_pool = {}
conn_max_counter = 800 ## AWS closes connection after some ~90 requests
@staticmethod
def get(hostname, ssl = None):
cfg = Config()
if ssl == None:
ssl = cfg.use_https
conn = None
if cfg.proxy_host != "":
if ssl and sys.hexversion < 0x02070000:
raise ParameterError("use_https=True can't be used with proxy on Python <2.7")
conn_id = "proxy://%s:%s" % (cfg.proxy_host, cfg.proxy_port)
else:
conn_id = "http%s://%s" % (ssl and "s" or "", hostname)
ConnMan.conn_pool_sem.acquire()
if not ConnMan.conn_pool.has_key(conn_id):
ConnMan.conn_pool[conn_id] = []
if len(ConnMan.conn_pool[conn_id]):
conn = ConnMan.conn_pool[conn_id].pop()
debug("ConnMan.get(): re-using connection: %s#%d" % (conn.id, conn.counter))
ConnMan.conn_pool_sem.release()
if not conn:
debug("ConnMan.get(): creating new connection: %s" % conn_id)
conn = http_connection(conn_id, hostname, ssl, cfg)
conn.c.connect()
if conn.ssl and cfg.check_ssl_certificate:
conn.match_hostname()
conn.counter += 1
return conn
@staticmethod
def put(conn):
if conn.id.startswith("proxy://"):
conn.c.close()
debug("ConnMan.put(): closing proxy connection (keep-alive not yet supported)")
return
if conn.counter >= ConnMan.conn_max_counter:
conn.c.close()
debug("ConnMan.put(): closing over-used connection")
return
ConnMan.conn_pool_sem.acquire()
ConnMan.conn_pool[conn.id].append(conn)
ConnMan.conn_pool_sem.release()
debug("ConnMan.put(): connection put back to pool (%s#%d)" % (conn.id, conn.counter))
|
Azure/azure-sdk-for-python | sdk/servicebus/azure-servicebus/tests/perf_tests/T1_legacy_tests/receive_message_batch.py | Python | mit | 1,104 | 0.002717 | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import asyncio
from ._test_base import _ReceiveTest
class LegacyReceiveMessageBatchTest(_Re | ceiveTest):
def run_sync(self):
count = 0
while count < self.args.num_messages:
batch = self.receiver.fetch_next(max_batch_size=self.args.num_messages - count)
if self.args.peeklock:
for msg in batch:
msg.complete()
count += len(batch)
| async def run_async(self):
count = 0
while count < self.args.num_messages:
batch = await self.async_receiver.fetch_next(max_batch_size=self.args.num_messages - count)
if self.args.peeklock:
await asyncio.gather(*[m.complete() for m in batch])
count += len(batch)
|
tzaffi/boto3play | encodeAndS3sync.py | Python | mit | 2,841 | 0.036607 | import json
import os
import ntpath
import datetime as dt
import pprint
import boto3
def get_config():
'''
Load the configuration from ./config.json
'''
with open('config.json', 'r') as config_file:
return json.load(config_file)
def get_new_files(config, max_ | age_in_hours=24):
'''
Get array | of all files younger than max_age_in_hours,
unless max_age_in_hours is set to 0 - in which case all files
are provided.
Note, directories and files with the wrong extension type are ignored.
'''
now = dt.datetime.now()
content_dir = config['content']
ext = config['content_ext']
if max_age_in_hours <= 0:
cutoff = dt.datetime.min
else:
cutoff = now-dt.timedelta(hours=max_age_in_hours)
def is_new_file(fname):
path = os.path.join(content_dir, fname)
st = os.stat(path)
mtime = dt.datetime.fromtimestamp(st.st_mtime)
return mtime > cutoff and os.path.isfile(path) and path.endswith('.'+ext)
files = os.listdir(content_dir)
files = [os.path.join(content_dir, fname) for fname in files if is_new_file(fname)]
print("Found %d new files" % len(files))
return files
def get_encoding_filemap(config, files):
'''
Get a dictionary of pointing from files to encode, to
where these should be saved
'''
encoded_dir = config['encoded']
ext = '.' + config['encoded_ext']
result = {}
for ifname in files:
path, fname = ntpath.split(ifname)
ofname = os.path.join(encoded_dir, fname.split('.')[0] + ext)
result[ifname] = ofname
return result
def encode_files(config, filesmap):
'''
Save each file with line breaks replaced by spaces (or rather
the value of config['encoded_linebreak'])
'''
br_out= config['encoded_linebreak']
num_encodings = 0
for ifname, ofname in filesmap.items():
with open(ifname) as ifile, open(ofname, 'w') as ofile:
ofile.write(ifile.read().replace('\n',br_out).replace('\r',''))
num_encodings += 1
return num_encodings
def upload_files(config, filesmap):
'''
Upload the destination values of filesmap to S3.
'''
s3 = boto3.resource('s3')
bucket = config['s3_bucket']
s3_bucket = s3.Bucket(bucket)
folder = config['s3_folder']
for file in filesmap.values():
key = "/".join([folder, ntpath.split(file)[-1]])
print("Will upload %s --> %s / %s" % (file, bucket, key))
with open(file, 'rb') as data:
s3_bucket.put_object(Key=key, Body=data)
if __name__ == '__main__':
pp = pprint.PrettyPrinter(indent=4)
config = get_config()
print("\nconfig:")
pp.pprint(config)
new_files = get_new_files(config, max_age_in_hours=24)
print("\nnew files:")
pp.pprint(new_files)
encoding_filemap = get_encoding_filemap(config, new_files)
print("\nfile encoding map:")
pp.pprint(encoding_filemap)
success_count = encode_files(config, encoding_filemap)
print("\nSuccessfully encoded %d files" % success_count)
upload_files(config, encoding_filemap) |
CINPLA/expipe-dev | py-open-ephys/pyopenephys/core.py | Python | gpl-3.0 | 38,806 | 0.003298 | """
Python library for reading OpenEphys files.
Depends on: sys
os
glob
datetime
numpy
quantities
xmljson
xmltodict
Authors: Alessio Buccino @CINPLA,
Svenn-Arne Dragly @CINPLA,
Milad H. Mobarhan @CINPLA,
Mikkel E. Lepperod @CINPLA
"""
import quantities as pq
import os
import os.path as op
import numpy as np
from datetime import datetime
import locale
import struct
import platform
import xmltodict
from pyopenephys.tools import *
from pyopenephys.OpenEphys import *
class Channel:
def __init__(self, index, name, gain, channel_id):
self.index = index
self.id = channel_id
self.name = name
self.gain = gain
class AnalogSignal:
def __init__(self, channel_id, signal, times):
self.signal = signal
self.channel_id = channel_id
self.times = times
def __str__(self):
return "<OpenEphys analog signal:shape: {}, sample_rate: {}>".format(
self.signal.shape, self.sample_rate
)
class TrackingData:
def __init__(self, times, x, y, width, height, channels, metadata):
self.times = times
self.x = x
self.y = y
self.width = width
self.height = height
self.channels = channels
self.metadata = metadata
def __str__(self):
return "<OpenEphys tracking data: times shape: {}, positions shape: {}>".format(
self.times.shape, self.x.shape
)
class EventData:
def __init__(self, times, channels, channel_states, full_words, processor, node_id, metadata=None):
self.times = times
self.channels = channels
self.channel_states = channel_states
self.full_words = full_words
self.processor = processor
self.node_id = node_id
self.metadata = metadata
def __str__(self):
return "<OpenEphys event data>"
class MessageData:
def __init__(self, times, channels, text):
self.times = times
self.channels = channels
self.text = text
def __str__(self):
return "<OpenEphys message data>"
class SpikeTrain:
def __init__(self, times, waveforms,
electrode_indices, clusters, metadata):
assert len(waveforms.shape) == 3
self.times = times
self.waveforms = waveforms
self.electrode_indices = electrode_indices
self.clusters = clusters
self.metadata = metadata
@property
def num_spikes(self):
"""
Alias for spike_count.
"""
return self.waveforms.shape[0]
@property
def num_chans(self):
"""
Alias for channel_count.
"""
return self.waveforms.shape[1]
@property
def num_frames(self):
"""
Alias for channel_count.
"""
return self.waveforms.shape[2]
#todo fix channels where they belong!
class ChannelGroup:
d | ef __init__(self, channel_group_id, filename, channels,
fileclass=None, **attrs):
self.attrs = attrs
self.filename = filename
self.id = channel_group_id
self.channel | s = channels
self.fileclass = fileclass
def __str__(self):
return "<OpenEphys channel_group {}: channel_count: {}>".format(
self.id, len(self.channels)
)
@property
def analog_signals(self):
ana = self.fileclass.analog_signals[0]
analog_signals = []
for channel in self.channels:
analog_signals.append(AnalogSignal(signal=ana.signal[channel.id],
channel_id=channel.id,
times=ana.times))
return analog_signals
@property
def spiketrains(self):
return [sptr for sptr in self.fileclass.spiketrains
if sptr.attrs['channel_group_id'] == self.id]
class File:
"""
Class for reading experimental data from an OpenEphys dataset.
"""
def __init__(self, foldername, probefile=None):
# TODO assert probefile is a probefile
# TODO add default prb map and allow to add it later
self.probefile = probefile
self._absolute_foldername = foldername
self._path, self.relative_foldername = os.path.split(foldername)
# figure out format
files = [f for f in sorted(os.listdir(self._absolute_foldername))]
if np.any([f.startswith('Continuous') for f in files]):
self.format = 'openephys'
cont_files = [f for f in sorted(os.listdir(self._absolute_foldername))
if f.startswith('Continuous')]
exp_ids = []
for con in cont_files:
if len(con.split('_')) == 2:
exp_ids.append(1)
else:
exp_ids.append(int(con.split('_')[-1][0]))
self._experiments = []
for id in exp_ids:
self._experiments.append(Experiment(self._absolute_foldername, id, self))
elif np.any([f.startswith('experiment') for f in files]):
self.format = 'binary'
experiments_names = [f for f in sorted(os.listdir(self._absolute_foldername))
if os.path.isdir(op.join(self._absolute_foldername, f))
and 'experiment' in f]
exp_ids = [int(exp[-1]) for exp in experiments_names]
self._experiments = []
for (rel_path, id) in zip(experiments_names, exp_ids):
self._experiments.append(Experiment(op.join(self._absolute_foldername, rel_path), id, self))
elif np.any([f.endswith('nwb') for f in files]):
self.format = 'nwb'
@property
def absolute_foldername(self):
return self._absolute_foldername
@property
def path(self):
return self._path
@property
def experiments(self):
return self._experiments
class Experiment:
def __init__(self, path, id, file):
self.file = file
self.probefile = file.probefile
self.id = id
self.sig_chain = dict()
self._absolute_foldername = path
self._recordings = []
self.settings = None
self.acquisition_system = None
if self.file.format == 'openephys':
self._path = self._absolute_foldername
self._read_settings(id)
# retrieve number of recordings
if self.acquisition_system is not None:
if self.id == 1:
contFile = [f for f in os.listdir(self._absolute_foldername) if 'continuous' in f and 'CH' in f
and len(f.split('_')) == 2][0]
else:
contFile = [f for f in os.listdir(self._absolute_foldername) if 'continuous' in f and 'CH' in f
and '_' + str(self.id) in f][0]
data = load(op.join(self._absolute_foldername, contFile))
rec_ids = np.unique(data['recordingNumber'])
for rec_id in rec_ids:
self._recordings.append(Recording(self._absolute_foldername, int(rec_id), self))
else:
self._recordings.append(Recording(self._absolute_foldername, int(self.id), self))
elif self.file.format == 'binary':
self._path = op.dirname(path)
self._read_settings(id)
recording_names = [f for f in os.listdir(self._absolute_foldername)
if os.path.isdir(op.join(self._absolute_foldername, f))
and 'recording' in f]
rec_ids = [int(rec[-1]) for rec in recording_names]
for (rel_path, id) in zip(recording_names, rec_ids):
self._recordings.append(Recording(op.join(self._absolute_foldername, rel_path), id,
self))
    @property
    def absolute_foldername(self):
        # Read-only absolute path of this experiment's folder.
        return self._absolute_foldername
@property
def path(self):
return self._ |
MoroGasper/client | client/javascript.py | Python | gpl-3.0 | 1,277 | 0.003132 | #!/usr/bin/env python
# encoding: utf-8
"""Copyright (C) 2013 COLDWELL AG
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program.  If not, see <http://www.gnu.org/licenses/>.
"""
import json
try:
from pyv8 import PyV8
except ImportError:
import PyV8
pyv8 = PyV8
def execute(code):
    """Run *code* in a fresh V8 context and return the evaluation result.

    Unicode source is encoded to UTF-8 first, since PyV8 expects bytes.
    """
    if isinstance(code, unicode):
        code = code.encode("utf-8")
    with PyV8.JSContext() as ctx:
        ctx.enter()
        return ctx.eval(code)
def _convert(data):
    """Recursively convert a PyV8 JS object tree into plain Python dicts.

    Nested ``PyV8.JSObject`` values are converted; every other value is
    copied through unchanged. (Fixes the mangled ``def`` line.)
    """
    result = {}
    for key in data.keys():
        value = data[key]
        if isinstance(value, PyV8.JSObject):
            result[key] = _convert(value)
        else:
            result[key] = value
    return result
def loads(data):
    """Round-trip *data* through the JS engine's JSON.stringify, then parse it."""
    serialized = execute("JSON.stringify({})".format(data))
    return json.loads(serialized)
|
rohitranjan1991/home-assistant | homeassistant/components/netgear_lte/notify.py | Python | mit | 1,330 | 0.001504 | """Support for Netgear LTE notifications."""
import logging
import attr
import eternalegypt
|
from homeassistant.components.notify | import ATTR_TARGET, BaseNotificationService
from . import CONF_NOTIFY, CONF_RECIPIENT, DATA_KEY
_LOGGER = logging.getLogger(__name__)
async def async_get_service(hass, config, discovery_info=None):
    """Get the notification service.

    Only set up through discovery; returns None otherwise.
    """
    if discovery_info is not None:
        return NetgearNotifyService(hass, discovery_info)
    return None
@attr.s
class NetgearNotifyService(BaseNotificationService):
    """Implementation of a notification service."""

    # attrs-generated attributes, set positionally: (hass, discovery config).
    hass = attr.ib()
    config = attr.ib()

    async def async_send_message(self, message="", **kwargs):
        """Send a message to a user."""
        # Look up the modem matching this service's config entry.
        modem_data = self.hass.data[DATA_KEY].get_modem_data(self.config)

        if not modem_data:
            _LOGGER.error("Modem not ready")
            return

        # Explicit targets win; otherwise fall back to configured recipients.
        targets = kwargs.get(ATTR_TARGET, self.config[CONF_NOTIFY][CONF_RECIPIENT])

        if not targets:
            _LOGGER.warning("No recipients")
            return

        if not message:
            # Nothing to send; silently skip (matches upstream behavior).
            return

        # Best-effort delivery: a failure for one target does not stop the rest.
        for target in targets:
            try:
                await modem_data.modem.sms(target, message)
            except eternalegypt.Error:
                _LOGGER.error("Unable to send to %s", target)
|
jobiols/management-system | mgmtsystem_hazard/models/mgmtsystem_hazard_origin.py | Python | agpl-3.0 | 1,280 | 0 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010 Savoir-faire Linux (<http://www.savoirfairelinux.com>).
#
#    This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class MgmtsystemHazardOrigin(models.Model):
    # OpenERP/Odoo model listing the possible origins of a hazard.
    _name = "mgmtsystem.hazard.origin"
    _description = "Origin of hazard"

    # Short translatable label plus an optional free-form description.
    name = fields.Char('Origin', size=50, required=True, translate=True)
    description = fields.Text('Description')
tortugueta/OET | patrons.py | Python | gpl-2.0 | 22,144 | 0.033689 | #! /usr/bin/python
# FIXME: for some reason the wheel does not appear centered
# FIXME: when I change some property in the shapes tab, the shape repositions
# itself
# The two previous FIXMES are related to the absolutely retarded way in which
# the Qt view looks at the Qt Scene. I still have to figure out how to
# properly handle it.
# FIXME: The scene creation, animations and stuff, should probably be coded in
# the GraphicsWindow class, not in the MainWindow
# FIXME: When I create the corners in tab2 and also when I resize them, I need
# to add some weird offset to make them align to the edge of the scene. I
# should find out why I need those offsets.
# FIXME: When stopping, lowering the rps, and starting again, sometimes you
# get a first rapid short rotation. That is because the initial angle is set
# to the end angle of the previous unit rotation. This is not trivial to
# solve because when I stop an animation mid-play, I don't know how to fetch
# the current angle of the figure.
# FIXME: the GraphicsWindow should be a MainWindow instad of a QDialog
# This is a test
import sys
import os
import math
import datetime
from PyQt4.QtCore import *
from PyQt4.QtGui import *
import main_window
import graphics_window
import calibrationWindow
class MainWindow(QMainWindow, main_window.Ui_MainWindow):
"""
This is the main window of the program.
"""
    def __init__(self, parent=None):
        """Build the UI, create the scenes and wire every signal connection."""
        QMainWindow.__init__(self, parent)
        main_window.Ui_MainWindow.__init__(self, parent)

        # Build the main window using the setupUi method generated by Qt
        # Designer
        self.setupUi(self)

        # Create the dialog that will show the graphics window
        self.graphWin = GraphicsWindow(parent=self)
        self.graphWin.show()

        # Populate the list of shapes in the Shapes tab
        self.tab3_shapesComboBox.addItems(['Circle', 'Rectangle'])

        # Create the scenes for each tab
        self.createScenes()

        # Global connections
        self.connect(self.tabWidget, SIGNAL("currentChanged(int)"), self.switchTab)
        self.connect(self.actionReset, SIGNAL("activated()"), self.createScenes)
        self.connect(self.actionInvert, SIGNAL("toggled(bool)"), self.updateColors)

        # Connections for the Wheel tab: geometry changes redraw the wheel,
        # physics parameters trigger a recalculation.
        self.connect(self.tab1_scaleSpinBox, SIGNAL("valueChanged(double)"), self.wheelScene_updateProperties)
        self.connect(self.tab1_innerRadiusSpinBox, SIGNAL("valueChanged(double)"), self.wheelScene_updateProperties)
        self.connect(self.tab1_thicknessSpinBox, SIGNAL("valueChanged(double)"), self.wheelScene_updateProperties)
        self.connect(self.tab1_engagePushButton, SIGNAL("clicked()"), self.wheelScene_startRotation)
        self.connect(self.tab1_speedSpinBox, SIGNAL("valueChanged(double)"), self.wheelScene_updateParameters)
        self.connect(self.tab1_distanceSpinBox, SIGNAL("valueChanged(double)"), self.wheelScene_updateParameters)
        self.connect(self.tab1_densitySpinBox, SIGNAL("valueChanged(double)"), self.wheelScene_updateParameters)
        self.connect(self.tab1_diameterSpinBox, SIGNAL("valueChanged(double)"), self.wheelScene_updateParameters)
        self.connect(self.tab1_viscositySpinBox, SIGNAL("valueChanged(double)"), self.wheelScene_updateParameters)
        self.connect(self.tab1_recordPushButton, SIGNAL("clicked()"), self.wheelScene_saveData)

        # Connections for the Shapes tab: every control refreshes the scene.
        self.connect(self.tab3_shapesComboBox, SIGNAL("currentIndexChanged(int)"), self.shapesScene_update)
        self.connect(self.tab3_thicknessSpinBox, SIGNAL("valueChanged(double)"), self.shapesScene_update)
        self.connect(self.tab3_scaleSpinBox, SIGNAL("valueChanged(double)"), self.shapesScene_update)
        self.connect(self.tab3_rotationSpinBox, SIGNAL("valueChanged(double)"), self.shapesScene_update)
        self.connect(self.tab3_filledCheckBox, SIGNAL("stateChanged(int)"), self.shapesScene_update)
        self.connect(self.tab3_groupedCheckBox, SIGNAL("stateChanged(int)"), self.shapesScene_update)
        self.connect(self.tab3_nrowsSpinBox, SIGNAL("valueChanged(int)"), self.shapesScene_update)
        self.connect(self.tab3_ncolumnsSpinBox, SIGNAL("valueChanged(int)"), self.shapesScene_update)
        self.connect(self.tab3_rowPitchSpinBox, SIGNAL("valueChanged(double)"), self.shapesScene_update)
        self.connect(self.tab3_columnPitchSpinBox, SIGNAL("valueChanged(double)"), self.shapesScene_update)

        # Grab the current date for the filename (YYYY-M-D_HHMMSS prefix).
        now = datetime.datetime.now()
        dateStr = '-'.join([str(now.year), str(now.month), str(now.day)])
        timeStr = ''.join(['%02d' % now.hour, '%02d' % now.minute, '%02d' % now.second])
        self.fname_prefix = '_'.join([dateStr, timeStr])
    def createScenes(self):
        """
        Create all the scenes from scratch
        """
        # Uncheck the "invert" action so all scenes start with default colors.
        self.actionInvert.setChecked(False)

        # Create all the scenes (order matters: shapesScene_update reads
        # self.shapesScene created just above it).
        self.wheelScene = self.createScene_Wheel()
        self.tab2Scene = self.createScene_Tab2()
        self.shapesScene = self.createScene_Shapes()
        self.shapesScene_update()

        # Visualize the scene corresponding to the current tab and keep a pointer
        # to the current scene in the GraphicsWindow instance
        tabIndex = self.tabWidget.currentIndex()
        self.switchTab(tabIndex)
    def createScene_Wheel(self):
        """
        Creates the scene of the wheel
        """
        # Create the scene and set some basic properties
        scene = QGraphicsScene(parent=self)
        scene.setBackgroundBrush(Qt.black)
        thickness = self.tab1_thicknessSpinBox.value()
        # Outer radius is fixed at 100 px; scaling is applied on the group.
        pixelRadius_outer = 100
        pixelRadius_inner = self.tab1_innerRadiusSpinBox.value()
        pen = QPen(Qt.white, thickness)

        # Create the items: two concentric circles plus a cross-hair, grouped
        # so the whole wheel rotates/moves as a single item.
        outer_circle = QGraphicsEllipseItem(-pixelRadius_outer, -pixelRadius_outer, pixelRadius_outer*2, pixelRadius_outer*2)
        inner_circle = QGraphicsEllipseItem(-pixelRadius_inner, -pixelRadius_inner, pixelRadius_inner*2, pixelRadius_inner*2)
        vline = QGraphicsLineItem(0, -pixelRadius_outer, 0, pixelRadius_outer)
        hline = QGraphicsLineItem(-pixelRadius_outer, 0, pixelRadius_outer, 0)
        outer_circle.setPen(pen)
        inner_circle.setPen(pen)
        vline.setPen(pen)
        hline.setPen(pen)
        wheel = QGraphicsItemGroup()
        wheel.addToGroup(outer_circle)
        wheel.addToGroup(inner_circle)
        wheel.addToGroup(vline)
        wheel.addToGroup(hline)
        wheel.setFlags(QGraphicsItem.GraphicsItemFlags(1)) # Make the item movable
        wheel.setPos(QPointF(0, 0))

        # Add the items to the scene
        scene.addItem(wheel)

        # Create a running variable that will be used to determine the rotation angle
        # of the wheel
        self.wheelAngle = 0.0

        # Make the calculations with the initial values
        self.wheelScene_updateParameters()

        return scene
    def wheelScene_updateProperties(self):
        """
        Update the properties of the scene
        """
        thickness = self.tab1_thicknessSpinBox.value()
        scale = self.tab1_scaleSpinBox.value()
        innerRadius = self.tab1_innerRadiusSpinBox.value()
        # Pen color follows the "invert" action (black-on-white vs white-on-black).
        if self.actionInvert.isChecked():
            pen = QPen(Qt.black, thickness)
        else:
            pen = QPen(Qt.white, thickness)
        # NOTE(review): items() indices assume a fixed z-order — [0:4] are the
        # four wheel primitives, [2] the inner circle, [4] the group. Confirm
        # this ordering is stable across Qt versions.
        for item in self.wheelScene.items()[0:4]:
            item.setPen(pen)
        self.wheelScene.items()[4].setScale(scale)
        self.wheelScene.items()[2].setRect(-innerRadius, -innerRadius, innerRadius*2, innerRadius*2)
    def wheelScene_startRotation(self):
        """
        Start the rotation of the wheel

        Plays one short "unit rotation" animation; re-triggers itself on the
        timeline's finished() signal so the wheel keeps spinning until the
        stop button halts the current timeline.
        """
        unitRotation = 0.1 # seconds
        timeline = QTimeLine(unitRotation * 1000)
        timeline.setFrameRange(0, 1)
        timeline.setUpdateInterval(1)
        # Curve shape 3 = linear, so consecutive unit rotations chain smoothly.
        timeline.setCurveShape(3)
        self.rotation = QGraphicsItemAnimation()
        self.rotation.setTimeLine(timeline)
        # Chain the next unit rotation, and let the stop button cancel it.
        self.connect(timeline, SIGNAL("finished()"), self.wheelScene_startRotation)
        self.connect(self.tab1_stopPushButton, SIGNAL("clicked()"), timeline.stop)
        angularV = self.tab1_speedSpinBox.value()
        # Continue from the angle reached by the previous unit rotation,
        # wrapped into [0, 360) to avoid unbounded growth.
        initial = self.wheelAngle
        if initial > 360:
            initial -= 360
        final = initial + angularV * 360 * unitRotation
        self.wheelAngle = final
        self.rotation.setRotationAt(0, initial)
        self.rotation.setRotationAt(1, final)
        self.rotation.setItem(self.wheelScene.items()[-1])
        timeline.start()
def wheelScene_updateParameters(self):
"""
Update the linear velocity, DEP and centripetal force according to the
values of the parameters
"""
# Linear velocity
angularV_SI = self.tab1_speedSpinBox.value() * 2 * math.pi # rad/s
linearV = angula |
Davidjohnwilson/sympy | sympy/liealgebras/type_b.py | Python | bsd-3-clause | 4,651 | 0.001935 | from __future__ import print_function, division
from .cartan_type import Standard_Cartan
from sympy.core.compatibility import range
from sympy.matrices import eye
class TypeB(Standard_Cartan):
    """Cartan type B_n: root-system data for the odd orthogonal series."""

    def __new__(cls, n):
        if n < 2:
            raise ValueError("n can not be less than 2")
        return Standard_Cartan.__new__(cls, "B", n)

    def dimension(self):
        """Dimension of the vector space V underlying the Lie algebra

        Examples
        ========

        >>> from sympy.liealgebras.cartan_type import CartanType
        >>> c = CartanType("B3")
        >>> c.dimension()
        3
        """
        return self.n

    def basic_root(self, i, j):
        """
        This is a method just to generate roots
        with a 1 in the ith position and a -1
        in the jth position.
        """
        root = [0]*self.n
        root[i] = 1
        root[j] = -1
        return root

    def simple_root(self, i):
        """
        Every lie algebra has a unique root system.
        Given a root system Q, there is a subset of the
        roots such that an element of Q is called a
        simple root if it cannot be written as the sum
        of two elements in Q. If we let D denote the
        set of simple roots, then it is clear that every
        element of Q can be written as a linear combination
        of elements of D with all coefficients non-negative.

        In B_n the first n-1 simple roots are the same as the
        roots in A_(n-1) (a 1 in the ith position, a -1 in
        the (i+1)th position, and zeroes elsewhere). The n-th
        simple root is the root with a 1 in the nth position
        and zeroes elsewhere.

        This method returns the ith simple root for the B series.

        Examples
        ========

        >>> from sympy.liealgebras.cartan_type import CartanType
        >>> c = CartanType("B3")
        >>> c.simple_root(2)
        [0, 1, -1]
        """
        n = self.n
        if i < n:
            return self.basic_root(i-1, i)
        else:
            # Last simple root: e_n (a lone 1 in the final position).
            root = [0]*self.n
            root[n-1] = 1
            return root

    def positive_roots(self):
        """
        This method generates all the positive roots of
        B_n. This is half of all of the roots of B_n;
        by multiplying all the positive roots by -1 we
        get the negative roots.

        Examples
        ========

        >>> from sympy.liealgebras.cartan_type import CartanType
        >>> c = CartanType("B3")
        >>> c.positive_roots()
        {1: [1, -1, 0], 2: [1, 1, 0], 3: [1, 0, -1], 4: [1, 0, 1],
        5: [0, 1, -1], 6: [0, 1, 1], 7: [1, 0, 0], 8: [0, 1, 0],
        9: [0, 0, 1]}
        """
        n = self.n
        posroots = {}
        k = 0
        # Roots of the form e_i - e_j and e_i + e_j for i < j.
        for i in range(0, n-1):
            for j in range(i+1, n):
                k += 1
                posroots[k] = self.basic_root(i, j)
                k += 1
                root = self.basic_root(i, j)
                root[j] = 1
                posroots[k] = root
        # Short roots e_i, which distinguish B_n from D_n.
        for i in range(0, n):
            k += 1
            root = [0]*n
            root[i] = 1
            posroots[k] = root
        return posroots

    def roots(self):
        """Returns the total number of roots for B_n."""
        n = self.n
        return 2*(n**2)

    def cartan_matrix(self):
        """
        Returns the Cartan matrix for B_n.
        The Cartan matrix matrix for a Lie algebra is
        generated by assigning an ordering to the simple
        roots, (alpha[1], ...., alpha[l]). Then the ijth
        entry of the Cartan matrix is (<alpha[i],alpha[j]>).

        Examples
        ========

        >>> from sympy.liealgebras.cartan_type import CartanType
        >>> c = CartanType('B4')
        >>> c.cartan_matrix()
        Matrix([
        [ 2, -1,  0,  0],
        [-1,  2, -1,  0],
        [ 0, -1,  2, -2],
        [ 0,  0, -1,  2]])
        """
        n = self.n
        m = 2*eye(n)
        # Fill the tridiagonal -1 entries, then the special -2 that encodes
        # the double bond between the last two simple roots of B_n.
        i = 1
        while i < n-1:
            m[i, i+1] = -1
            m[i, i-1] = -1
            i += 1
        m[0, 1] = -1
        m[n-2, n-1] = -2
        m[n-1, n-2] = -1
        return m

    def basis(self):
        """
        Returns the number of independent generators of B_n
        """
        # NOTE(review): so(2n+1) has n*(2n+1) generators; (n**2 - n)/2 looks
        # suspicious -- confirm against the intended definition before use.
        n = self.n
        return (n**2 - n)/2

    def lie_algebra(self):
        """
        Returns the Lie algebra associated with B_n
        """
        # NOTE(review): B_n is conventionally so(2n+1); this returns so(2n).
        # Kept as-is for backward compatibility -- confirm intended convention.
        n = self.n
        return "so(" + str(2*n) + ")"

    def dynkin_diagram(self):
        """Return an ASCII Dynkin diagram for B_n (double bond at the end)."""
        n = self.n
        diag = "---".join("0" for i in range(1, n)) + "=>=0\n"
        diag += "   ".join(str(i) for i in range(1, n+1))
        return diag
|
matteobertozzi/RaleighSL | src/raleigh-client/pyraleigh/sql/expr.py | Python | apache-2.0 | 14,678 | 0.014852 | #!/usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from lexer import RqlLexer, RqlSyntaxError
import random
import math
# Named constants available to every expression context.
DEFAULT_CONSTANTS = {
    'PI': 3.1415926535897932384,
    'NULL': None,
    'FALSE': False,
    'TRUE': True,
}

# Built-in callables exposed to RQL expressions.
DEFAULT_FUNCTIONS = {
    'abs': abs,
    'min': min,
    'max': max,
    'sum': lambda *args: sum(args),
    'acos': math.acos,
    'asin': math.asin,
    'atan': math.atan,
    'ceil': math.ceil,
    'cos': math.cos,
    'exp': math.exp,
    'floor': math.floor,
    'log': math.log,
    'random': random.random,
    'sin': math.sin,
    'sqrt': math.sqrt,
    'tan': math.tan
}

# Operator sets grouped by precedence level: earlier set = lower precedence
# (see get_binary_precedence, which returns the set's index).
# http://technet.microsoft.com/en-us/library/ms190276.aspx
EXPRESSION_OPERATORS = [
    {'+', '-', '&', '^', '|'},
    {'*', '/', '%'},
    {'~'},
]

PREDICATE_OPERATORS = [
    {'OR'},
    {'AND'},
    {'LIKE'},
    {'==', '<', '>', '<=', '>=', '<>', '==', '!=', '!>', '!<'},
]

# Predicates bind looser than arithmetic, so they come first.
PREDICATE_EXPRESSION_OPERATORS = PREDICATE_OPERATORS + EXPRESSION_OPERATORS
def get_binary_precedence(operators, token):
    """Return the precedence level of *token* within *operators*.

    *operators* is a list of operator sets ordered by precedence; the
    index of the set containing ``token.value`` is returned. Unknown
    operators (and missing tokens) yield -1.
    """
    if token:
        wanted = token.value
        for level, oplist in enumerate(operators):
            if wanted in oplist:
                return level
    return -1
def get_expr_identifiers(expression):
    """Return the set of RqlIdentifier instances referenced by *expression*.

    Walks every slot of the AST node and recurses into nested nodes.

    BUG FIX: the recursive call used an undefined name (``get_identifiers``)
    and passed the node itself, which would have recursed forever; it now
    recurses correctly into the child. Plain values (numbers, strings,
    operator names) are skipped since they carry no identifiers.
    """
    identifiers = set()
    # Some nodes declare __slots__ as a bare string (e.g. ('name')), which
    # would otherwise be iterated character by character.
    slots = expression.__slots__
    if isinstance(slots, str):
        slots = (slots,)
    for key in slots:
        expr = getattr(expression, key)
        if isinstance(expr, RqlIdentifier):
            identifiers.add(expr)
        elif hasattr(expr, '__slots__'):
            identifiers |= get_expr_identifiers(expr)
    return identifiers
class RqlFunctionCall(object):
    """AST node for a function invocation, e.g. ``max(a, b)``."""

    __slots__ = ('name', 'args')

    def __init__(self, name, args):
        self.name = name
        self.args = args

    def __str__(self):
        return '%s(%s)' % (self.name, self.args)

    def __repr__(self):
        return 'RqlFunctionCall(%r, %r)' % (self.name, self.args)

    def is_constant(self, context):
        # Calls are never treated as foldable constants.
        return False

    def resolve(self, context):
        # TODO: If args are const
        return self

    def evaluate(self, context):
        # Look up the callable in the context, evaluate each argument
        # sub-expression, then apply.
        func = context.functions.get(self.name)
        if not func:
            raise RqlSyntaxError("Unknown function '%s'" % self.name)
        evaluated = [arg.evaluate(context) for arg in self.args]
        return func(*evaluated)
class RqlIdentifier(object):
    """AST node naming a column/field, e.g. ``table.column``."""

    __slots__ = ('name')

    def __init__(self, name):
        self.name = name

    def __str__(self):
        return self.name

    def __repr__(self):
        return 'RqlIdentifier(%r)' % self.name

    def __cmp__(self, other):
        # Python 2 ordering/equality: delegate to the underlying name, so
        # identifiers compare equal to plain strings.
        return cmp(self.name, other)

    def __hash__(self):
        # Hash exactly like the plain name so identifiers interoperate with
        # string keys in sets and dicts.
        return hash(self.name)

    def is_constant(self, context):
        # Identifier values depend on the evaluation context.
        return False

    def resolve(self, context):
        return self

    def evaluate(self, context):
        return context.get_identifier(self.name)
class RqlBaseDataType(object):
    """Base class for literal AST nodes: a wrapped constant value."""

    __slots__ = ('value')

    def __init__(self, value):
        self.value = value

    def __str__(self):
        return '%s' % self.value

    def __repr__(self):
        # Use the concrete subclass name so reprs stay accurate for
        # RqlNumber, RqlString, etc.
        return '%s(%r)' % (self.__class__.__name__, self.value)

    def is_constant(self, context):
        # Literals are constant by definition.
        return True

    def resolve(self, context):
        return self

    def evaluate(self, context):
        return self.value
class RqlNumber(RqlBaseDataType):
    # Numeric literal; the value is always normalized to float.
    def __init__(self, value):
        super(RqlNumber, self).__init__(float(value))
class RqlBoolean(RqlBaseDataType):
    # Boolean literal; the value is coerced with bool().
    def __init__(self, value):
        super(RqlBoolean, self).__init__(bool(value))
class RqlString(RqlBaseDataType):
    # String literal; stored as-is, rendered double-quoted.
    def __init__(self, value):
        super(RqlString, self).__init__(value)

    def __str__(self):
        return '"%s"' % self.value
class RqlWildcard(RqlBaseDataType):
    # Wildcard literal (e.g. '*' in a projection); behaves as a constant.
    def __init__(self, value):
        super(RqlWildcard, self).__init__(value)
class RqlUnary(object):
    """AST node applying a unary operator to a sub-expression."""

    __slots__ = ('operator', 'expression')

    def __init__(self, operator, expression):
        self.operator = operator
        self.expression = expression

    def __str__(self):
        return '%s %s' % (self.operator, self.expression)

    def __repr__(self):
        return 'RqlUnary(%r, %r)' % (self.operator, self.expression)

    def is_constant(self, context):
        # Constant exactly when the operand is constant.
        return self.expression.is_constant(context)

    def resolve(self, context):
        self.expression = self.expression.resolve(context)
        if self.is_constant(context):
            # Fold the constant sub-tree into a numeric literal.
            return RqlNumber(self.evaluate(context))
        return self

    def evaluate(self, context):
        operand = self.expression.evaluate(context)
        op = self.operator
        if op == '+':
            return operand
        if op == '-':
            return -operand
        if op == '~':
            return ~operand
        if op == 'NOT':
            return not operand
        raise RqlSyntaxError("Unknown operator '%s'" % self.operator)
class RqlBinary(object):
    """AST node for a binary operator between two sub-expressions."""

    __slots__ = ('operator', 'left', 'right')

    def __init__(self, operator, left, right):
        self.operator = operator
        self.left = left
        self.right = right

    def __str__(self):
        return '(%s %s %s)' % (self.left, self.operator, self.right)

    def __repr__(self):
        return 'RqlBinary(%r, %r, %r)' % (self.operator, self.left, self.right)

    def is_constant(self, context):
        return self.left.is_constant(context) and self.right.is_constant(context)

    def resolve(self, context):
        # Resolve both children, then fold the node into a literal when both
        # sides are constant.
        self.left = self.left.resolve(context)
        self.right = self.right.resolve(context)
        if self.is_constant(context):
            result = self.evaluate(context)
            if isinstance(result, basestring):
                return RqlString(result)
            # BUG FIX: bool is a subclass of int, so the bool check must come
            # before the (int, float) check -- otherwise constant predicates
            # were folded into RqlNumber and RqlBoolean was unreachable.
            if isinstance(result, bool):
                return RqlBoolean(result)
            if isinstance(result, (int, float)):
                return RqlNumber(result)
            raise RqlSyntaxError("Unexpected type %s %r" % (type(result), result))
        return self

    def evaluate(self, context):
        # Both operands are always evaluated (no short-circuit for OR/AND).
        left = self.left.evaluate(context)
        right = self.right.evaluate(context)

        # Expression
        if self.operator == '+': return left + right
        if self.operator == '-': return left - right
        if self.operator == '&': return left & right
        if self.operator == '|': return left | right
        if self.operator == '^': return left ^ right
        if self.operator == '*': return left * right
        if self.operator == '/': return left / right
        if self.operator == '%': return left % right

        # Predicate
        if self.operator == '=': return left == right
        if self.operator == '<': return left < right
        if self.operator == '>': return left > right
        if self.operator == '<=': return left <= right
        if self.operator == '>=': return left >= right
        if self.operator == '==': return left == right
        if self.operator == '!=': return left != right
        if self.operator == 'IS': return left is right
        if self.operator == 'OR': return left or right
        if self.operator == 'AND': return left and right

        # LIKE
        # NOTE(review): 'LIKE', '<>', '!>', '!<' appear in PREDICATE_OPERATORS
        # but are not implemented here; they fall through to the raise below.
        raise RqlSyntaxError("Unknown operator '%s'" % self)
class RqlAssignment(object):
    """AST node binding the value of an expression to a context variable."""

    __slots__ = ('name', 'expression')

    def __init__(self, name, expression):
        self.name = name
        self.expression = expression

    def __repr__(self):
        return 'RqlAssignment(%r, %r)' % (self.name, self.expression)

    def is_constant(self, context):
        return self.expression.is_constant(context)

    def resolve(self, context):
        # BUG FIX: resolve() previously took no ``context`` parameter yet
        # used the name, and called expression.resolve() without arguments;
        # the signature now matches the other AST nodes' resolve(context).
        self.expression = self.expression.resolve(context)
        if self.is_constant(context):
            return RqlNumber(self.evaluate(context))
        return self

    def evaluate(self, context):
        # Evaluate the right-hand side and store it as a context variable;
        # the assigned value is also the expression's result.
        right = self.expression.evaluate(context)
        context.variables[self.name] = right
        return right
class RqlParser(object):
__slots__ = ('lexer')
    def __init__(self, lexer):
        # The parser consumes tokens from this RqlLexer instance.
        self.lexer = lexer
    def parse_identifier(self):
        # A dotted name, e.g. ``schema.table.column``, becomes one identifier.
        items = self.parse_dot_list(self.lexer.expect_string_or_identifier)
        return RqlIdentifier('.'.join(items))
    # <List> ::= <Item> '.' <List> | <Item>
    def parse_dot_list(self, item_parse_func):
        # Dot-separated list: delegates to the generic list parser.
        return self.parse_list('.', item_parse_func)
# <List> ::= <Item> ',' <List> | <Item>
def parse_comma_list(self, item_parse_func |
antoinecarme/pyaf | tests/artificial/transf_Anscombe/trend_Lag1Trend/cycle_12/ar_/test_artificial_32_Anscombe_Lag1Trend_12__20.py | Python | bsd-3-clause | 263 | 0.087452 | import pya | f.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "Lag1Trend", cycle_length = 12, transform = "Anscombe", sigma = 0.0, e | xog_count = 20, ar_order = 0); |
jcfr/mystic | setup.py | Python | bsd-3-clause | 12,858 | 0.002178 | #!/usr/bin/env python
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 1997-2015 California Institute of Technology.
# License: 3-clause BSD. The full license text is available at:
# - http://trac.mystic.cacr.caltech.edu/project/mystic/browser/mystic/LICENSE
from __future__ import with_statement
import os
import sys
# set version numbers
stable_version = '0.2a1'
target_version = '0.2a2'
is_release = False

# check if easy_install is available
try:
    # import __force_distutils__ #XXX: uncomment to force use of distutills
    from setuptools import setup
    has_setuptools = True
except ImportError:
    # setuptools missing: fall back to plain distutils
    from distutils.core import setup
    has_setuptools = False

# generate version number
if os.path.exists('mystic/info.py'):
    # is a source distribution, so use existing version
    #from mystic.info import this_version #FIXME?
    this_version = stable_version
elif stable_version == target_version:
    # we are building a stable release
    this_version = stable_version
else:
    # we are building a distribution
    this_version = target_version + '.dev0'
    if is_release:
        # stamp dev releases with today's date as YYYYMMDD
        from datetime import date
        today = "".join(date.isoformat(date.today()).split('-'))
        this_version += "-" + today

# get the license info
# NOTE(review): ``file`` shadows the builtin name; harmless at setup time.
with open('LICENSE') as file:
    license_text = file.read()
# generate the readme text
long_description = \
"""------------------------------------------------------
mystic: a simple model-independent inversion framework
------------------------------------------------------
The mystic framework provides a collection of optimization algorithms
and tools that allows the user to more robustly (and readily) solve
optimization problems. All optimization algorithms included in mystic
provide workflow at the fitting layer, not just access to the algorithms
as function calls. Mystic gives the user fine-grained power to both
monitor and steer optimizations as the fit processes are running.
Where possible, mystic optimizers share a common interface, and thus can
be easily swapped without the user having to write any new code. Mystic
solvers all conform to a solver API, thus also have common method calls
to configure and launch an optimization job. For more details, see
`mystic.abstract_solver`. The API also makes it easy to bind a favorite
3rd party solver into the mystic framework.
By providing a robust interface designed to allow the user to easily
configure and control solvers, mystic reduces the barrier to implementing
a target fitting problem as stable code. Thus the user can focus on
building their physical models, and not spend time hacking together an
interface to optimization code.
Mystic is in the early development stages, and any user feedback is
highly appreciated. Contact Mike McKerns [mmckerns at caltech dot edu]
with comments, suggestions, and any bugs you may find. A list of known
issues is maintained at http://dev.danse.us/trac/mystic/query.
Major Features
==============
Mystic provides a stock set of configurable, controllable solvers with::
- a common interface
- the ability to impose solver-independent bounds constraints
- the ability to apply solver-independent monitors
- the ability to configure solver-independent termination conditions
- a control handler yielding: [pause, continue, exit, and user_callback]
- ease in selecting initial conditions: [initial_guess, random]
- ease in selecting mutation strategies (for differential evolution)
To get up and running quickly, mystic also provides infrastructure to::
- easily generate a fit model (several example models are included)
- configure and auto-generate a cost function from a model
- extend fit jobs to parallel & distributed resources
- couple models with optimization parameter constraints [COMING SOON]
Current Release
===============
The latest stable release version is mystic-%(relver)s. You can download it here.
The latest stable version of mystic is always available at:
http://dev.danse.us/trac/mystic
Development Release
===================
If you like living on the edge, and don't mind the promise
of a little instability, you can get the latest development
release with all the shiny new features at:
http://dev.danse.us/packages.
Installation
============
Mystic is packaged to install from source, so you must
download the tarball, unzip, and run the installer::
[download]
$ tar -xvzf mystic-%(thisver)s.tgz
$ cd mystic-%(thisver)s
$ python setup py build
$ python setup py install
You will be warned of any missing dependencies and/or settings
after you run the "build" step above. Mystic depends on dill, numpy
and sympy, so you should install them first. There are several
functions within mystic where scipy is used if it is available;
however, scipy is an optional dependency. Having matplotlib installed
is necessary for running several of the examples, and you should
probably go get it even though it's not required. Matplotlib is
also required by mystic's "analysis viewers".
Alternately, mystic can be installed with easy_install::
[download]
$ easy_install -f . mystic
For Windows users, source code and examples are available in zip format.
A binary installer is also provided::
[download]
[double-click]
Requirements
============
Mystic requires::
- python, version >= 2.5, version < 3.0
- numpy, version >= 1.0
- sympy, version >= 0.6.7
- dill, version >= 0.2.3
- klepto, version >= 0.1.1
Optional requirements::
- setuptools, version >= 0.6
- matplotlib, version >= 0.91
- scipy, version >= 0.6.0
- pathos, version >= 0.2a1.dev0
- pyina, version >= 0.2a1.dev0
Usage Notes
===========
Probably the best way to get started is to look at a few of the
examples provided within mystic. See `mystic.examples` for a
set of scripts that demonstrate the configuration and launching of
optimization jobs for one of the sample models in `mystic.models`.
Many of the included examples are standard optimization test problems.
Instructions on building a new model are in `mystic.models.abstract_model`.
Mystic provides base classes for two types of models::
- AbstractFunction [evaluates f(x) for given evaluation points x]
- AbstractModel [generates f(x,p) for given coefficients p]
It is, however, not necessary to use the base classes in your own model.
Mystic also provides some convienence functions to help you build a
model instance and a cost function instance on-the-fly. F | or more
information, see `mystic.mystic.forward_model`.
All mystic solvers are highly configurable, and provide a robust set of
methods to help customize the solver for your particular optimization
problem. For each solver, a minimal interface is also provided for users
who prefer to configure their solvers in a single fu | nction call. For more
information, see `mystic.mystic.abstract_solver` for the solver API, and
each of the individual solvers for their minimal (non-API compliant)
interface.
Mystic extends the solver API to parallel computing by providing a solver
class that utilizes the parallel map-reduce algorithm. Mystic includes
a set of defaults in `mystic.mystic.python_map` that mirror the behavior
of serial python and the built-in python map function. Mystic solvers
built on map-reduce can utilize the distributed and parallel tools provided
by the `pathos` package, and thus with little new code solvers are
extended to high-performance computing. For more information, see
`mystic.mystic.abstract_map_solver`, `mystic.mystic.abstract_ensemble_solver`,
and the pathos documentation at http://dev.danse.us/trac/pathos.
Important classes and functions are found here::
- mystic.mystic.solvers [solver optimization algorithms]
- mystic.mystic.termination [solver termination conditions]
- mystic.mystic.strategy [solver population mutation strategies]
- mystic.mystic.monitors [optimization monitors]
- mystic.mystic.tools [function wrappers, etc]
- mystic.mystic.forward_model |
ramineni/myironic | ironic/tests/drivers/test_ipminative.py | Python | apache-2.0 | 20,240 | 0.000296 | # coding=utf-8
# Copyright 2013 International Business Machines Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Test class for Native IPMI power driver module.
"""
import mock
from oslo_config import cfg
from pyghmi import exceptions as pyghmi_exception
from ironic.common import boot_devices
from ironic.common import driver_factory
from ironic.common import exception
from ironic.common import states
from ironic.common import utils
from ironic.conductor import task_manager
from ironic.drivers.modules import console_utils
from ironic.drivers.modules import ipminative
from ironic.tests.conductor import utils as mgr_utils
from ironic.tests.db import base as db_base
from ironic.tests.db import utils as db_utils
from ironic.tests.objects import utils as obj_utils
CONF = cfg.CONF
INFO_DICT = db_utils.get_test_ipmi_info()
class IPMINativePrivateMethodTestCase(db_base.DbTestCase):
"""Test cases for ipminative private methods."""
    def setUp(self):
        """Create a test node with the fake_ipminative driver and parse its info."""
        super(IPMINativePrivateMethodTestCase, self).setUp()
        # The node carries the IPMI credentials (address/username/password/uuid).
        self.node = obj_utils.create_test_node(self.context,
                                               driver='fake_ipminative',
                                               driver_info=INFO_DICT)
        self.info = ipminative._parse_driver_info(self.node)
    def test__parse_driver_info(self):
        """_parse_driver_info returns the expected keys and rejects missing ones."""
        # make sure we get back the expected things
        self.assertIsNotNone(self.info.get('address'))
        self.assertIsNotNone(self.info.get('username'))
        self.assertIsNotNone(self.info.get('password'))
        self.assertIsNotNone(self.info.get('uuid'))
        # make sure error is raised when info, eg. username, is missing
        info = dict(INFO_DICT)
        del info['ipmi_username']
        node = obj_utils.get_test_node(self.context, driver_info=info)
        self.assertRaises(exception.MissingParameterValue,
                          ipminative._parse_driver_info,
                          node)
@mock.patch('pyghmi.ipmi.command.Command')
def test__power_status_on(self, ipmi_mock):
ipmicmd = ipmi_mock.return_value
ipmicmd.get_power.return_value = {'powerstate': 'on'}
state = ipminative._power_status(self.info)
ipmicmd.get_power.assert_called_once_with()
self.assertEqual(states.POWER_ON, state)
@mock.patch('pyghmi.ipmi.command.Command')
def test__power_status_off(self, ipmi_mock):
ipmicmd = ipmi_mock.return_value
ipmicmd.get_power.return_value = {'powerstate': 'off'}
state = ipminative._power_status(self.info)
ipmicmd.get_power.assert_called_once_with()
self.assertEqual(states.POWER_OFF, state)
    @mock.patch('pyghmi.ipmi.command.Command')
    def test__power_status_error(self, ipmi_mock):
        """An unrecognised powerstate value is reported as states.ERROR."""
        ipmicmd = ipmi_mock.return_value
        ipmicmd.get_power.return_value = {'powerstate': 'Error'}
        state = ipminative._power_status(self.info)
        ipmicmd.get_power.assert_called_once_with()
        self.assertEqual(states.ERROR, state)
    @mock.patch('pyghmi.ipmi.command.Command')
    def test__power_on(self, ipmi_mock):
        """_power_on forwards the configured ipmi.retry_timeout to pyghmi."""
        ipmicmd = ipmi_mock.return_value
        ipmicmd.set_power.return_value = {'powerstate': 'on'}
        self.config(retry_timeout=400, group='ipmi')
        state = ipminative._power_on(self.info)
        # set_power receives the retry_timeout configured above (400 s).
        ipmicmd.set_power.assert_called_once_with('on', 400)
        self.assertEqual(states.POWER_ON, state)
    @mock.patch('pyghmi.ipmi.command.Command')
    def test__power_off(self, ipmi_mock):
        """_power_off forwards the configured ipmi.retry_timeout to pyghmi."""
        ipmicmd = ipmi_mock.return_value
        ipmicmd.set_power.return_value = {'powerstate': 'off'}
        self.config(retry_timeout=500, group='ipmi')
        state = ipminative._power_off(self.info)
        # set_power receives the retry_timeout configured above (500 s).
        ipmicmd.set_power.assert_called_once_with('off', 500)
        self.assertEqual(states.POWER_OFF, state)
    @mock.patch('pyghmi.ipmi.command.Command')
    def test__reboot(self, ipmi_mock):
        """_reboot issues a pyghmi 'boot' request and reports POWER_ON."""
        ipmicmd = ipmi_mock.return_value
        ipmicmd.set_power.return_value = {'powerstate': 'on'}
        self.config(retry_timeout=600, group='ipmi')
        state = ipminative._reboot(self.info)
        # A reboot is expressed as set_power('boot', <retry_timeout>).
        ipmicmd.set_power.assert_called_once_with('boot', 600)
        self.assertEqual(states.POWER_ON, state)
def _create_sensor_object(self, value, type_, name, states=None,
units='fake_units', health=0):
if states is None:
states = []
return type('Reading', (object, ), {'value': value, 'type': type_,
'name': name, 'states': states,
'units': units, 'health': health})()
    @mock.patch('pyghmi.ipmi.command.Command')
    def test__get_sensors_data(self, ipmi_mock):
        """Readings are grouped by sensor type, with fields rendered as strings."""
        reading_1 = self._create_sensor_object('fake_value1',
                                               'fake_type_A',
                                               'fake_name1')
        reading_2 = self._create_sensor_object('fake_value2',
                                               'fake_type_A',
                                               'fake_name2')
        reading_3 = self._create_sensor_object('fake_value3',
                                               'fake_type_B',
                                               'fake_name3')
        readings = [reading_1, reading_2, reading_3]
        ipmicmd = ipmi_mock.return_value
        ipmicmd.get_sensor_data.return_value = readings
        # Expected structure: {sensor type: {sensor name: {field: str value}}}.
        expected = {
            'fake_type_A': {
                'fake_name1': {
                    'Health': '0',
                    'Sensor ID': 'fake_name1',
                    'Sensor Reading': 'fake_value1 fake_units',
                    'States': '[]',
                    'Units': 'fake_units'
                },
                'fake_name2': {
                    'Health': '0',
                    'Sensor ID': 'fake_name2',
                    'Sensor Reading': 'fake_value2 fake_units',
                    'States': '[]',
                    'Units': 'fake_units'
                }
            },
            'fake_type_B': {
                'fake_name3': {
                    'Health': '0',
                    'Sensor ID': 'fake_name3',
                    'Sensor Reading': 'fake_value3 fake_units',
                    'States': '[]', 'Units': 'fake_units'
                }
            }
        }
        ret = ipminative._get_sensors_data(self.info)
        self.assertEqual(expected, ret)
@mock.patch('pyghmi.ipmi.command.Command')
def test__get_sensors_data_missing_values(self, ipmi_mock):
reading_1 = self._create_sensor_object('fake_value1',
'fake_type_A',
'fake_name1')
reading_2 = self._create_sensor_object(None,
'fake_type_A',
'fake_name2')
reading_3 = self._create_sensor_object(None,
'fake_type_B',
'fake_name3')
readings = [reading_1, reading_2, reading_3]
ipmicmd = ipmi_mock.return_value
ipmicmd.get_sensor_data.return_value = readings
expected = {
'fake_type_A': {
'fake_name1': {
'Health': '0',
'Sensor ID': 'fake_name1',
'Sensor Reading': 'fake_value1 fake_units',
'States': '[]',
'Units': 'fake_units'
}
}
}
ret = ipminative._get_sensor |
yostashiro/awo-custom | purchase_line_split/__openerp__.py | Python | lgpl-3.0 | 1,484 | 0.001348 | # -*- coding: utf-8 -*-
# Odoo, Open Source Management Solution
# Copyright (C) 2016 Rooms For (Hong Kong) Limited T/A OSCG
# <https://www.odoo-asia.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You shoul | d have received a copy of the GNU Affero Gener | al Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
{
'name': 'Purchase Line Split',
'summary':"""""",
'version': '8.0.0.5.0',
'category': 'Purchases',
'author': 'Rooms For (Hong Kong) Limited T/A OSCG',
'website': 'https://www.odoo-asia.com',
'license': "AGPL-3",
'application': False,
'installable': True,
'depends': ['sale_line_quant'],
'description': """
* Adds a button in RFQ to split order lines so that each line has 1 for quantity.
* Hides "Send RFQ by Email" and "Print RFQ" buttons.
""",
'data': [
'views/purchase_view.xml',
],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
space-age-robotics/gerbmerge-patched | gerbmerge/config.py | Python | gpl-3.0 | 14,779 | 0.017119 | #!/usr/bin/env python
"""
Parse the GerbMerge configuration file.
--------------------------------------------------------------------
This program is licensed under the GNU General Public License (GPL)
Version 3. See http://www.fsf.org for details of the license.
Rugged Circuits LLC
http://ruggedcircuits.com/gerbmerge
"""
import sys
import ConfigParser
import re
import string
import jobs
import aptable
# Configuration dictionary. Specify floats as strings. Ints can be specified
# as ints or strings.
Config = {
'xspacing': '0.125', # Spacing in horizontal direction
'yspacing': '0.125', # Spacing in vertical direction
'panelwidth': '12.6', # X-Dimension maximum panel size (Olimex)
'panelheight': '7.8', # Y-Dimension maximum panel size (Olimex)
'cropmarklayers': None, # e.g., *toplayer,*bottomlayer
'cropmarkwidth': '0.01', # Width (inches) of crop lines
'cutlinelayers': None, # as for cropmarklayers
'cutlinewidth': '0.01', # Width (inches) of cut lines
'minimumfeaturesize': 0, # Minimum dimension for selected layers
'toollist': None, # Name of file containing default tool list
'drillclustertolerance': '.002', # Tolerance for clustering drill sizes
'allowmissinglayers': 0, # Set to 1 to allow multiple jobs to have non-matching layers
'fabricationdrawingfile': None, # Name of file to which to write fabrication drawing, or None
'fabricationdrawingtext': None, # Name of file containing text to write to fab drawing
'excellondecimals': 4, # Number of digits after the decimal point in input Excellon files
'excellonleadingzeros': 0, # Generate leading zeros in merged Excellon output file
'outlinelayerfile': None, # Name of file to which to write simple box outline, or None
'outlinelayers': None, # e.g., *t | oplayer, *bottomlayer
'scoringfile': None, # Name of file to which to write scoring data, or None
'leftmargin': 0, | # Inches of extra room to leave on left side of panel for tooling
'topmargin': 0, # Inches of extra room to leave on top side of panel for tooling
'rightmargin': 0, # Inches of extra room to leave on right side of panel for tooling
'bottommargin': 0, # Inches of extra room to leave on bottom side of panel for tooling
'fiducialpoints': None, # List of X,Y co-ordinates at which to draw fiducials
'fiducialcopperdiameter': 0.08, # Diameter of copper part of fiducial
'fiducialmaskdiameter': 0.32, # Diameter of fiducial soldermask opening
}
# these are for special text, printed on every layer
text = None
text_size = None # mils, must be enough less than Yspacing that there isn't overlap
# if not specified, deduce based on Yspacing and other variables
# (cutline width, etc.)
text_stroke = None # mils, deduce based on text_size
text_rotation = None # degrees
text_x = None # if not specified, put it in the first cutline area
text_y = None # if not specified, put it in the first cutline area
min_text_stroke = 6 # mils, this is the minimum at SeeedStudio
min_text_size = 32 # mils, this is the minimum at SeeedStudio
# This dictionary is indexed by lowercase layer name and has as values a file
# name to use for the output.
MergeOutputFiles = {
'boardoutline': 'merged.boardoutline.ger',
'drills': 'merged.drills.xln',
'placement': 'merged.placement.txt',
'toollist': 'merged.toollist.drl'
}
# The global aperture table, indexed by aperture code (e.g., 'D10')
GAT = {}
# The global aperture macro table, indexed by macro name (e.g., 'M3', 'M4R' for rotated macros)
GAMT = {}
# The list of all jobs loaded, indexed by job name (e.g., 'PowerBoard')
Jobs = {}
# The set of all Gerber layer names encountered in all jobs. Doesn't
# include drills.
LayerList = {'boardoutline': 1}
# The tool list as read in from the DefaultToolList file in the configuration
# file. This is a dictionary indexed by tool name (e.g., 'T03') and
# a floating point number as the value, the drill diameter in inches.
DefaultToolList = {}
# The GlobalToolMap dictionary maps tool name to diameter in inches. It
# is initially empty and is constructed after all files are read in. It
# only contains actual tools used in jobs.
GlobalToolMap = {}
# The GlobalToolRMap dictionary is a reverse dictionary of ToolMap, i.e., it maps
# diameter to tool name.
GlobalToolRMap = {}
##############################################################################
# This configuration option determines whether trimGerber() is called
TrimGerber = 1
# This configuration option determines whether trimExcellon() is called
TrimExcellon = 1
# This configuration option determines the minimum size of feature dimensions for
# each layer. It is a dictionary indexed by layer name (e.g. '*topsilkscreen') and
# has a floating point number as the value (in inches).
MinimumFeatureDimension = {}
# This configuration option is a positive integer that determines the maximum
# amout of time to allow for random placements (seconds). A SearchTimeout of 0
# indicates that no timeout should occur and random placements will occur
# forever until a KeyboardInterrupt is raised.
SearchTimeout = 0
# Construct the reverse-GAT/GAMT translation table, keyed by aperture/aperture macro
# hash string. The value is the aperture code (e.g., 'D10') or macro name (e.g., 'M5').
def buildRevDict(D):
  """Return the inverse of D: map each value's hash() string back to its key."""
  return dict((val.hash(), key) for key, val in D.items())
def parseStringList(L):
  """Parse something like '*toplayer, *bottomlayer' into a list of names
  without quotes, spaces, etc."""
  # NOTE: the old disabled `if 0:` quote-stripping block was removed. It was
  # dead code and its Python-2-only `raise E, msg` statements are syntax
  # errors under Python 3 even though unreachable.

  # This pattern matches quotes at the beginning and end...quotes must match
  quotepat = re.compile(r'^([' "'" '"' r']?)([^\1]*)\1$')
  # Names are separated by commas or semicolons, optionally padded by blanks.
  delimitpat = re.compile(r'[ \t]*[,;][ \t]*')

  match = quotepat.match(L)
  if match:
    L = match.group(2)

  return delimitpat.split(L)
# Parse an Excellon tool list file of the form
#
# T01 0.035in
# T02 0.042in
def parseToolList(fname):
  """Parse an Excellon tool list file into a {tool name: diameter-in-inches} dict.

  Lines look like ``T01 0.035in``, ``T02 1.0mm`` or ``T03 35mil``; a bare
  number is treated as mils. Blank lines and lines starting with '#' or ';'
  are ignored. Raises RuntimeError on unreadable files, bad sizes or
  duplicate tool definitions.

  NOTE(review): this function uses Python 2 constructs (``file``,
  ``xreadlines``, ``has_key``, ``raise E, msg``) and will not run under
  Python 3 without porting.
  """
  TL = {}
  try:
    fid = file(fname, 'rt')
  except Exception, detail:
    raise RuntimeError, "Unable to open tool list file '%s':\n %s" % (fname, str(detail))
  # One pattern per accepted unit; tried in order: inches, mm, mil/default.
  pat_in = re.compile(r'\s*(T\d+)\s+([0-9.]+)\s*in\s*')
  pat_mm = re.compile(r'\s*(T\d+)\s+([0-9.]+)\s*mm\s*')
  pat_mil = re.compile(r'\s*(T\d+)\s+([0-9.]+)\s*(?:mil)?')
  for line in fid.xreadlines():
    line = string.strip(line)
    if (not line) or (line[0] in ('#', ';')): continue
    mm = 0
    mil = 0
    match = pat_in.match(line)
    if not match:
      mm = 1
      match = pat_mm.match(line)
      if not match:
        mil = 1
        match = pat_mil.match(line)
        if not match:
          # Unparseable lines are silently skipped (error kept for reference).
          continue
          #raise RuntimeError, "Illegal tool list specification:\n %s" % line
    tool, size = match.groups()
    try:
      size = float(size)
    except:
      raise RuntimeError, "Tool size in file '%s' is not a valid floating-point number:\n %s" % (fname,line)
    if mil:
      size = size*0.001 # Convert mil to inches
    elif mm:
      size = size/25.4 # Convert mm to inches
    # Canonicalize tool so that T1 becomes T01
    tool = 'T%02d' % int(tool[1:])
    if TL.has_key(tool):
      raise RuntimeError, "Tool '%s' defined more than once in tool list file '%s'" % (tool,fname)
    TL[tool]=size
  fid.close()
  return TL
# This function parses the job configuration file and does
# everything needed to:
#
# * parse global options and store them in the Config dictionary
# as natural types (i.e., ints, floats, lists)
#
# * Read Gerber/Excellon data and populate the Jobs dictionary
#
# * Read Gerber/Excellon data and populate the global apertu |
michaelpacer/scikit-image | skimage/filters/rank/generic.py | Python | bsd-3-clause | 31,373 | 0 | """The local histogram is computed using a sliding window similar to the method
described in [1]_.
Input image can be 8-bit or 16-bit, for 16-bit input images, the number of
histogram bins is determined from the maximum value present in the image.
Result image is 8-/16-bit or double with respect to the input image and the
rank filter operation.
References
----------
.. [1] Huang, T., Yang, G., Tang, G.: "A fast two-dimensional
   median filtering algorithm", IEEE Transactions on Acoustics, Speech and
   Signal Processing, Feb 1979. Volume: 27, Issue: 1, Page(s): 13 - 18.
"""
import warnings
import numpy as np
from ... import img_as_ubyte
from ..._shared.utils import assert_nD
from . import generic_cy
__all__ = ['autolevel', 'bottomhat', 'equalize', 'gradient', 'maximum', 'mean',
'subtract_mean', 'median', 'minimum', 'modal', 'enhance_contrast',
'pop', 'threshold', 'tophat', 'noise_filter', 'entropy', 'otsu']
def _handle_input(image, selem, out, mask, out_dtype=None, pixel_size=1):
    """Validate and normalise the inputs shared by all rank filters.

    Returns contiguous uint8/uint16 arrays for ``image``, ``selem`` and
    ``mask``, an output array of shape ``image.shape + (pixel_size,)``, and
    ``max_bin``, the number of histogram bins the Cython kernels should use.
    """
    assert_nD(image, 2)
    # Non-integer input is converted to uint8 before filtering.
    if image.dtype not in (np.uint8, np.uint16):
        image = img_as_ubyte(image)
    # Structuring element is binarised and made contiguous for the kernels.
    selem = np.ascontiguousarray(img_as_ubyte(selem > 0))
    image = np.ascontiguousarray(image)
    if mask is None:
        # No mask given: include every pixel.
        mask = np.ones(image.shape, dtype=np.uint8)
    else:
        mask = img_as_ubyte(mask)
        mask = np.ascontiguousarray(mask)
    if image is out:
        raise NotImplementedError("Cannot perform rank operation in place.")
    if out is None:
        if out_dtype is None:
            out_dtype = image.dtype
        # Trailing axis holds the per-pixel vector (size 1 for scalar filters).
        out = np.empty(image.shape+(pixel_size,), dtype=out_dtype)
    else:
        if len(out.shape) == 2:
            out = out.reshape(out.shape+(pixel_size,))
    is_8bit = image.dtype in (np.uint8, np.int8)
    if is_8bit:
        max_bin = 255
    else:
        # For 16-bit images the histogram only spans the values actually used.
        max_bin = max(4, image.max())
    bitdepth = int(np.log2(max_bin))
    if bitdepth > 10:
        warnings.warn("Bitdepth of %d may result in bad rank filter "
                      "performance due to large number of bins." % bitdepth)
    return image, selem, out, mask, max_bin
def _apply_scalar_per_pixel(func, image, selem, out, mask, shift_x, shift_y,
                            out_dtype=None):
    """Prepare the inputs, run a scalar-valued rank kernel, return 2-D output."""
    prepared = _handle_input(image, selem, out, mask, out_dtype)
    image, selem, out, mask, max_bin = prepared
    func(image, selem, shift_x=shift_x, shift_y=shift_y,
         mask=mask, out=out, max_bin=max_bin)
    # _handle_input appended a singleton pixel axis; drop it again.
    return out.reshape(out.shape[:2])
def _apply_vector_per_pixel(func, image, selem, out, mask, shift_x, shift_y,
                            out_dtype=None, pixel_size=1):
    """Prepare the inputs and run a vector-valued rank kernel.

    Unlike the scalar variant, the trailing ``pixel_size`` axis of the
    output array is kept.
    """
    prepared = _handle_input(image, selem, out, mask, out_dtype,
                             pixel_size=pixel_size)
    image, selem, out, mask, max_bin = prepared
    func(image, selem, shift_x=shift_x, shift_y=shift_y,
         mask=mask, out=out, max_bin=max_bin)
    return out
def autolevel(image, selem, out=None, mask=None, shift_x=False, shift_y=False):
    """Stretch each pixel's local histogram to the full grey-value range.

    Within every neighbourhood the darkest value is mapped to "black" and
    the brightest to "white", locally maximising contrast.

    Parameters
    ----------
    image : 2-D array (uint8, uint16)
        Input image.
    selem : 2-D array
        Neighborhood expressed as a 2-D array of 1's and 0's.
    out : 2-D array (same dtype as input)
        If None, a new array is allocated.
    mask : ndarray
        Mask array that defines (>0) area of the image included in the local
        neighborhood. If None, the complete image is used (default).
    shift_x, shift_y : int
        Offset added to the structuring element center point. Shift is
        bounded to the structuring element sizes (center must be inside the
        given structuring element).

    Returns
    -------
    out : 2-D array (same dtype as input image)
        Output image.

    Examples
    --------
    >>> from skimage import data
    >>> from skimage.morphology import disk
    >>> from skimage.filters.rank import autolevel
    >>> img = data.camera()
    >>> auto = autolevel(img, disk(5))
    """
    return _apply_scalar_per_pixel(generic_cy._autolevel, image, selem,
                                   out=out, mask=mask, shift_x=shift_x,
                                   shift_y=shift_y)
def bottomhat(image, selem, out=None, mask=None, shift_x=False, shift_y=False):
    """Local bottom-hat transform of an image.

    The image is morphologically closed over the neighbourhood and the
    closing result is subtracted from the original image.

    Parameters
    ----------
    image : 2-D array (uint8, uint16)
        Input image.
    selem : 2-D array
        Neighborhood expressed as a 2-D array of 1's and 0's.
    out : 2-D array (same dtype as input)
        If None, a new array is allocated.
    mask : 2-D array
        Mask array that defines (>0) area of the image included in the local
        neighborhood. If None, the complete image is used (default).
    shift_x, shift_y : int
        Offset added to the structuring element center point. Shift is
        bounded to the structuring element sizes (center must be inside the
        given structuring element).

    Returns
    -------
    out : 2-D array (same dtype as input image)
        Output image.

    Examples
    --------
    >>> from skimage import data
    >>> from skimage.morphology import disk
    >>> from skimage.filters.rank import bottomhat
    >>> img = data.camera()
    >>> out = bottomhat(img, disk(5))
    """
    return _apply_scalar_per_pixel(generic_cy._bottomhat, image, selem,
                                   out=out, mask=mask, shift_x=shift_x,
                                   shift_y=shift_y)
def equalize(image, selem, out=None, mask=None, shift_x=False, shift_y=False):
    """Equalise an image using each pixel's local histogram.

    Parameters
    ----------
    image : 2-D array (uint8, uint16)
        Input image.
    selem : 2-D array
        Neighborhood expressed as a 2-D array of 1's and 0's.
    out : 2-D array (same dtype as input)
        If None, a new array is allocated.
    mask : ndarray
        Mask array that defines (>0) area of the image included in the local
        neighborhood. If None, the complete image is used (default).
    shift_x, shift_y : int
        Offset added to the structuring element center point. Shift is
        bounded to the structuring element sizes (center must be inside the
        given structuring element).

    Returns
    -------
    out : 2-D array (same dtype as input image)
        Output image.

    Examples
    --------
    >>> from skimage import data
    >>> from skimage.morphology import disk
    >>> from skimage.filters.rank import equalize
    >>> img = data.camera()
    >>> equ = equalize(img, disk(5))
    """
    return _apply_scalar_per_pixel(generic_cy._equalize, image, selem,
                                   out=out, mask=mask, shift_x=shift_x,
                                   shift_y=shift_y)
def gradient(image, selem, out=None, mask=None, shift_x=False, shift_y=False):
"""Return local gradient of an image (i.e. local maximum - local minimum).
Parameters
----------
image : 2-D array (uint8, uint16)
Input image.
selem : 2-D array
The neighborhood expressed as a 2-D array of 1's and 0's.
out : 2-D array (same dtype as input)
If None, a new array is allocated.
mask : ndarray
Mask array that defines (>0) area of the image included in the local
neighborhood. If None, the complete image is used (default).
shift_x, shift_y : int
Offset added to the structuring element center point. Shift is bounded
to the structuring element sizes (center must be inside the given
structuring element).
Returns
-------
out : 2-D array (same dtype as input image)
Output image.
Examples
--------
>>> from skimage import data
>>> from skimage.morphology import disk
>>> from skimage.filters.rank import g |
camradal/ansible | lib/ansible/modules/windows/win_file.py | Python | gpl-3.0 | 2,738 | 0.002191 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Jon Hawkesworth (@jhawkesworth) <figs@unity.demon.co.uk>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'cor | e',
'version': '1.0'}
DOCUMENTATION = r'''
---
module: win_file
version_added: "1.9.2"
short_description: Creates, touches or removes files or directories.
description:
- Creates (empty) files, updates file modification stamps of existing files,
and can create or remove directories.
- Unlike M(file), does not modify ownership, permissions or manipulate links.
notes:
- See also M(win_copy), | M(win_template), M(copy), M(template), M(assemble)
requirements: [ ]
author: "Jon Hawkesworth (@jhawkesworth)"
options:
path:
description:
- 'path to the file being managed. Aliases: I(dest), I(name)'
required: true
aliases: ['dest', 'name']
state:
description:
- If C(directory), all immediate subdirectories will be created if they
do not exist.
If C(file), the file will NOT be created if it does not exist, see the M(copy)
or M(template) module if you want that behavior. If C(absent),
directories will be recursively deleted, and files will be removed.
If C(touch), an empty file will be created if the C(path) does not
exist, while an existing file or directory will receive updated file access and
modification times (similar to the way C(touch) works from the command line).
choices: [ file, directory, touch, absent ]
'''
EXAMPLES = r'''
- name: Create a file
win_file:
path: C:\Temp\foo.conf
state: file
- name: Touch a file (creates if not present, updates modification time if present)
win_file:
path: C:\Temp\foo.conf
state: touch
- name: Remove a file, if present
win_file:
path: C:\Temp\foo.conf
state: absent
- name: Create directory structure
win_file:
path: C:\Temp\folder\subfolder
state: directory
- name: Remove directory structure
win_file:
path: C:\Temp
state: absent
'''
|
Huyuwei/tvm | nnvm/tests/python/compiler/test_to_relay.py | Python | apache-2.0 | 2,176 | 0.001379 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.a | pache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import | nnvm
from nnvm import testing
from nnvm import to_relay
import tvm
from tvm.relay import transform
from tvm.relay import create_executor
from tvm.contrib import graph_runtime
import numpy as np
def check_model(sym, shapes, dtypes, params):
    """Compile *sym* with both NNVM and Relay and assert their outputs match."""
    net = nnvm.graph.create(sym)
    graph_json, lib, params = nnvm.compiler.build(
        net,
        'llvm',
        shape=shapes,
        dtype=dtypes,
        params=params)
    nnvm_rts = graph_runtime.create(graph_json, lib, tvm.cpu(0))
    # Random float32 inputs, one per declared input shape.
    inputs = {}
    for name in shapes:
        arr = np.random.rand(*shapes[name]).astype('float32')
        inputs[name] = tvm.nd.array(arr)
    nnvm_rts.set_input(**params)
    nnvm_rts.run(**inputs)
    nnvm_out = nnvm_rts.get_output(0)
    # Convert the same graph to Relay and run it on the same inputs.
    relay_model, params = to_relay.to_relay(net, shapes, dtypes, params)
    mod = tvm.relay.Module.from_expr(relay_model)
    mod = transform.InferType()(mod)
    relay_rts = create_executor(kind='graph', mod=mod, ctx=tvm.cpu(0), target='llvm')
    inputs.update(params)
    relay_out = relay_rts.evaluate()(*list(inputs.values()))
    np.testing.assert_allclose(nnvm_out.asnumpy(), relay_out.asnumpy())
# def test_mlp():
# mlp, params = testing.mlp.get_workload(1)
# shapes = { "data": (10, 3, 224, 224) }
# dtypes = { "data": 'float32' }
# check_model(mlp, shapes, dtypes, params)
if __name__ == "__main__":
    # test_mlp is commented out above, so calling it here raised NameError
    # whenever this file was executed as a script. Keep the guard as a no-op
    # and restore the call when the test is re-enabled.
    # test_mlp()
    pass
|
MikeDMorgan/gwas_pipeline | scripts/pheno2plot.py | Python | mit | 7,293 | 0.000686 | '''
pheno2plot.py - format and manipulate phenotype files
====================================================
:Author:
:Release: $Id$
:Date: |today|
:Tags: Python
Purpose
-------
.. Generate plots of phenotype data
Usage
-----
.. Example use case
Example::
python pheno2plot.py
Type::
python pheno2plot.py --help
for command line help.
Command line options
--------------------
'''
import sys
import CGAT.Experiment as E
from rpy2.robjects import r as R
from rpy2.robjects import pandas2ri
import pandas as pd
import PipelineGWAS as gwas
def main(argv=None):
    """script main.

    parses command line options in sys.argv, unless *argv* is given.

    Dispatches to one of three gwas plotting helpers depending on
    ``--plot-type``: a geographic map, a PCA plot, or a generic phenotype
    plot. Returns None; exits via E.Stop().
    """
    if argv is None:
        argv = sys.argv

    # setup command line parser
    parser = E.OptionParser(version="%prog version: $Id$",
                            usage=globals()["__doc__"])

    parser.add_option("-t", "--test", dest="test", type="string",
                      help="supply help")

    parser.add_option("-p", "--plot-type", dest="plot_type", type="choice",
                      choices=["histogram", "barplot", "density",
                               "boxplot", "scatter", "map",
                               "pca"],
                      help="the plot type to generate")

    parser.add_option("--plot-n-pc", dest="n_pcs", type="int",
                      help="The number of principal components to "
                      "plot")

    parser.add_option("-g", "--group-by", dest="group_by", type="string",
                      help="column header to group observations by")

    parser.add_option("-x", "--x-column", dest="x_col", type="string",
                      help="column to plot on X axis")

    parser.add_option("-y", "--y-column", dest="y_col", type="string",
                      help="column to plot on y axis")

    parser.add_option("-i", "--index_column", dest="indx", type="string",
                      help="column number that refers to the row index")

    parser.add_option("--output-file", dest="outfile", type="string",
                      help="path and filename to save plot to")

    parser.add_option("--labels", dest="labels", type="string",
                      help="a comma-separated list of axis labels. "
                      "The first 2 correspond to the X and Y-axis, "
                      "respectively, and the third is the plot title")

    parser.add_option("--metadata-file", dest="meta_file", type="string",
                      help="file containing metadata for annotating "
                      "plots with. Use `--group-labels` to define table "
                      "columns to use")

    parser.add_option("--fam-file", dest="fam_file", type="string",
                      help="Plink .fam file containing file IDs")

    parser.add_option("--xvar-labels", dest="xvar_labs", type="string",
                      help="a comma-separated list of variable X labels"
                      "only applies when X is a discrete or categorical "
                      "variable. The labels must be in the correct order")

    parser.add_option("--group-labels", dest="group_labs", type="string",
                      help="a comma-separated list of grouping variable "
                      "labels. Can only apply when the grouping variable "
                      "is discrete or categorical. The labels must be "
                      "input in the order of the data")

    parser.add_option("--yvar-labels", dest="yvar_labs", type="string",
                      help="a comma-separated list of variable Y labels"
                      "only applies when Y is a discrete or categorical "
                      "variable")

    parser.add_option("--var-type", dest="var_type", type="choice",
                      choices=["continuous", "categorical", "integer"],
                      help="The data type of the variables to be plotted."
                      "The default is continuous")

    parser.add_option("--coordinate-file", dest="coordinates", type="string",
                      help="file containing co-ordinates data")

    parser.add_option("--coords-id-col", dest="coord_ids", type="string",
                      help="column header containing individual IDs")

    parser.add_option("--lattitude-column", dest="lat_col", type="string",
                      help="column header containing lattitude co-ordinates")

    parser.add_option("--longitude-column", dest="long_col", type="string",
                      help="column header containing longitude co-ordinates")

    parser.add_option("--reference-value", dest="ref_val", type="string",
                      help="categorical variable level to dichotomise on")

    # BUG FIX: defaults must be registered *before* the command line is
    # parsed. The original code called set_defaults() after E.Start() had
    # already built the options object, so these defaults (notably
    # labels="X,Y,title" and var_type="continuous") were silently ignored.
    parser.set_defaults(y_col=None,
                        group_by=None,
                        indx=None,
                        labels="X,Y,title",
                        xvar_labs=None,
                        yvar_labs=None,
                        var_type="continuous")

    # add common options (-h/--help, ...) and parse command line
    (options, args) = E.Start(parser, argv=argv)

    infile = argv[-1]

    # NOTE: the original code unconditionally read `infile` here and then
    # re-read it inside each branch; the redundant read has been removed
    # (the "pca" branch never used it).
    if options.plot_type == "map":
        df = pd.read_table(infile, sep="\t", index_col=options.indx,
                           header=0)
        coords_df = pd.read_table(options.coordinates, sep="\t",
                                  header=0, index_col=options.indx)

        gwas.plotMapPhenotype(data=df,
                              coords=coords_df,
                              coord_id_col=options.coord_ids,
                              lat_col=options.lat_col,
                              long_col=options.long_col,
                              save_path=options.outfile,
                              xvar=options.x_col,
                              var_type=options.var_type,
                              xlabels=options.xvar_labs,
                              level=options.ref_val)

    elif options.plot_type == "pca":
        data = gwas.parseFlashPCA(pcs_file=infile,
                                  fam_file=options.fam_file)
        gwas.plotPCA(data=data,
                     nPCs=options.n_pcs,
                     point_labels=options.group_labs,
                     save_path=options.outfile,
                     headers=False,
                     metadata=options.meta_file,
                     multiplot=True)
    else:
        df = pd.read_table(infile, sep="\t", index_col=options.indx,
                           header=0)
        gwas.plotPhenotype(data=df,
                           plot_type=options.plot_type,
                           x=options.x_col,
                           y=options.y_col,
                           group=options.group_by,
                           save_path=options.outfile,
                           labels=options.labels,
                           xlabels=options.xvar_labs,
                           ylabels=options.yvar_labs,
                           glabels=options.group_labs,
                           var_type=options.var_type)

    # write footer and output benchmark information.
    E.Stop()
if __name__ == "__main__":
sys.exit(main(sys.argv))
|
ebukoz/thrive | erpnext/controllers/taxes_and_totals.py | Python | gpl-3.0 | 28,062 | 0.023698 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import json
import frappe, erpnext
from frappe import _, scrub
from frappe.utils import cint, flt, round_based_on_smallest_currency_fraction
from erpnext.controllers.accounts_controller import validate_conversion_rate, \
validate_taxes_and_charges, validate_inclusive_tax
from erpnext.stock.get_item_details import _get_item_tax_template
class calculate_taxes_and_totals(object):
def __init__(self, doc):
self.doc = doc
self.calculate()
def calculate(self):
if not len(self.doc.get("items")):
return
self.discount_amount_applied = False
self._calculate()
if self.doc.meta.get_field("discount_amount"):
self.set_discount_amount()
self.apply_discount_amount()
if self.doc.doctype in ["Sales Invoice", "Purchase Invoice"]:
self.calculate_total_advance()
if self.doc.meta.get_field("other_charges_calculation"):
self.set_item_wise_tax_breakup()
def _calculate(self):
self.validate_conversion_rate()
self.calculate_item_values()
self.validate_item_tax_template()
self.initialize_taxes()
self.determine_exclusive_rate()
self.calculate_net_total()
self.calculate_taxes()
self.manipulate_grand_total_for_inclusive_tax()
self.calculate_totals()
self._cleanup()
self.calculate_total_net_weight()
def validate_item_tax_template(self):
for item in self.doc.get('items'):
if item.item_code and item.get('item_tax_template'):
item_doc = frappe.get_cached_doc("Item", item.item_code)
args = {
'tax_category': self.doc.get('tax_category'),
'posting_date': self.doc.get('posting_date'),
'bill_date': self.doc.get('bill_date'),
'transaction_date': self.doc.get('transaction_date')
}
item_group = item_doc.item_group
item_group_taxes = []
while item_group:
item_group_doc = frappe.get_cached_doc('Item Group', item_group)
item_group_taxes += item_group_doc.taxes or []
item_group = item_group_doc.parent_item_group
item_taxes = item_doc.taxes or []
if not item_group_taxes and (not item_taxes):
# No validation if no taxes in item or item group
continue
taxes = _get_item_tax_template(args, item_taxes + item_group_taxes, for_validate=True)
if item.item_tax_template not in taxes:
frappe.throw(_("Row {0}: Invalid Item Tax Template for item {1}").format(
item.idx, frappe.bold(item.item_code)
))
def validate_conversion_rate(self):
# validate conversion rate
company_currency = erpnext.get_company_currency(self.doc.company)
if not self.doc.currency or self.doc.currency == company_currency:
self.doc.currency = company_currency
self.doc.conversion_rate = 1.0
else:
validate_conversion_rate(self.doc.currency, self.doc.conversion_rate,
self.doc.meta.get_label("conversion_rate"), self.doc.company)
self.doc.conversion_rate = flt(self.doc.conversion_rate)
def calculate_item_values(self):
if not self.discount_amount_applied:
for item in self.doc.get("items"):
self.doc.round_floats_in(item)
if item.discount_percentage == 100:
item.rate = 0.0
elif item.price_list_rate:
if not item.rate or (item.pricing_rules and item.discount_percentage > 0):
item.rate = flt(item.price_list_rate *
(1.0 - (item.discount_percentage / 100.0)), item.precision("rate"))
item.discount_amount = item.price_list_rate * (item.discount_percentage / 100.0)
elif item.discount_amount and item.pricing_rules:
item.rate = item.price_list_rate - item.discount_amount
if item.doctype in ['Quotation Item', 'Sales Order Item', 'Delivery Note Item', 'Sales Invoice Item']:
item.rate_with_margin, item.base_rate_with_margin = self.calculate_margin(item)
if flt(item.rate_with_margin) > 0:
item.rate = flt(item.rate_with_margin * (1.0 - (item.discount_percentage / 100.0)), item.precision("rate"))
item.discount_amount = item.rate_with_margin - item.rate
elif flt(item.price_list_rate) > 0:
item.discount_amount = item.price_list_rate - item.rate
elif flt(item.price_list_rate) > 0 and not item.discount_amount:
item.discount_amount = item.price_list_rate - item.rate
item.net_rate = item.rate
if not item.qty and self.doc.get("is_return"):
item.amount = flt(-1 * item.rate, item.precision("amount"))
else:
item.amount = flt(item.rate * item.qty, item.precision("amount"))
item.net_amount = item.amount
self._set_in_company_currency(item, ["price_list_rate", "rate", "net_rate", "amount", "net_amount"])
item.item_tax_amount = 0.0
def _set_in_company_currency(self, doc, fields):
"""set values in ba | se currency"""
for f in fields:
val = flt(flt(doc.get(f), doc.precision(f)) * self.doc.conversion_rate, doc.precision("base_" + f))
doc.set("base_" + f, val)
def initialize_taxes(self):
for tax in self.doc.get("taxes"):
if not self.discount_amo | unt_applied:
validate_taxes_and_charges(tax)
validate_inclusive_tax(tax, self.doc)
tax.item_wise_tax_detail = {}
tax_fields = ["total", "tax_amount_after_discount_amount",
"tax_amount_for_current_item", "grand_total_for_current_item",
"tax_fraction_for_current_item", "grand_total_fraction_for_current_item"]
if tax.charge_type != "Actual" and \
not (self.discount_amount_applied and self.doc.apply_discount_on=="Grand Total"):
tax_fields.append("tax_amount")
for fieldname in tax_fields:
tax.set(fieldname, 0.0)
self.doc.round_floats_in(tax)
def determine_exclusive_rate(self):
if not any((cint(tax.included_in_print_rate) for tax in self.doc.get("taxes"))):
return
for item in self.doc.get("items"):
item_tax_map = self._load_item_tax_rate(item.item_tax_rate)
cumulated_tax_fraction = 0
for i, tax in enumerate(self.doc.get("taxes")):
tax.tax_fraction_for_current_item = self.get_current_tax_fraction(tax, item_tax_map)
if i==0:
tax.grand_total_fraction_for_current_item = 1 + tax.tax_fraction_for_current_item
else:
tax.grand_total_fraction_for_current_item = \
self.doc.get("taxes")[i-1].grand_total_fraction_for_current_item \
+ tax.tax_fraction_for_current_item
cumulated_tax_fraction += tax.tax_fraction_for_current_item
if cumulated_tax_fraction and not self.discount_amount_applied and item.qty:
item.net_amount = flt(item.amount / (1 + cumulated_tax_fraction))
item.net_rate = flt(item.net_amount / item.qty, item.precision("net_rate"))
item.discount_percentage = flt(item.discount_percentage,
item.precision("discount_percentage"))
self._set_in_company_currency(item, ["net_rate", "net_amount"])
def _load_item_tax_rate(self, item_tax_rate):
return json.loads(item_tax_rate) if item_tax_rate else {}
def get_current_tax_fraction(self, tax, item_tax_map):
"""
Get tax fraction for calculating tax exclusive amount
from tax inclusive amount
"""
current_tax_fraction = 0
if cint(tax.included_in_print_rate):
tax_rate = self._get_tax_rate(tax, item_tax_map)
if tax.charge_type == "On Net Total":
current_tax_fraction = tax_rate / 100.0
elif tax.charge_type == "On Previous Row Amount":
current_tax_fraction = (tax_rate / 100.0) * \
self.doc.get("taxes")[cint(tax.row_id) - 1].tax_fraction_for_current_item
elif tax.charge_type == "On Previous Row Total":
current_tax_fraction = (tax_rate / 100.0) * \
self.doc.get("taxes")[cint(tax.row_id) - 1].grand_total_fraction_for_current_item
if getattr(tax, "add_deduct_tax", None):
current_tax_fraction *= -1.0 if (tax.add_deduct_tax == "Deduct") else 1.0
return current_tax_fraction
def _get_tax_rate(self, tax, item_tax_map):
if tax.account_head in item_tax_map:
return flt(item_tax_map.get(tax.account_head), self.doc.precision("rate", tax))
else:
return tax.rate
def calculate_net_total(self):
self.doc.total_qty = self.doc.total = self.doc.base_total = self.doc.net_total = self.doc.base_net_total = 0.0
for item in self.doc.get("items"):
self.doc.total += item.amount
self |
TaxIPP-Life/til-france | til_france/pgm/output/stat_final.py | Python | gpl-3.0 | 737 | 0.001357 | import pdb
from pandas import HDFStore
import pandas.rpy.common as com
from rpy2.robjects import r
__version__ = "0.0"
def stat(year):
print("Calcul des statistiques individuelles")
simul = "C:/til/output/simul.h5"
simul = HDFStore(simul)
| df = simul['entities/register']
df = df.loc[df['period'] == year]
# export en R
not_bool = df.dtypes[df.dtypes != bool]
df = df.ix[:, not_bool.index]
r_dataframe = com.convert_to_r_dataframe(df)
name = 'result_sim'
r.assign(name, r_dataframe)
| file_dir = "C:/Myliam2/output/" + name + ".gzip"
phrase = "save(" + name + ", file='" + file_dir + "', compress=TRUE)"
r(phrase)
simul.close()
if __name__ == "__main__":
stat(2011)
|
eduNEXT/edunext-platform | lms/djangoapps/discussion/notification_prefs/tests.py | Python | agpl-3.0 | 9,839 | 0.001321 | # pylint: disable=consider-iterating-dictionary, missing-module-docstring
import json
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import PermissionDenied
from django.http import Http404
from django.test import TestCase
from django.test.client import RequestFactory
from django.test.utils import override_settings
from django.urls import reverse
from mock import patch
from common.djangoapps.student.tests.factories import UserFactory
from common.djangoapps.util.testing import UrlResetMixin
from lms.djangoapps.discussion.notification_prefs import NOTIFICATION_PREF_KEY
from lms.djangoapps.discussion.notification_prefs.views import (
UsernameCipher,
ajax_disable,
ajax_enable,
ajax_status,
set_subscription
)
from openedx.core.djangoapps.user_api.models import UserPreference
@override_settings(SECRET_KEY="test secret key")
class NotificationPrefViewTest(UrlResetMixin, TestCase): # lint-amnesty, pylint: disable=missing-class-docstring
INITIALIZATION_VECTOR = b"\x00" * 16
@patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
super().setUp()
self.user = UserFactory.create(username="testuser")
# Tokens are intentionally hard-coded instead of computed to help us
# avoid breaking existing links.
self.tokens = {
self.user: "AAAAAAAAAAAAAAAAAAAAAA8mMQo96FZfb1YKv1R5X6s=",
# Username with length equal to AES block length to test padding
UserFactory.create(username="sixteencharsuser"):
"AAAAAAAAAAAAAAAAAAAAAPxPWCuI2Ay9TATBVnfw7eIj-hUh6erQ_-VkbDqHqm8D",
# Even longer username
UserFactory.create(username="thisusernameissoveryverylong"):
"AAAAAAAAAAAAAAAAAAAAAPECbYqPI7_W4mRF8LbTaHuHt3tNXPggZ1Bke-zDyEiZ",
# Non-ASCII username
UserFactory.create(username="\u4e2d\u56fd"):
"AAAAAAAAAAAAAAAAAAAAAMjfGAhZKIZsI3L-Z7nflTA="
}
self.request_factory = RequestFactory()
def create_prefs(self):
"""Create all test preferences in the database"""
for (user, token) in self.tokens.items():
UserPreference.objects.get_or_create(user=user, key=NOTIFICATION_PREF_KEY, value=token)
def assertPrefValid(self, user):
"""Ensure that the correct preference for the user is persisted"""
pref = UserPreference.objects.get(user=user, key=NOTIFICATION_PREF_KEY)
assert pref
# check exists and only 1 (.get)
# now coerce username to utf-8 encoded str, since we test with non-ascii unicdoe above and
# the unittest framework has hard time coercing to unicode.
# decrypt also can't take a unicode input, so coerce its input to str
assert bytes(user.username.encode('utf-8')) == UsernameCipher().decrypt(str(pref.value))
def assertNotPrefExists(self, user):
"""Ensure that the user does not have a persisted preference"""
assert not UserPreference.objects.filter(user=user, key=NOTIFICATION_PREF_KEY).exists()
# AJAX status view
def test_ajax_status_get_0(self):
request = self.request_factory.get("dummy")
request.user = self.user
response = ajax_status(request)
assert response.status_code == 200
assert json.loads(response.content.decode('utf-8')) == {'status': 0}
def test_ajax_status_get_1(self):
self.create_prefs()
request = self.request_factory.get("dummy")
request.user = self.user
response = ajax_status(request)
assert response.status_code == 200
assert json.loads(response.content.decode('utf-8')) == {'status': 1}
def test_ajax_status_post(self):
request = self.request_factory.post("dummy")
request.user = self.user
response = ajax_status(request)
assert response.status_code == 405
def test_ajax_status_anon_user(self):
request = self.request_factory.get("dummy")
request.user = AnonymousUser()
self.assertRaises(PermissionDenied, ajax_status, request)
# AJAX enable view
def test_ajax_enable_get(self):
request = self.request_factory.get("dummy")
request.user = self.user
response = ajax_enable(request)
assert response.status_code == 405
self.assertNotPrefExists(self.user)
def test_ajax_enable_anon_user(self):
request = self.request_factory.post("dummy")
request.user = AnonymousUser()
self.assertRaises(PermissionDenied, ajax_enable, request)
self.assertNotPrefExists(self.user)
@patch("os.urandom")
def test_ajax_enable_success(self, mock_urandom):
mock_urandom.return_value = self.INITIALIZATION_VECTOR
def test_user(user):
request = self.request_factory.post("dummy")
request.user = user
response = ajax_enable(request)
assert response.status_code == 204
self.assertPrefValid(user)
for user in self.tokens.keys():
test_user(user)
def test_ajax_enable_already_enabled(self):
self.create_prefs()
request = self.request_factory.post("dummy")
request.user = self.user
response = ajax_enable(request)
assert response.status_code == 204
self.assertPrefValid(self.user)
def test_ajax_enable_distinct_values(self):
request = self.request_factory.post("dummy")
request.user = self.user
ajax_enable(request)
other_user = UserFactory.create()
request.user = other_user
ajax_enable(request)
assert UserPreference.objects.get(
user=self.user, key=NOTIFICATION_PREF_KEY
).value != UserPreference.objects.get(
user=other_user, key=NOTIFICATION_PREF_KEY
).value
# AJAX disable view
def test_ajax_disable_get(self):
self.create_prefs()
request = self.request_factory.get("dummy")
request.user = self.user
response = ajax_disable(request)
assert response.status_code == 405
self.assertPrefValid(self.user)
def test_ajax_disable_anon_user(self):
self.create_prefs()
request = self.request_factory.post("dummy")
request.user = AnonymousUser()
self.assertRaises(PermissionDenied, ajax_disable, request)
self.assertPrefValid(self.user)
def test_ajax_disable_success(self):
self.create_prefs()
request = self.request_factory.post("dummy")
request.user = self.user
response = ajax_disable(request)
assert response.status_c | ode == 204
self.assertNotPrefExists(self.user)
def test_ajax_disable_already_disabled(self):
request = self.request_factory.post("dummy")
request.user = self.user
response = ajax_disable(request)
assert response.status_code == 204
self.assertNotPrefExists(self.user)
# Unsubscribe view
def test_unsubs | cribe_post(self):
request = self.request_factory.post("dummy")
response = set_subscription(request, "dummy", subscribe=False)
assert response.status_code == 405
def test_unsubscribe_invalid_token(self):
def test_invalid_token(token, message):
request = self.request_factory.get("dummy")
self.assertRaisesRegex(Http404, f"^{message}$", set_subscription, request, token, False)
# Invalid base64 encoding
test_invalid_token("ZOMG INVALID BASE64 CHARS!!!", "base64url")
test_invalid_token("Non-ASCII\xff", "base64url")
test_invalid_token(self.tokens[self.user][:-1], "base64url")
# Token not long enough to contain initialization vector
test_invalid_token("AAAAAAAAAAA=", "initialization_vector")
# Token length not a multiple of AES block length
test_invalid_token(self.tokens[self.user][:-4], "aes")
# Invalid padding (ends in 0 byte)
# Encrypted value: "testuser" + "\x00" * 8
test_invalid_token("AAAAAAAAAAAAAAAAAAAAAMoazRI7ePLjEWXN1N7keLw=", "padding")
# Invalid padding (en |
bkahlert/seqan-research | raw/workshop12/workshop2012-data-20120906/trunk/core/apps/razers2/tests/run_tests.py | Python | mit | 12,683 | 0.004179 | #!/usr/bin/env python
"""Execute the tests for the razers2 program.
The golden test outputs are generated by the script generate_outputs.sh.
You have to | give the root paths to the source and the binaries as arguments to
the program. These are the paths to the directory that contains the 'projects'
directory.
Usage: run_tests.py SOURCE_ROOT_PATH BINARY_ROOT_PATH
"""
import logging
import os.path
import sys
# Automagically add util/py_lib to PYTHONPATH environment variable.
path = os.path.abspath(os.path.join(os | .path.dirname(__file__), '..', '..',
'..', '..', 'util', 'py_lib'))
sys.path.insert(0, path)
import seqan.app_tests as app_tests
def main(source_base, binary_base):
"""Main entry point of the script."""
print 'Executing test for razers2'
print '==========================='
print
ph = app_tests.TestPathHelper(
source_base, binary_base,
'core/apps/razers2/tests') # tests dir
# ============================================================
# Auto-detect the binary path.
# ============================================================
path_to_program = app_tests.autolocateBinary(
binary_base, 'core/apps/razers2', 'razers2')
# ============================================================
# Built TestConf list.
# ============================================================
# Build list with TestConf objects, analoguely to how the output
# was generated in generate_outputs.sh.
conf_list = []
# ============================================================
# Run Adeno Single-End Tests
# ============================================================
# We run the following for all read lengths we have reads for.
for rl in [36, 100]:
# Run with default options.
conf = app_tests.TestConf(
program=path_to_program,
redir_stdout=ph.outFile('se-adeno-reads%d_1.stdout' % rl),
args=[ph.inFile('adeno-genome.fa'),
ph.inFile('adeno-reads%d_1.fa' % rl),
'-o', ph.outFile('se-adeno-reads%d_1.out' % rl)],
to_diff=[(ph.inFile('se-adeno-reads%d_1.out' % rl),
ph.outFile('se-adeno-reads%d_1.out' % rl)),
(ph.inFile('se-adeno-reads%d_1.stdout' % rl),
ph.outFile('se-adeno-reads%d_1.stdout' % rl))])
conf_list.append(conf)
# Allow indels.
conf = app_tests.TestConf(
program=path_to_program,
redir_stdout=ph.outFile('se-adeno-reads%d_1-id.stdout' % rl),
args=['-id',
ph.inFile('adeno-genome.fa'),
ph.inFile('adeno-reads%d_1.fa' % rl),
'-o', ph.outFile('se-adeno-reads%d_1-id.out' % rl)],
to_diff=[(ph.inFile('se-adeno-reads%d_1-id.out' % rl),
ph.outFile('se-adeno-reads%d_1-id.out' % rl)),
(ph.inFile('se-adeno-reads%d_1-id.stdout' % rl),
ph.outFile('se-adeno-reads%d_1-id.stdout' % rl))])
conf_list.append(conf)
# Compute forward/reverse matches only.
for o in ['-r', '-f']:
conf = app_tests.TestConf(
program=path_to_program,
redir_stdout=ph.outFile('se-adeno-reads%d_1-id%s.stdout' % (rl, o)),
args=['-id', o,
ph.inFile('adeno-genome.fa'),
ph.inFile('adeno-reads%d_1.fa' % rl),
'-o', ph.outFile('se-adeno-reads%d_1-id%s.out' % (rl, o))],
to_diff=[(ph.inFile('se-adeno-reads%d_1-id%s.out' % (rl, o)),
ph.outFile('se-adeno-reads%d_1-id%s.out' % (rl, o))),
(ph.inFile('se-adeno-reads%d_1-id%s.stdout' % (rl, o)),
ph.outFile('se-adeno-reads%d_1-id%s.stdout' % (rl, o)))])
conf_list.append(conf)
# Compute with different identity rates.
for i in range(90, 101):
conf = app_tests.TestConf(
program=path_to_program,
redir_stdout=ph.outFile('se-adeno-reads%d_1-id-i%d.stdout' % (rl, i)),
args=['-id', '-i', str(i),
ph.inFile('adeno-genome.fa'),
ph.inFile('adeno-reads%d_1.fa' % rl),
'-o', ph.outFile('se-adeno-reads%d_1-id-i%d.out' % (rl, i))],
to_diff=[(ph.inFile('se-adeno-reads%d_1-id-i%d.out' % (rl, i)),
ph.outFile('se-adeno-reads%d_1-id-i%d.out' % (rl, i))),
(ph.inFile('se-adeno-reads%d_1-id-i%d.stdout' % (rl, i)),
ph.outFile('se-adeno-reads%d_1-id-i%d.stdout' % (rl, i)))])
conf_list.append(conf)
# Compute with different output formats.
for of in [0, 1, 2, 3, 4, 5]:
conf = app_tests.TestConf(
program=path_to_program,
redir_stdout=ph.outFile('se-adeno-reads%d_1-id-of%d.stdout' % (rl, of)),
args=['-id', '-of', str(of),
ph.inFile('adeno-genome.fa'),
ph.inFile('adeno-reads%d_1.fa' % rl),
'-o', ph.outFile('se-adeno-reads%d_1-id-of%d.out' % (rl, of))],
to_diff=[(ph.inFile('se-adeno-reads%d_1-id-of%d.out' % (rl, of)),
ph.outFile('se-adeno-reads%d_1-id-of%d.out' % (rl, of))),
(ph.inFile('se-adeno-reads%d_1-id-of%d.stdout' % (rl, of)),
ph.outFile('se-adeno-reads%d_1-id-of%d.stdout' % (rl, of)))])
conf_list.append(conf)
# Compute with different sort orders.
for so in [0, 1]:
conf = app_tests.TestConf(
program=path_to_program,
redir_stdout=ph.outFile('se-adeno-reads%d_1-id-so%d.stdout' % (rl, so)),
args=['-id', '-so', str(so),
ph.inFile('adeno-genome.fa'),
ph.inFile('adeno-reads%d_1.fa' % rl),
'-o', ph.outFile('se-adeno-reads%d_1-id-so%d.out' % (rl, so))],
to_diff=[(ph.inFile('se-adeno-reads%d_1-id-so%d.out' % (rl, so)),
ph.outFile('se-adeno-reads%d_1-id-so%d.out' % (rl, so))),
(ph.inFile('se-adeno-reads%d_1-id-so%d.stdout' % (rl, so)),
ph.outFile('se-adeno-reads%d_1-id-so%d.stdout' % (rl, so)))])
conf_list.append(conf)
# ============================================================
# Run Adeno Paired-End Tests
# ============================================================
# We run the following for all read lengths we have reads for.
for rl in [36, 100]:
# Run with default options.
conf = app_tests.TestConf(
program=path_to_program,
redir_stdout=ph.outFile('pe-adeno-reads%d_2.stdout' % rl),
args=[ph.inFile('adeno-genome.fa'),
ph.inFile('adeno-reads%d_1.fa' % rl),
ph.inFile('adeno-reads%d_2.fa' % rl),
'-o', ph.outFile('pe-adeno-reads%d_2.out' % rl)],
to_diff=[(ph.inFile('pe-adeno-reads%d_2.out' % rl),
ph.outFile('pe-adeno-reads%d_2.out' % rl)),
(ph.inFile('pe-adeno-reads%d_2.stdout' % rl),
ph.outFile('pe-adeno-reads%d_2.stdout' % rl))])
conf_list.append(conf)
# Allow indels.
conf = app_tests.TestConf(
program=path_to_program,
redir_stdout=ph.outFile('pe-adeno-reads%d_2-id.stdout' % rl),
args=['-id',
ph.inFile('adeno-genome.fa'),
ph.inFile('adeno-reads%d_1.fa' % rl),
ph.inFile('adeno-reads%d_2.fa' % rl),
'-o', ph.outFile('pe-adeno-reads%d_2-id.out' % rl)],
to_diff=[(ph.inFile('pe-adeno-reads%d_2-id.out' % rl),
ph.outFile('pe-adeno-reads%d_2-id.out' % rl)),
(ph.inFile('pe-adeno-reads%d_2-id.stdout' |
AlexBenyuh/python_training | fixture/application.py | Python | apache-2.0 | 897 | 0.003344 | from selenium.webdriver.chrome.webdriver import WebDriver
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from fixture.session import SessionHelper
from fixture.mk import MkHelper
from fixture.cas import CasHelper
class Application:
def __init__(self):
self.wd = WebDriver()
self.wd.implicitly_wait(60)
self.session = SessionHelper(self)
self.mk = MkHelper(self)
self.cas = CasHelper(self)
def open_home_page(self):
| wd = self.wd
wd.get("https://new.kyivstar.ua/ecare/")
wd.maximize_window()
def destroy(self):
self.wd.quit()
def is_valid(self):
try:
self.wd.current_url
return True
except:
return | False
|
csparpa/pyowm | tests/unit/airpollutionapi30/test_airpollution_manager.py | Python | mit | 15,769 | 0.005454 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
import unittest
from pyowm.airpollutionapi30 import airpollution_client, airpollution_manager, coindex, so2index, ozone, no2index, airstatus
from pyowm.config import DEFAULT_CONFIG
from pyowm.constants import AIRPOLLUTION_API_VERSION
from pyowm.utils import timestamps
from tests.unit.airpollutionapi30.test_ozone import OZONE_JSON
from tests.unit.airpollutionapi30.test_coindex import COINDEX_JSON
from tests.unit.airpollutionapi30.test_no2index import NO2INDEX_JSON
from tests.unit.airpollutionapi30.test_so2index import SO2INDEX_JSON
from tests.unit.airpollutionapi30.test_airstatus import AIRSTATUS_JSON, AIRSTATUS_MULTIPLE_JSON
class TestAirPollutionManager(unittest.TestCase):
__test_instance = airpollution_manager.AirPollutionManager('fakeapikey', DEFAULT_CONFIG)
def mock_get_coi_returning_coindex_around_coords(self, params_dict):
return json.loads(COINDEX_JSON)
def mock_get_o3_returning_ozone_around_coords(self, params_dict):
return json.loads(OZONE_JSON)
def mock_get_no2_returning_no2index_around_coords(self, params_dict):
return json.loads(NO2INDEX_JSON)
def mock_get_air_pollution(self, params_dict):
return json.loads(AIRSTATUS_JSON)
def mock_get_forecast_air_pollution(self, params_dict):
return json.loads(AIRSTATUS_MULTIPLE_JSON)
def mock_get_historical_air_pollution(self, params_dict):
return json.loads(AIRSTATUS_MULTIPLE_JSON)
def mock_get_so2_returning_so2index_around_coords(self, params_dict):
return json.loads(SO2INDEX_JSON)
def test_instantiation_with_wrong_params(self):
self.assertRaises(AssertionError, airpollution_manager.AirPollutionManager, None, dict())
self.assertRaises(AssertionError, airpollution_manager.AirPollutionManager, 'apikey', None)
def test_get_uvindex_api_version(self):
result = self.__test_instance.airpollution_api_version()
self.assertIsInstance(result, tuple)
self.assertEqual(result, AIRPOLLUTION_API_VERSION)
def test_coindex_around_coords(self):
ref_to_original = airpollution_client.AirPollutionHttpClient.get_coi
airpollution_client.AirPollutionHttpClient.get_coi = \
self.mock_get_coi_returning_coindex_around_coords
result = self.__test_instance.coindex_around_coords(45, 9, interval='day')
airpollution_client.AirPollutionHttpClient.coi = ref_to_original
self.assertTrue(isinstance(result, coindex.COIndex))
self.assertIsNotNone(result.reference_time)
self.assertIsNotNone(result.reception_time())
loc = result.location
self.assertIsNotNone(loc)
self.assertIsNotNone(loc.lat)
self.assertIsNotNone(loc.lon)
self.assertIsNotNone(result.co_samples)
ref_to_original = airpollution_client.AirPollutionHttpClient.get_coi
airpollution_client.AirPollutionHttpClient.get_coi = \
self.mock_get_coi_returning_coindex_around_coords
result = self.__test_instance.coindex_around_coords(45, 9, interval=None)
airpollution_client.AirPollutionHttpClient.coi = ref_to_original
self.assertTrue(isinstance(result, coindex.COIndex))
self.assertEqual('year', result.interval)
def test_coindex_around_coords_fails_with_wrong_parameters(self):
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.coindex_around_coords, \
self.__test_instance, 43.7, -200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.coindex_around_coords, \
self.__test_instance, 43.7, 200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.coindex_around_coords, \
self.__test_instance, -200, 2.5)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.coindex_around_coords, \
self.__test_instance, 200, 2.5)
def test_ozone_around_coords(self):
ref_to_original = airpollution_client.AirPollutionHttpClient.get_o3
airpollution_client.AirPollutionHttpClient.get_o3 = \
self.mock_get_o3_returning_ozone_around_coords
result = self.__test_instance.ozone_around_coords(45, 9, interval='day')
airpollution_client.AirPollutionHttpClient.o3 = ref_to_original
self.assertTrue(isinstance(result, ozone.Ozone))
self.assertIsNotNone(result.reference_time)
self.assertIsNotNone(result.reception_time())
loc = result.location
self.assertIsNotNone(loc)
self.assertIsNotNone(loc.lat)
self.assertIsNotNone(loc.lon | )
self.assertIsNotNone(result.du_value)
ref_to_original = airpollution_client.AirPollutionHttpClient.get_o3
| airpollution_client.AirPollutionHttpClient.get_o3 = \
self.mock_get_o3_returning_ozone_around_coords
result = self.__test_instance.ozone_around_coords(45, 9, interval=None)
airpollution_client.AirPollutionHttpClient.o3 = ref_to_original
self.assertTrue(isinstance(result, ozone.Ozone))
self.assertEqual('year', result.interval)
def test_ozone_around_coords_fails_with_wrong_parameters(self):
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.ozone_around_coords, \
self.__test_instance, 43.7, -200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.ozone_around_coords, \
self.__test_instance, 43.7, 200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.ozone_around_coords, \
self.__test_instance, -200, 2.5)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.ozone_around_coords, \
self.__test_instance, 200, 2.5)
def test_no2index_around_coords(self):
ref_to_original = airpollution_client.AirPollutionHttpClient.get_no2
airpollution_client.AirPollutionHttpClient.get_no2 = \
self.mock_get_no2_returning_no2index_around_coords
result = self.__test_instance.no2index_around_coords(45, 9, interval='day')
airpollution_client.AirPollutionHttpClient.get_no2 = ref_to_original
self.assertTrue(isinstance(result, no2index.NO2Index))
self.assertIsNotNone(result.reference_time)
self.assertIsNotNone(result.reception_time())
loc = result.location
self.assertIsNotNone(loc)
self.assertIsNotNone(loc.lat)
self.assertIsNotNone(loc.lon)
self.assertIsNotNone(result.no2_samples)
ref_to_original = airpollution_client.AirPollutionHttpClient.get_no2
airpollution_client.AirPollutionHttpClient.get_no2 = \
self.mock_get_no2_returning_no2index_around_coords
result = self.__test_instance.no2index_around_coords(45, 9, interval=None)
airpollution_client.AirPollutionHttpClient.get_no2 = ref_to_original
self.assertTrue(isinstance(result, no2index.NO2Index))
self.assertEqual('year', result.interval)
def test_no2index_around_coords_fails_with_wrong_parameters(self):
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.no2index_around_coords, \
self.__test_instance, 43.7, -200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.no2index_around_coords, \
self.__test_instance, 43.7, 200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.no2index_around_coords, \
self.__test_instance, -200, 2.5)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.no2index_around_coords, \
self.__test_instance, 200, 2.5)
def test_so2index_around_coords(self):
ref_to_original = airpollution_client.AirPollutionHttpClient.get_so2
airpollution_client.AirPollutionHttpClient.get_so2 = \
self.mock_get_so2_returning_so2index_around_coords
result |
LynnHo/DCGAN-LSGAN-WGAN-WGAN-GP-Tensorflow | make_gif.py | Python | mit | 1,392 | 0.001437 | import imageio
import pylib as py
# ==============================================================================
# = param =
# ==============================================================================
py.arg('--save_path', default='pics/celeba_dragan.gif')
py.arg('--img_dir', default='output/celeba_gan_dragan/samples_training')
py.arg('--max_frames', type=int, default=0)
args = py.args()
py.mkdir(py.directory(args.save_path))
# ==============================================================================
# = make gif =
# ==============================================================================
# modified from https://www.tensorflow.org/alpha/tutorials/generative/dcgan
with imageio.get_writer(args.save_path, mode='I', fps=8) as writer:
filenames = sorted(py.glob(args.img_dir, '*.jpg'))
if args.max_frames:
step = len(filenames) | // args.max_frames
else:
step = 1
last = - | 1
for i, filename in enumerate(filenames[::step]):
frame = 2 * (i**0.3)
if round(frame) > round(last):
last = frame
else:
continue
image = imageio.imread(filename)
writer.append_data(image)
image = imageio.imread(filename)
writer.append_data(image)
|
syed/PerfKitBenchmarker | tests/aws_disk_integration_test.py | Python | apache-2.0 | 2,943 | 0.001699 | # Copyright 2015 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Integration tests for AWS scratch disks."""
import os
import unittest
from perfkitbenchmarker import pkb
from perfkitbenchmarker import test_util
MOUNT_POINT = '/scratch'
@unittest.skipUnless('PERFKIT_INTEGRATION' in os.environ,
                     'PERFKIT_INTEGRATION not in environment')
class AwsScratchDiskIntegrationTest(unittest.TestCase):
    """Integration tests for AWS disks.

    Please see the section on integration testing in the README.
    """

    def setUp(self):
        pkb.SetUpPKB()

    def _MakeBenchmarkConfig(self, machine_type, disk_spec):
        """Build the single-VM benchmark config shared by all tests.

        Args:
          machine_type: EC2 machine type string, e.g. 'm4.large'.
          disk_spec: AWS disk spec dict (disk_type, optional disk_size,
            mount_point).

        Returns:
          A vm_groups config dict accepted by test_util.assertDiskMounts.
        """
        return {
            'vm_groups': {
                'vm_group_1': {
                    'cloud': 'AWS',
                    'vm_spec': {
                        'AWS': {
                            'machine_type': machine_type,
                            'zone': 'us-east-1a'
                        }
                    },
                    'disk_spec': {
                        'AWS': disk_spec
                    }
                }
            }
        }

    def testEBSStandard(self):
        # EBS magnetic ("standard") volume.
        test_util.assertDiskMounts(
            self._MakeBenchmarkConfig('m4.large', {
                'disk_type': 'standard',
                'disk_size': 2,
                'mount_point': MOUNT_POINT
            }), MOUNT_POINT)

    def testEBSGP(self):
        # EBS general-purpose SSD volume.
        test_util.assertDiskMounts(
            self._MakeBenchmarkConfig('m4.large', {
                'disk_type': 'remote_ssd',
                'disk_size': 2,
                'mount_point': MOUNT_POINT
            }), MOUNT_POINT)

    def testLocalSSD(self):
        # Instance-store (local) disk; m3.medium provides one, no size needed.
        test_util.assertDiskMounts(
            self._MakeBenchmarkConfig('m3.medium', {
                'disk_type': 'local',
                'mount_point': MOUNT_POINT
            }), MOUNT_POINT)
|
jesterKing/naali | tools/tests/avatar-test.py | Python | apache-2.0 | 4,768 | 0.013842 | #!/usr/local/bin/python
##
# TODO:
#
##
#import
import os
import time
import shutil
import subprocess
import getpass
import glob
import autoreport
from multiprocessing import Process
from optparse import OptionParser
import config
#variables
numberOfRuns = 2        # capture rounds (override with -r)
numberOfClients = 2     # viewer clients per round (override with -c)
testName = "avatar-test"
#folder config
scriptDir = config.scriptDir
rexbinDir = config.rexbinDir
testDir = config.testDir
logsDir = config.avatarLogsDir
wiresTempDir = config.wiresTempDir
tsharkLocation = config.tsharkLocation
# test chiru or local server (default if no parameter is given)
#js = scriptDir + "/" + "autoConnect.js"
js = scriptDir + "/" + "autoConnectLocal.js"
def main():
    """Run the avatar test: capture logs, archive pcaps, generate the report."""
    operations(numberOfRuns)
    movePcapFiles()  # fixed: call was corrupted ("movePc | apFiles()")
    cleanup()
    autoreport.autoreport(testName)
    # Hard exit so lingering multiprocessing children cannot keep us alive.
    # NOTE(review): exit status 1 conventionally signals failure — confirm.
    os._exit(1)
def operations(numberOfRuns):
    """Prepare capture/log directories and run all capture+viewer rounds."""
    # Directories created here are consumed later by movePcapFiles()/cleanup().
    global avatarLogs
    global timeStamp
    #create a temp directory for pcap files
    if not os.path.exists(wiresTempDir):
        os.makedirs(wiresTempDir)
    #create a directory for log files
    timeStamp = time.strftime("%Y-%m-%dT%H:%M:%S%Z", time.localtime())
    avatarLogs = logsDir + "/logs_" + timeStamp
    os.makedirs(avatarLogs)
    #change working directory for running tundra
    os.chdir(rexbinDir)
    for i in range(1,numberOfRuns+1):
        run_clients(numberOfClients, i)
    # restore the working directory for post-processing
    os.chdir(scriptDir)
def run_clients(numberOfClients, i):
    """Run one round: for each client start a tshark capture and a Tundra
    viewer, then wait for the viewer to exit before moving on."""
    for j in range(1,numberOfClients+1):
        #os.name options: 'posix', 'nt', 'os2', 'mac', 'ce' or 'riscos'
        if os.name == 'posix' or os.name == 'mac':
            #modify wireshark temp folder owner (required for tshark when capturing all devices)
            posixModTempfolder("root")
            p1 = Process(target=posixRunTshark, args=(i, j))
            p2 = Process(target=posixRunViewer, args=(i, j))
            #start tshark
            p1.start()
            print "writing network log to file captured" + str(i) + "." + str(j) + ".pcap"
            #start viewer
            p2.start()
            print "writing log to file naaliLog" + str(i) + "." + str(j) + ".log"
            running = True
        elif os.name == 'nt': #NOT TESTED
            p1 = Process(target=ntRunTshark, args=(i, j))
            p2 = Process(target=ntRunViewer, args=(i, j))
            #start tshark
            p1.start()
            print "writing network log to file captured" + str(i) + "." + str(j) + ".pcap"
            #start viewer
            p2.start()
            print "writing log to file naaliLog" + str(i) + "." + str(j) + ".log"
            running = True
        else:
            print "os not supported"
            # NOTE(review): ``running``/``p1``/``p2`` are undefined on this
            # branch, so the loop below would raise NameError — confirm that
            # only posix/mac/nt are ever expected here.
        #while-loop to check if viewer is running
        while running == True:
            if not p2.is_alive():
                # viewer finished: give the temp dir back to the user and
                # stop the matching capture process
                running = False
                posixModTempfolder("user")
                p1.terminate()
            else:
                time.sleep(1)
def posixModTempfolder(mode):
    """Chown the wireshark temp dir: root while capturing, back to the user after."""
    if mode == "root":
        chown_cmd = ['sudo', 'chown', 'root:root', wiresTempDir]
    elif mode == "user":
        chown_cmd = ['sudo', 'chown', '-R', getpass.getuser(), wiresTempDir]
    else:
        return
    subprocess.call(chown_cmd)
def posixRunTshark(i,j):
    # Capture traffic on all interfaces filtered to port 2345 (Tundra) into
    # captured<run>.<client>.pcap; blocks until terminated by the caller.
    subprocess.call(['sudo','tshark','-i','any','-f','port 2345','-w', wiresTempDir + '/captured' + str(i) + '.' + str(j) + '.pcap'])
def posixRunViewer(i,j):
    # Run a headless Tundra viewer with the autoconnect script, teeing its
    # combined output into naaliLog<run>.<client>.log.
    x = "./Tundra --headless --run " + js + " 2>&1 | tee " + avatarLogs + "/naaliLog" + str(i) + "." + str(j) + ".log"
    subprocess.call(x, shell = True)
def ntRunTshark(i,j):
    # Windows variant of posixRunTshark using the configured tshark.exe path.
    subprocess.call([tsharkLocation + 'tshark.exe','-i','any','-f','port 2345','-w', wiresTempDir + '/captured' + str(i) + '.' + str(j) + '.pcap'])
def ntRunViewer(i,j):
    # Windows variant of posixRunViewer (Tundra.exe with viewer.xml config).
    x = rexbinDir + "/Tundra.exe --config viewer.xml --headless --run " + js + " 2>&1 | tee " + avatarLogs + "/naaliLog" + str(i) + "." + str(j) + ".log"
    subprocess.call(x, shell = True)
def movePcapFiles():
print "moving pcap files..."
pcap = glob.glob(wiresTempDir + '/captured*.*.pcap')
for i in range(0,len(pcap)):
shutil.move(pcap[i], avatarLogs)
print "pcap files moved to " + avatarLogs
def cleanup():
    # Remove the (now empty) wireshark temp directory, if it still exists.
    print "cleaning up..."
    if os.path.exists(wiresTempDir):
        os.removedirs(wiresTempDir)
if __name__ == "__main__":
    # Command-line overrides: -r run count, -c client count, -j target server.
    parser = OptionParser()
    parser.add_option("-r", "--runs", type="int", dest="numberOfRuns")
    parser.add_option("-c", "--clients", type="int", dest="numberOfClients")
    parser.add_option("-j", "--js", dest="js")
    (options, args) = parser.parse_args()
    if options.numberOfRuns:
        numberOfRuns = options.numberOfRuns
    if options.numberOfClients:
        numberOfClients = options.numberOfClients
    if options.js == "local":
        js = scriptDir + "/autoConnectLocal.js"
    if options.js == "chiru":
        js = scriptDir + "/autoConnect.js"
    main()
|
Nic30/hwtLib | hwtLib/amba/axiLite_comp/to_axi.py | Python | mit | 2,749 | 0.001455 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from typing import Optional
from hwt.serializer.mode import serializeParamsUniq
from hwt.synthesizer.interface import Interface
from hwt.synthesizer.param import Param
from hwtLib.abstract.busBridge import BusBridge
from hwtLib.amba.axi4 import Axi4, Axi4_addr
from hwtLib.amba.axi4Lite import Axi4Lite
from hwtLib.amba.constants import BURST_INCR, CACHE_DEFAULT, LOCK_DEFAULT, \
BYTES_IN_TRANS, QOS_DEFAULT
def interface_not_present_on_other(a: "Interface", b: "Interface"):
    """Collect the sub-interfaces of *a* that have no same-named attribute on *b*.

    :return: set of interfaces which does not have an equivalent on "b"
    """
    # fixed: the loop variable used to shadow the parameter ``a``; annotations
    # are string forward-references so this module stays importable lazily.
    missing_on_b = []
    for sub_intf in a._interfaces:
        if getattr(b, sub_intf._name, None) is None:
            missing_on_b.append(sub_intf)
    return set(missing_on_b)
@serializeParamsUniq
class AxiLite_to_Axi(BusBridge):
    """
    Bridge from AxiLite interface to Axi3/4 interface

    .. hwt-autodoc::
    """

    def __init__(self, intfCls=Axi4, hdl_name_override:Optional[str]=None):
        # intfCls selects the full-AXI flavour (Axi4 by default, Axi3 possible).
        self.intfCls = intfCls
        super(AxiLite_to_Axi, self).__init__(hdl_name_override=hdl_name_override)

    def _config(self):
        self.INTF_CLS = Param(self.intfCls)
        self.intfCls._config(self)
        # ID driven on every full-AXI channel that has one (AXI-Lite has none).
        self.DEFAULT_ID = Param(0)

    def _declr(self):
        with self._paramsShared():
            self.s = Axi4Lite()          # slave side: incoming AXI-Lite
            self.m = self.intfCls()._m() # master side: outgoing full AXI

    def _impl(self) -> None:
        axiFull = self.m
        axiLite = self.s

        def connect_what_is_same_lite_to_full(src, dst):
            # Drive every full-AXI signal that has a same-named AXI-Lite source.
            dst(src, exclude=interface_not_present_on_other(dst, src))

        def connect_what_is_same_full_to_lite(src, dst):
            # Mirror direction: forward only the signals AXI-Lite knows about.
            dst(src, exclude=interface_not_present_on_other(src, dst))

        def a_defaults(a: Axi4_addr):
            # Tie off address-channel signals AXI-Lite lacks: single-beat
            # INCR burst with default cache/lock/QoS attributes.
            a.id(self.DEFAULT_ID)
            a.burst(BURST_INCR)
            a.cache(CACHE_DEFAULT)
            a.len(0)
            a.lock(LOCK_DEFAULT)
            a.size(BYTES_IN_TRANS(self.DATA_WIDTH // 8))
            if hasattr(a, "qos"):
                # axi3/4 difference
                a.qos(QOS_DEFAULT)

        connect_what_is_same_lite_to_full(axiLite.ar, axiFull.ar)
        a_defaults(axiFull.ar)
        connect_what_is_same_lite_to_full(axiLite.aw, axiFull.aw)
        a_defaults(axiFull.aw)
        connect_what_is_same_lite_to_full(axiLite.w, axiFull.w)
        if hasattr(axiFull.w, "id"):
            # axi3/4 difference
            axiFull.w.id(self.DEFAULT_ID)
        axiFull.w.last(1)
        connect_what_is_same_full_to_lite(axiFull.r, axiLite.r)
        connect_what_is_same_full_to_lite(axiFull.b, axiLite.b)
if __name__ == "__main__":
    from hwt.synthesizer.utils import to_rtl_str
    # Elaborate the bridge with default parameters and dump its RTL.
    u = AxiLite_to_Axi()
    print(to_rtl_str(u))
|
jeroyang/newsletter | newsletter/__init__.py | Python | bsd-3-clause | 394 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
                        print_function, unicode_literals)
from builtins import *

import versioneer

__author__ = 'Chia-Jung, Yang'
__email__ = 'jeroyang@gmail.com'
# NOTE(review): this assignment requires the build-time ``versioneer`` package
# at import time and is immediately overwritten below — consider removing it.
__version__ = versioneer.get_version()

# Canonical version comes from versioneer's generated ``_version`` module.
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
IgnitedAndExploded/pyfire | pyfire/auth/backends.py | Python | bsd-3-clause | 1,345 | 0 | # -*- coding: utf-8 -*-
"""
pyfire.auth.backends
~~~~~~~~~~~~~~~~~~~~
Credential validation backends
:copyright: 2011 by the pyfire Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import warnings
class InvalidAuthenticationError(Exception):
    """Raised upon fail in auth"""
    pass
class CredentialValidator(object):
    """Base class for credential validation back-ends.

    Subclasses override the ``validate_*`` hooks; the defaults do nothing
    and therefore return ``None``.
    """

    def shutdown(self):
        """Release any connections or handles held by the validator."""

    def validate_userpass(self, username, password):
        """Check a username/password pair."""

    def validate_token(self, token):
        """Check an authentication token."""
class DummyTrueValidator(CredentialValidator):
    """Always returns true"""

    def __init__(self):
        # Loudly discourage production use: this accepts *any* credentials.
        warnings.warn("Do not use the DummyTrue validator in production",
                      RuntimeWarning)
        super(DummyTrueValidator, self).__init__()

    def validate_userpass(self, username, password):
        return True

    def validate_token(self, token):
        return True
class DummyFalseValidator(CredentialValidator):
    """Always returns false"""

    def validate_userpass(self, username, password):
        # Reject every username/password pair.
        return False

    def validate_token(self, token):
        # Reject every token.
        return False
|
sweco-sepesd/gmlz | python/crc32_combine.py | Python | mit | 980 | 0.041837 | '''
https://github.com/toomuchio/pycrc32combine
'''
def gf2_matrix_square(square, mat):
    """Fill *square* with mat*mat (product of a GF(2) 32x32 bit-matrix with
    itself) and return it; *square* may arrive empty or pre-sized."""
    for n in range(0, 32):
        if (len(square) < (n + 1)):
            square.append(gf2_matrix_times(mat, mat[n]))
        else:
            square[n] = gf2_matrix_times(mat, mat[n])
    return square


def gf2_matrix_times(mat, vec):
    """Multiply GF(2) matrix *mat* (list of 32 row ints) by bit-vector *vec*."""
    # renamed accumulator: the original shadowed the builtin ``sum``
    acc = 0
    i = 0
    while vec:
        if (vec & 1):
            acc = acc ^ mat[i]
        vec = (vec >> 1) & 0x7FFFFFFF
        i = i + 1
    return acc


def crc32_combine(crc1, crc2, len2):
    """Combine two CRC-32 values as zlib's crc32_combine() does.

    *crc1* is the CRC of a first block, *crc2* the CRC of a second block of
    *len2* bytes; the result equals the CRC of the concatenated blocks.
    """
    even = []
    odd = []
    if (len2 == 0):
        return crc1
    # matrix for one zero bit appended to the CRC register
    odd.append(0xEDB88320)  # fixed: Python-2-only ``L`` literal suffix removed
    row = 1
    for n in range(1, 32):
        odd.append(row)
        row = row << 1
    # square twice: even = 2 zero bits, odd = 4 zero bits; the loop below then
    # starts at 8 zero bits, i.e. one byte per bit of len2
    even = gf2_matrix_square(even, odd)
    odd = gf2_matrix_square(odd, even)
    while (len2 != 0):
        even = gf2_matrix_square(even, odd)
        if (len2 & 1):
            crc1 = gf2_matrix_times(even, crc1)
        len2 = len2 >> 1
        if (len2 == 0):
            break
        odd = gf2_matrix_square(odd, even)
        if (len2 & 1):
            crc1 = gf2_matrix_times(odd, crc1)
        len2 = len2 >> 1
    crc1 = crc1 ^ crc2
    return crc1
|
enthought/etsproxy | enthought/developer/tools/vet/class_browser.py | Python | bsd-3-clause | 114 | 0 | # proxy module
# Backwards-compatibility shim: re-export the relocated implementation.
from __future__ import absolute_import
from etsdevtools.developer.tools.vet.class_browser import *
|
pcmagic/stokes_flow | sphere/sphere_rs.py | Python | mit | 20,947 | 0.001337 | # coding=utf-8
# main codes, call functions at stokes_flow.py
# Zhang Ji, 20160410
import sys

import petsc4py

# petsc4py must be initialised with argv *before* PETSc is imported below.
petsc4py.init(sys.argv)

# import warnings
# from memory_profiler import profile
import numpy as np
from src import stokes_flow as sf
# import stokes_flow as sf
from src.stokes_flow import problem_dic, obj_dic
from petsc4py import PETSc
from src.geo import *
from time import time
import pickle
from scipy.io import savemat, loadmat
from src.ref_solution import *
# @profile
def view_matrix(m, **kwargs):
    """Display matrix *m* with matplotlib's matshow plus a colorbar.

    Recognised keyword arguments: vmin, vmax, title, cmap; anything else
    is silently ignored.
    """
    args = {
        'vmin': None,
        'vmax': None,
        'title': ' ',
        'cmap': None
    }
    for key in args:
        if key in kwargs:
            args[key] = kwargs[key]
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    cax = ax.matshow(m,
                     origin='lower',
                     vmin=args['vmin'],
                     vmax=args['vmax'],
                     cmap=plt.get_cmap(args['cmap']))
    fig.colorbar(cax)
    plt.title(args['title'])
    plt.show()
def save_vtk(problem: sf.StokesFlowProblem):
    """Write check-VTK files for spheres at each radius factor and return the
    per-radius velocity errors against the analytic sphere solution."""
    t0 = time()
    ref_slt = sphere_slt(problem)
    comm = PETSc.COMM_WORLD.tompi4py()
    rank = comm.Get_rank()  # NOTE(review): unused here — confirm it can be dropped
    problem_kwargs = problem.get_kwargs()
    fileHandle = problem_kwargs['fileHandle']
    radius = problem_kwargs['radius']
    u = problem_kwargs['u']
    sphere_err = 0
    # problem.vtk_obj(fileHandle)
    # problem.vtk_velocity('%s_Velocity' % fileHandle)
    # problem.vtk_self(fileHandle)
    theta = np.pi / 2
    sphere_check = sf.StokesFlowObj()
    sphere_geo_check = sphere_geo()  # force geo
    # fall back to a single unit radius factor when none was configured
    if not 'r_factor' in problem_kwargs:
        r_factor = np.ones(1)
    else:
        r_factor = problem_kwargs['r_factor']
    sphere_err = r_factor.copy()
    for i0, d0 in enumerate(r_factor):
        # check sphere: 2000 nodes, rigid translation u along x, rotated 90 deg
        sphere_geo_check.create_n(2000, radius * d0)
        sphere_geo_check.set_rigid_velocity([u, 0, 0, 0, 0, 0])
        sphere_geo_check.node_rotation(norm=np.array([0, 1, 0]), theta=theta)
        sphere_check.set_data(sphere_geo_check, sphere_geo_check)
        sphere_err[i0] = problem.vtk_check('%s_Check_%f' % (fileHandle, (radius * d0)), sphere_check, ref_slt)[0]
    t1 = time()
    PETSc.Sys.Print('%s: write vtk files use: %fs' % (str(problem), (t1 - t0)))
    return sphere_err
def get_problem_kwargs(**main_kwargs):
    """Collect run parameters from PETSc command-line options into a dict;
    entries in *main_kwargs* override the command-line values."""
    OptDB = PETSc.Options()
    radius = OptDB.getReal('r', 1)
    deltaLength = OptDB.getReal('d', 0.3)
    epsilon = OptDB.getReal('e', 0.3)
    u = OptDB.getReal('u', 1)
    fileHandle = OptDB.getString('f', 'sphere')
    solve_method = OptDB.getString('s', 'gmres')
    precondition_method = OptDB.getString('g', 'none')
    plot_geo = OptDB.getBool('plot_geo', False)
    debug_mode = OptDB.getBool('debug', False)
    matrix_method = OptDB.getString('sm', 'rs')
    restart = OptDB.getBool('restart', False)
    twoPara_n = OptDB.getInt('tp_n', 1)
    legendre_m = OptDB.getInt('legendre_m', 3)
    legendre_k = OptDB.getInt('legendre_k', 2)
    n_sphere_check = OptDB.getInt('n_sphere_check', 2000)
    n_node_threshold = OptDB.getInt('n_threshold', 10000)
    random_velocity = OptDB.getBool('random_velocity', False)
    getConvergenceHistory = OptDB.getBool('getConvergenceHistory', False)
    pickProblem = OptDB.getBool('pickProblem', False)
    prb_index = OptDB.getInt('prb_index', -1)
    # object-grid layout: n x/y counts and spacing, with shared fallbacks
    n_obj = OptDB.getInt('n', 1)
    n_obj_x = OptDB.getInt('nx', n_obj)
    n_obj_y = OptDB.getInt('ny', n_obj)
    distance = OptDB.getReal('dist', 3)
    distance_x = OptDB.getReal('distx', distance)
    distance_y = OptDB.getReal('disty', distance)
    move_delta = np.array([distance_x, distance_y, 1])
    # field_range: describe a sector area.
    field_range = np.array([[-3, -3, -3], [n_obj_x - 1, n_obj_y - 1, 0] * move_delta + [3, 3, 3]])
    n_grid = np.array([n_obj_x, n_obj_y, 1]) * 20
    problem_kwargs = {
        'name': 'spherePrb',
        'matrix_method': matrix_method,
        'deltaLength': deltaLength,
        'epsilon': epsilon,
        'delta': deltaLength * epsilon, # for rs method
        'd_radia': deltaLength / 2, # for sf method
        'solve_method': solve_method,
        'precondition_method': precondition_method,
        'field_range': field_range,
        'n_grid': n_grid,
        'plot_geo': plot_geo,
        'debug_mode': debug_mode,
        'fileHandle': fileHandle,
        'region_type': 'rectangle',
        'twoPara_n': twoPara_n,
        'legendre_m': legendre_m,
        'legendre_k': legendre_k,
        'radius': radius,
        'u': u,
        'random_velocity': random_velocity,
        'n_obj_x': n_obj_x,
        'n_obj_y': n_obj_y,
        'move_delta': move_delta,
        'restart': restart,
        'n_sphere_check': n_sphere_check,
        'n_node_threshold': n_node_threshold,
        'getConvergenceHistory': getConvergenceHistory,
        'pickProblem': pickProblem,
        'prb_index': prb_index,
    }
    # explicit keyword arguments win over command-line options
    for key in main_kwargs:
        problem_kwargs[key] = main_kwargs[key]
    return problem_kwargs
def print_case_info(**problem_kwargs):
    """Log the resolved configuration (geometry, matrix method, solver);
    PETSc.Sys.Print only emits on rank 0."""
    comm = PETSc.COMM_WORLD.tompi4py()
    rank = comm.Get_rank()  # NOTE(review): unused — confirm it can be dropped
    size = comm.Get_size()
    fileHandle = problem_kwargs['fileHandle']
    radius = problem_kwargs['radius']
    deltaLength = problem_kwargs['deltaLength']
    matrix_method = problem_kwargs['matrix_method']
    u = problem_kwargs['u']
    PETSc.Sys.Print('sphere radius: %f, delta length: %f, velocity: %f' % (radius, deltaLength, u))
    err_msg = "Only 'pf', 'rs', 'tp_rs', and 'lg_rs' methods are accept for this main code. "
    assert matrix_method in (
        'rs', 'rs_plane', 'tp_rs', 'lg_rs', 'rs_precondition', 'tp_rs_precondition', 'lg_rs_precondition',
        'pf'), err_msg
    epsilon = problem_kwargs['epsilon']
    # each matrix-method family reports its own extra parameters
    if matrix_method in ('rs', 'rs_plane', 'rs_precondition', 'pf'):
        PETSc.Sys.Print('create matrix method: %s, epsilon: %f'
                        % (matrix_method, epsilon))
    elif matrix_method in ('tp_rs', 'tp_rs_precondition'):
        twoPara_n = problem_kwargs['twoPara_n']
        PETSc.Sys.Print('create matrix method: %s, epsilon: %f, order: %d'
                        % (matrix_method, epsilon, twoPara_n))
    elif matrix_method in ('lg_rs', 'lg_rs_precondition'):
        legendre_m = problem_kwargs['legendre_m']
        legendre_k = problem_kwargs['legendre_k']
        PETSc.Sys.Print('create matrix method: %s, epsilon: %f, m: %d, k: %d, p: %d'
                        % (matrix_method, epsilon, legendre_m, legendre_k, (legendre_m + 2 * legendre_k + 1)))
    solve_method = problem_kwargs['solve_method']
    precondition_method = problem_kwargs['precondition_method']
    PETSc.Sys.Print('solve method: %s, precondition method: %s'
                    % (solve_method, precondition_method))
    PETSc.Sys.Print('output file headle: ' + fileHandle)
    PETSc.Sys.Print('MPI size: %d' % size)
# @profile
def main_fun(**main_kwargs):
comm = PETSc.COMM_WORLD.tompi4py()
rank = comm.Get_rank()
problem_kwargs = get_problem_kwargs(**main_kwargs)
restart = problem_kwargs['restart']
fileHandle = problem_kwargs['fileHandle']
radius = problem_kwargs['radius']
deltaLength = problem_kwargs['deltaLength']
epsilon = problem_kwargs['epsilon']
u = problem_kwargs['u']
matrix_method = problem_kwargs['matrix_method']
n_obj_x = problem_kwargs['n_obj_x']
n_obj_y = problem_kwargs['n_obj_y']
move_delta = problem_kwargs['move_delta']
random_velocity = problem_kwargs['random_velocity']
getConvergenceHistory = problem_kwargs['getConvergenceHistory']
pickProblem = problem_kwargs['pickProblem']
if not restart:
print_case_info(**problem_kwargs)
sphere_geo0 = sphere_geo() # force geo
sphere_geo0.create_delta(deltaLength, radius)
# # DBG
# nodes = ( |
rzanluchi/keyard | tests/app/test_app.py | Python | mit | 6,392 | 0.000156 | # -*- coding: utf-8 -*-
import falcon
import falcon.testing
import json
import mock
from keyard.app import resource
from keyard.app.utils import prepare_app
from keyard.app.middlewares import requireJSON
class TestKeyardResource(falcon.testing.TestBase):
    """Exercise the /keyard HTTP resource against a mocked keyard API."""

    def before(self):
        self.resource = resource.KeyardResource()
        self.resource.api = mock.MagicMock()
        self.api.add_route('/keyard', self.resource)
        prepare_app(self.api)

    def test_get(self):
        self.resource.api.get_service.return_value = ["localhost:8080"]
        body = self.simulate_request('keyard',
                                     query_string="service_name=web")
        parsed_body = json.loads(body[0])
        self.assertEqual(self.srmock.status, falcon.HTTP_200)
        self.assertIn('application/json',
                      self.srmock.headers_dict.get('content-type'))
        self.assertEqual(parsed_body.get('result'), ['localhost:8080'])
        self.resource.api.get_service.assert_called_with('web', None, None)

    def test_get_with_version(self):
        self.resource.api.get_service.return_value = ["localhost:8080"]
        body = self.simulate_request(
            'keyard', query_string="service_name=web&version=1.0")
        parsed_body = json.loads(body[0])
        self.assertEqual(self.srmock.status, falcon.HTTP_200)
        self.assertEqual(parsed_body.get('result'), ['localhost:8080'])
        self.resource.api.get_service.assert_called_with('web', '1.0', None)

    def test_get_with_load_balancer(self):
        # With a load-balancer strategy the API returns a single location.
        self.resource.api.get_service.return_value = "localhost:8080"
        body = self.simulate_request(
            'keyard',
            query_string="service_name=web&load_balancer_strategy=random")
        parsed_body = json.loads(body[0])
        self.assertEqual(self.srmock.status, falcon.HTTP_200)
        self.assertEqual(parsed_body.get('result'), 'localhost:8080')
        self.resource.api.get_service.assert_called_with('web', None, 'random')

    def test_bad_get(self):
        # A validation failure in the API surfaces as HTTP 400.
        self.resource.api.get_service.return_value = "localhost:8080"
        self.resource.api.get_service.side_effect = AssertionError
        self.simulate_request('keyard')
        self.assertEqual(self.srmock.status, falcon.HTTP_400)
        self.resource.api.get_service.assert_called_with(None, None, None)

    def test_post(self):
        self.resource.api.register.return_value = True
        self.simulate_request(
            'keyard', method="POST",
            body=json.dumps({'service_name': 'web', 'version': '1.0',
                             'location': 'localhost:8888'}))
        self.assertEqual(self.srmock.status, falcon.HTTP_200)
        self.resource.api.register.assert_called_with('web', '1.0',
                                                      'localhost:8888')

    def test_bad_post(self):
        self.resource.api.register.return_value = True
        self.resource.api.register.side_effect = AssertionError
        self.simulate_request(
            'keyard', method="POST",
            body=json.dumps({'service_name': 'web', 'version': '1.0'}))
        self.assertEqual(self.srmock.status, falcon.HTTP_400)
        self.resource.api.register.assert_called_with('web', '1.0', None)

    def test_put(self):
        self.resource.api.health_check.return_value = True
        self.simulate_request(
            'keyard', method="PUT",
            body=json.dumps({'service_name': 'web', 'version': '1.0',
                             'location': 'localhost:8888'}))
        self.assertEqual(self.srmock.status, falcon.HTTP_200)
        self.resource.api.health_check.assert_called_with('web', '1.0',
                                                          'localhost:8888')

    def test_bad_put(self):
        self.resource.api.health_check.return_value = True
        self.resource.api.health_check.side_effect = AssertionError
        self.simulate_request(
            'keyard', method="PUT",
            body=json.dumps({'service_name': 'web', 'version': '1.0'}))
        self.assertEqual(self.srmock.status, falcon.HTTP_400)
        # fixed: expected version string was corrupted ("'1 | .0'")
        self.resource.api.health_check.assert_called_with('web', '1.0', None)

    def test_delete(self):
        # fixed: attribute access was corrupted ("unregister | .return_value")
        self.resource.api.unregister.return_value = True
        self.simulate_request(
            'keyard', method="DELETE",
            body=json.dumps({'service_name': 'web', 'version': '1.0',
                             'location': 'localhost:8888'}))
        self.assertEqual(self.srmock.status, falcon.HTTP_200)
        self.resource.api.unregister.assert_called_with('web', '1.0',
                                                        'localhost:8888')

    def test_bad_delete(self):
        self.resource.api.unregister.return_value = True
        self.resource.api.unregister.side_effect = AssertionError
        self.simulate_request(
            'keyard', method="DELETE",
            body=json.dumps({'service_name': 'web', 'version': '1.0'}))
        self.assertEqual(self.srmock.status, falcon.HTTP_400)
        self.resource.api.unregister.assert_called_with('web', '1.0', None)
class ResourceWIthMiddleware(falcon.testing.TestBase):
    """Same resource, but served through the requireJSON middleware."""

    def before(self):
        self.api = falcon.API(middleware=[requireJSON()])
        self.resource = resource.KeyardResource()
        self.resource.api = mock.MagicMock()
        self.api.add_route('/keyard', self.resource)
        prepare_app(self.api)

    def test_put(self):
        # A JSON content type passes the middleware and reaches the API.
        self.resource.api.health_check.return_value = True
        self.simulate_request(
            'keyard', method="PUT",
            headers={'content-type': 'application/json'},
            body=json.dumps({'service_name': 'web', 'version': '1.0',
                             'location': 'localhost:8888'}))
        self.assertEqual(self.srmock.status, falcon.HTTP_200)
        self.resource.api.health_check.assert_called_with('web', '1.0',
                                                          'localhost:8888')

    def test_bad_put(self):
        # An empty content type is rejected with 415 before reaching the API.
        self.resource.api.health_check.return_value = True
        self.simulate_request(
            'keyard', method="PUT",
            headers={'content-type': ''},
            body=json.dumps({'service_name': 'web', 'version': '1.0',
                             'location': 'localhost:8888'}))
        self.assertEqual(self.srmock.status, falcon.HTTP_415)
|
briandailey/django-pyres | django_pyres/__init__.py | Python | bsd-3-clause | 63 | 0.015873 | VERSION = (0, 1, 5)
# Dotted version string derived from the VERSION tuple, e.g. (0, 1, 5) -> "0.1.5".
__version__ = '.'.join(map(str, VERSION))
|
HybridF5/jacket | jacket/db/storage/sqlalchemy/migrate_repo/versions/022_add_reason_column_to_service.py | Python | apache-2.0 | 890 | 0 | # Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column, MetaData, String, Table
def upgrade(migrate_engine):
    """Add a nullable ``disabled_reason`` column to ``storage_services``."""
    meta = MetaData()
    meta.bind = migrate_engine
    # reflect the existing table from the database (fixed: corrupted kwarg)
    services = Table('storage_services', meta, autoload=True)
    reason = Column('disabled_reason', String(255))
    services.create_column(reason)
|
guorendong/iridium-browser-ubuntu | third_party/webgl/src/sdk/tests/deqp/genHTMLfromTest.py | Python | bsd-3-clause | 1,016 | 0.04626 | import os
import re
# Generate an HTML file for each .test file in the current directory
#
TEST_LIST_FILE = '00_test_list.txt';
TEMPLATE = 'template.html';
def genHTML(template, test):
    # Substitute the quoted test name into the template and write <test>.html;
    # returns the generated file name.
    contents = re.sub('___TEST_NAME___', "'" + test + "'", template);
    filename = test + '.html';
    print "Generating " + filename;
    with open(test + '.html', 'w') as f:
        f.write(contents);
    return filename;
def process_test_files(template):
    # Render one HTML page for every non-hidden *.test file in the current
    # directory; returns the list of generated file names.
    generated = [];
    files = os.listdir(os.getcwd());
    for file in files:
        found = re.search('(^[^.].*)\.test$', file);
        if found:
            generated.append(genHTML(template,found.group(1)));
    return generated;
def readTemplate():
    """Return the full text of the TEMPLATE file."""
    with open(TEMPLATE, 'r') as template_file:
        return template_file.read()
template = readTemplate();
if (template):
test_list = process_test_files(template);
print "Generating " + TEST_LIST_FILE;
with open(TEST_LIST_FILE, 'w | ') as f:
for item in test_list:
f.write(item + '\n');
else:
print "Couldn't find template file: | " + TEMPLATE;
|
rec/DMXIS | Macros/Shapes/Adjust Shape/Phase +.py | Python | artistic-2.0 | 463 | 0.021598 | #===============================================================
# DMXIS Macro (c) 2010 db audioware limited
#===================== | ==========================================
sel = GetAllSelCh(False)
if len(sel)==0:
Message("Select some Pan/Tilt channels first!")
for ch in se | l:
nm = GetChName(ch).lower()
if nm=="pan" or nm=="tilt":
v = GetOscChase(ch);
v += 0.05
if v>1.0: v = v-1.0
SetOscChase(ch, v) |
ddasilva/numpy | tools/swig/test/testFarray.py | Python | bsd-3-clause | 5,156 | 0.002521 | #! /usr/bin/env python
from __future__ import division, absolute_import, print_function
# System imports
from distutils.util import get_platform
import os
import sys
import unittest
# Import NumPy
import numpy as np
major, minor = [ int(d) for d in np.__version__.split(".")[:2] ]
if major == 0: BadListError = TypeError
else: BadListError = ValueError
# Add the distutils-generated build directory to the python search path and then
# import the extension module
libDir = "lib.%s-%s" % (get_platform(), sys.version[:3])
sys.path.insert(0, os.path.join("build", libDir))
import Farray
######################################################################
class FarrayTestCase(unittest.TestCase):
    """Unit tests for the SWIG-wrapped Farray 2-D array class.

    Uses assertTrue instead of the long-deprecated failUnless alias
    (removed in Python 3.12).
    """

    def setUp(self):
        self.nrows = 5
        self.ncols = 4
        self.array = Farray.Farray(self.nrows, self.ncols)

    def testConstructor1(self):
        "Test Farray size constructor"
        self.assertTrue(isinstance(self.array, Farray.Farray))

    def testConstructor2(self):
        "Test Farray copy constructor"
        for i in range(self.nrows):
            for j in range(self.ncols):
                self.array[i, j] = i + j
        arrayCopy = Farray.Farray(self.array)
        self.assertTrue(arrayCopy == self.array)

    def testConstructorBad1(self):
        "Test Farray size constructor, negative nrows"
        self.assertRaises(ValueError, Farray.Farray, -4, 4)

    def testConstructorBad2(self):
        "Test Farray size constructor, negative ncols"
        self.assertRaises(ValueError, Farray.Farray, 4, -4)

    def testNrows(self):
        "Test Farray nrows method"
        self.assertTrue(self.array.nrows() == self.nrows)

    def testNcols(self):
        "Test Farray ncols method"
        self.assertTrue(self.array.ncols() == self.ncols)

    def testLen(self):
        "Test Farray __len__ method"
        self.assertTrue(len(self.array) == self.nrows * self.ncols)

    def testSetGet(self):
        "Test Farray __setitem__, __getitem__ methods"
        m = self.nrows
        n = self.ncols
        for i in range(m):
            for j in range(n):
                self.array[i, j] = i * j
        for i in range(m):  # fixed: loop header was corrupted ("r | ange")
            for j in range(n):
                self.assertTrue(self.array[i, j] == i * j)

    def testSetBad1(self):
        "Test Farray __setitem__ method, negative row"
        self.assertRaises(IndexError, self.array.__setitem__, (-1, 3), 0)

    def testSetBad2(self):
        "Test Farray __setitem__ method, negative col"
        # fixed: method name was corrupted ("__set | item__")
        self.assertRaises(IndexError, self.array.__setitem__, (1, -3), 0)

    def testSetBad3(self):
        "Test Farray __setitem__ method, out-of-range row"
        self.assertRaises(IndexError, self.array.__setitem__, (self.nrows + 1, 0), 0)

    def testSetBad4(self):
        "Test Farray __setitem__ method, out-of-range col"
        self.assertRaises(IndexError, self.array.__setitem__, (0, self.ncols + 1), 0)

    def testGetBad1(self):
        "Test Farray __getitem__ method, negative row"
        self.assertRaises(IndexError, self.array.__getitem__, (-1, 3))

    def testGetBad2(self):
        "Test Farray __getitem__ method, negative col"
        self.assertRaises(IndexError, self.array.__getitem__, (1, -3))

    def testGetBad3(self):
        "Test Farray __getitem__ method, out-of-range row"
        self.assertRaises(IndexError, self.array.__getitem__, (self.nrows + 1, 0))

    def testGetBad4(self):
        "Test Farray __getitem__ method, out-of-range col"
        self.assertRaises(IndexError, self.array.__getitem__, (0, self.ncols + 1))

    def testAsString(self):
        "Test Farray asString method"
        # NOTE(review): expected layout reconstructed with 2-space continuation
        # indent — confirm against Farray.asString output.
        result = """\
[ [ 0, 1, 2, 3 ],
  [ 1, 2, 3, 4 ],
  [ 2, 3, 4, 5 ],
  [ 3, 4, 5, 6 ],
  [ 4, 5, 6, 7 ] ]
"""
        for i in range(self.nrows):
            for j in range(self.ncols):
                self.array[i, j] = i + j
        self.assertTrue(self.array.asString() == result)

    def testStr(self):
        "Test Farray __str__ method"
        result = """\
[ [ 0, -1, -2, -3 ],
  [ 1, 0, -1, -2 ],
  [ 2, 1, 0, -1 ],
  [ 3, 2, 1, 0 ],
  [ 4, 3, 2, 1 ] ]
"""
        for i in range(self.nrows):
            for j in range(self.ncols):
                self.array[i, j] = i - j
        self.assertTrue(str(self.array) == result)

    def testView(self):
        "Test Farray view method"
        for i in range(self.nrows):
            for j in range(self.ncols):
                self.array[i, j] = i + j
        a = self.array.view()
        self.assertTrue(isinstance(a, np.ndarray))
        self.assertTrue(a.flags.f_contiguous)
        for i in range(self.nrows):
            for j in range(self.ncols):
                self.assertTrue(a[i, j] == i + j)
######################################################################
if __name__ == "__main__":
    # Build the test suite
    suite = unittest.TestSuite()
    suite.addTest(unittest.makeSuite(FarrayTestCase))
    # Execute the test suite
    print("Testing Classes of Module Farray")
    print("NumPy version", np.__version__)
    print()
    result = unittest.TextTestRunner(verbosity=2).run(suite)
    # non-zero exit status when any test errored or failed
    sys.exit(bool(result.errors + result.failures))
|
aristanetworks/EosSdk | examples/SimpleIntfAgent.py | Python | bsd-3-clause | 2,154 | 0.020891 | #!/usr/bin/env python
# Copyright (c) 2014 Arista Networks, Inc. All rights reserved.
# Arista Networks, Inc. Confidential and Proprietary.
import eossdk
import sys
# Listens to standard input and shuts down an interface when it
# receives a "shutdown" message. To exit, enter a blank line.
class MyTestAgent(eossdk.AgentHandler, eossdk.FdHandler):
def __init__(self, agentMgr, intfMgr, interfaceName):
print "This program controls the admin enabled state of the given interface"
print " - 'shutdown' will disable the interface"
print " - any other text will enable the interface"
print " - an empty line will quit this program"
self.agentMgr_ = agentMgr
self.intfMgr_ = intfMgr
self.intfObj_ = eossdk.IntfId(interfaceName)
eossdk.AgentHandler.__init__(self, agentMgr)
eossdk.FdHandler.__init__(self)
self.eventCount = 0
def on_initialized(self):
print "Initialized!"
self.watch_readable(0, True)
self.intfMgr_.admin_enabled_is(self.intfObj_, True)
self._printPrompt()
def on_readable(self, fd):
print "- Fd %d is readable" % fd
curEnabledStatus = ("enabled" if self.intfMgr_.admi | n_enabled(self.intfObj_)
else "disabled")
print "- %s is currently %s" % (self.intfObj_.to_string(), curEnabledStatus)
msg = sys.stdin.readline()
if msg.startswith("shut"):
print "Shutting down %s" % self.intfObj_. | to_string()
self.intfMgr_.admin_enabled_is(self.intfObj_, False)
self.eventCount += 1
elif msg.strip():
print "Enabling %s" % self.intfObj_.to_string()
self.intfMgr_.admin_enabled_is(self.intfObj_, True)
self.eventCount += 1
else:
print "Exiting!"
self.agentMgr_.exit()
self._printPrompt()
def _printPrompt(self):
print '> ',
sys.stdout.flush()
def main(args):
    # Wire the agent into the SDK and run the event loop until it exits.
    sdk = eossdk.Sdk()
    testAgent = MyTestAgent(sdk.get_agent_mgr(), sdk.get_intf_mgr(), "Ethernet1")
    sdk.main_loop(args)
    print "Handled %d events" % testAgent.eventCount
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
pytroll/pytroll-file-utils | trollmoves/heartbeat_monitor.py | Python | lgpl-3.0 | 3,623 | 0.001933 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2017
#
# Author(s):
#
# Lars Ørum Rasmussen <ras@dmi.dk>
# Janne Kotro <janne.kotro@fmi.fi>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Notes:
# - This is probably also the place to add possible alarm related plugins (fx. Nagios).
# - Timer reset from: http://code.activestate.com/recipes/577407-resettable-timer-class-a-little-enhancement-from-p/
import threading
import logging
import time
LOGGER = logging.getLogger(__name__)
# Seconds between heartbeats. A default value could be calculated after a few heartbeat.
# Newer version of posttroll is sending heartbeats including `min_interval`.
DEFAULT_MIN_INTERVAL = 30
class Monitor(threading.Thread):
    """Will monitor heartbeats.
    Will set alarm event if no heartbeat received in specified time interval.
    Will do nothing if no time interval scale defined.
    """
    def __init__(self, alarm_event, **kwargs):
        """Will set `alarm_event` if no heartbeat arrives within
        `heartbeat_alarm_scale` times the heartbeat interval.
        """
        # 0.0 (falsy) when no scale is configured -> monitoring disabled.
        self._alarm_scale = float(kwargs.get("heartbeat_alarm_scale", 0))
        self._alarm_event = alarm_event
        # Alarm timeout; refined later from the heartbeat's own min_interval.
        self._interval = self._alarm_scale * DEFAULT_MIN_INTERVAL
        # Set to request shutdown; also pulsed in __call__ to reset the wait.
        self._finished = threading.Event()
        threading.Thread.__init__(self)
    def __call__(self, msg=None):
        """Receive a heartbeat (or not) to reset the timer.
        TODO: If possibility for blocking, add a queue.
        """
        if self._alarm_scale:
            if msg and msg.type == "beat":
                try:
                    # Newer posttroll heartbeats carry their own min_interval.
                    self._interval = self._alarm_scale * float(msg.data["min_interval"])
                except (KeyError, AttributeError, TypeError, ValueError):
                    pass
            LOGGER.debug("Resetting heartbeat alarm timer to %.1f sec", self._interval)
            self._resetted = True
            # Pulse the event: wakes the wait() in run() without stopping it.
            self._finished.set()
            self._finished.clear()
    def start(self):
        # Only spawn the thread when monitoring is actually enabled.
        if self._alarm_scale:
            threading.Thread.start(self)
        return self
    def stop(self):
        # Signals run() to exit; does not join the thread.
        self._finished.set()
    #
    # Context interface.
    #
    def __enter__(self):
        return self.start()
    def __exit__(self, *exc):
        return self.stop()
    #
    # Running in the thread.
    #
    def run(self):
        LOGGER.debug("Starting heartbeat monitor with alarm scale %.2f", self._alarm_scale)
        while not self._finished.is_set():
            self._resetted = True
            # Keep waiting as long as heartbeats arrive in time; each
            # heartbeat re-arms the inner loop via _resetted.
            while self._resetted:
                self._resetted = False
                self._finished.wait(self._interval)
            # prevent a race condition between a finished set / clear (?)
            time.sleep(0.05)
            if not self._finished.is_set():
                # The wait timed out without a heartbeat: raise the alarm.
                self._set_alarm()
        LOGGER.debug("Stopping heartbeat monitor")
    def _set_alarm(self):
        if self._alarm_event:
            LOGGER.debug("Missing heartbeat alarm!")
            self._alarm_event.set()
|
matousc89/padasip | padasip/preprocess/__init__.py | Python | mit | 380 | 0 | """
In this module are placed functions related to preprocessing of data.
"""
from pa | dasip.preprocess.standardize import standardize
from padasip.preprocess.standardize_back import standardize_back
from padasip | .preprocess.input_from_history import input_from_history
from padasip.preprocess.pca import PCA, PCA_components
from padasip.preprocess.lda import LDA, LDA_discriminants
|
oscar9/statistics_viewer | processmanager/processdirectory/stat1.py | Python | gpl-3.0 | 3,078 | 0.015595 | # encoding: utf-8
import gvsig
from gvsig import getResource
import addons.statistics_viewer.statisticprocess
reload(addons.statistics_viewer.statisticprocess)
from addons.statistics_viewer.statisticprocess.abstractprocess import AbstractStatisticProcess
import os
from addons.statistics_viewer.sv import svgraph
from org.gvsig.tools.dynobject import DynField, DynObject
class ChartService(DynField, DynObject):
    """Minimal DynField/DynObject implementation used as the value class of
    the 'service12' parameter declared in StatProcess below."""
    def __init__(self):
        pass
    def getDynValue(self):
        # No backing value; the field always reads as an empty string.
        return ""
class StatProcess(AbstractStatisticProcess):
    """Example statistic process that produces an XYZ chart plus a fake
    console report (Python 2 code; kept as-is)."""
    # Metadata shown by the statistics viewer.
    name = "Test Statatistic Graph 1"
    description = "Age calculation"
    idprocess = "view-graph-example-1"
    allowZoomProcess = True
    def processParameters(self): #o: dynclass
        """Declare the input parameters (DynClass) accepted by this process."""
        #dynxml = getResource(__file__, "SHPParameters.xml")
        #dynclass = self.createDynClass(dynxml)
        manager = self.getToolsLocator().getDynObjectManager()
        #mydata = manager.createDynObject("MyStruct")
        #dynclass = manager.get("Process", "ProcessProperties")
        #if dynclass == None:
        #dynclass = manager.createDynClass("Process", "ProcessProperties", "description goes here")
        params = self.createInputParameters("Process", "ProcessProperties", "Description")
        #dynclass.addDynFieldString("name").setMandatory(False)
        #dynclass.addDynFieldString("title").setMandatory(True)
        #dynclass.addDynFieldString("type").setMandatory(True)
        #dynclass.addDynFieldBoolean("Min").setMandatory(True)
        params.addDynFieldInt("Exageration").setMandatory(True)
        # Object-typed parameter backed by the ChartService class above.
        di = params.addDynFieldObject("service12")
        di.setClassOfValue(ChartService)
        di.setDefaultDynValue([1,4])
        #manager.add(dynclass)
        #return dynclass
    def process(self, params):
        """Run the process: build an XYZ chart and a mock console report."""
        print "* params: ", params
        # generate barchart plot
        #ds = svgraph.svDefaultCategoryDataset()
        #c = svgraph.createBarChart("Boxplot x01", ds)
        #self.outputpanel = c.getChartPanel()
        #self.outputchart = c.getChart()
        # generate xyzchart plot
        ds = svgraph.svDefaultXYZDataset()
        param_name = params.get("name") #params.getField("name")
        self.createdchart = svgraph.createXYZChart("Chart x01", ds)
        ### generate output console text
        import random
        # Random type number makes each run's console output distinct.
        numer = random.randint(100, 1000)
        self.console = " ** Process calculated: Type " + str(numer)
        self.console += "** User name: " + str(param_name)
        self.console += """
    output: example no valid
    Attribute0 > 765.012954 AND Attribute1 <= 141.732431: Unsafe (143.0/1.0)
    Attribute0 > 765.012954 AND Attribute3 > 163.157393 AND Attribute0 > 773.571142:
    Unsafe (65.0)
    """
        return self.createdchart
return self.createdchart
def main(*args):
    """Manual test entry point: build the parameter object and run the process."""
    print "* stat1.py: process"
    proc = StatProcess()
    dynobject = proc.createParameters()
    dynobject.setDynValue("Exageration", 34)
    dynobject.setDynValue("service12", 2)
    result = proc.process(dynobject.getValues())
Peter-Collins/NormalForm | src/py/TaylorTest.py | Python | gpl-2.0 | 4,842 | 0.005163 | # This software is Copyright (C) 2004-2008 Bristol University
# and is released under the GNU General Public License version 2.
import unittest
from Powers import Powers
from Polynomial import Polynomial
import Taylor
class Sine(unittest.TestCase):
    """Taylor expansion of sine: x - x**3/6 + x**5/120 - ... (odd terms only).

    Uses assertFalse/assertEqual instead of the deprecated assert_ alias
    (removed in Python 3.12)."""
    def test_terms(self):
        s = Taylor.Sine(1, 0)
        # Even-order terms of sine vanish.
        self.assertFalse(s[0])
        self.assertFalse(s[2])
        self.assertFalse(s[4])
        self.assertEqual(s[1], Polynomial(1, terms={Powers((1,)): +1.0}))
        self.assertEqual(s[3], Polynomial(1, terms={Powers((3,)): -1.0/6.0}), s[3])
        self.assertEqual(s[5], Polynomial(1, terms={Powers((5,)): +1.0/120.0}), s[5])
    def test_terms_embed(self):
        # Same series, embedded as the second of two variables.
        s = Taylor.Sine(2, 1)
        self.assertFalse(s[0])
        self.assertFalse(s[2])
        self.assertFalse(s[4])
        self.assertEqual(s[1], Polynomial(2, terms={Powers((0, 1)): +1.0}))
        self.assertEqual(s[3], Polynomial(2, terms={Powers((0, 3)): -1.0/6.0}), s[3])
        self.assertEqual(s[5], Polynomial(2, terms={Powers((0, 5)): +1.0/120.0}), s[5])
    def test_terms_cached(self):
        # The caching wrapper must be transparent.
        s = Taylor.Cached(Taylor.Sine(2, 1))
        self.assertFalse(s[0])
        self.assertFalse(s[2])
        self.assertFalse(s[4])
        self.assertEqual(s[1], Polynomial(2, terms={Powers((0, 1)): +1.0}))
        self.assertEqual(s[3], Polynomial(2, terms={Powers((0, 3)): -1.0/6.0}), s[3])
        self.assertEqual(s[5], Polynomial(2, terms={Powers((0, 5)): +1.0/120.0}), s[5])
class Cosine(unittest.TestCase):
    """Taylor expansion of cosine: 1 - x**2/2 + x**4/24 - ... (even terms only).

    Uses assertFalse/assertEqual instead of the deprecated assert_ alias
    (removed in Python 3.12)."""
    def test_terms(self):
        s = Taylor.Cosine(1, 0)
        # Odd-order terms of cosine vanish.
        self.assertFalse(s[1])
        self.assertFalse(s[3])
        self.assertFalse(s[5])
        self.assertEqual(s[0], Polynomial(1, terms={Powers((0,)): +1.0}))
        self.assertEqual(s[2], Polynomial(1, terms={Powers((2,)): -1.0/2.0}), s[2])
        self.assertEqual(s[4], Polynomial(1, terms={Powers((4,)): +1.0/24.0}), s[4])
    def test_terms_embed(self):
        # Same series, embedded as the second of two variables.
        s = Taylor.Cosine(2, 1)
        self.assertFalse(s[1])
        self.assertFalse(s[3])
        self.assertFalse(s[5])
        self.assertEqual(s[0], Polynomial(2, terms={Powers((0, 0)): +1.0}))
        self.assertEqual(s[2], Polynomial(2, terms={Powers((0, 2)): -1.0/2.0}), s[2])
        self.assertEqual(s[4], Polynomial(2, terms={Powers((0, 4)): +1.0/24.0}), s[4])
class Sum(unittest.TestCase):
    """Termwise sum of two Taylor series (deprecated assert_ alias replaced;
    the degree-3 assertion was garbled in the original and is restored here)."""
    def test_sine_plus_cosine(self):
        s = Taylor.Cached(Taylor.Sine(2, 0))
        c = Taylor.Cached(Taylor.Cosine(2, 1))
        r = s+c
        # sin(x0) + cos(x1): terms alternate between the two variables.
        self.assertEqual(r[0], Polynomial(2, terms={Powers((0, 0)): +1.0}), r[0])
        self.assertEqual(r[1], Polynomial(2, terms={Powers((1, 0)): +1.0}), r[1])
        self.assertEqual(r[2], Polynomial(2, terms={Powers((0, 2)): -1.0/2.0}), r[2])
        self.assertEqual(r[3], Polynomial(2, terms={Powers((3, 0)): -1.0/6.0}), r[3])
        self.assertEqual(r[4], Polynomial(2, terms={Powers((0, 4)): +1.0/24.0}), r[4])
        self.assertEqual(r[5], Polynomial(2, terms={Powers((5, 0)): +1.0/120.0}), r[5])
class Product(unittest.TestCase):
    """Cauchy product of Taylor series (deprecated assert_ alias replaced;
    the garbled "s | elf" line from the original is restored here)."""
    def test_sine_times_cosine(self):
        s = Taylor.Cached(Taylor.Sine(2, 0))
        c = Taylor.Cached(Taylor.Cosine(2, 1))
        r = s*c
        self.assertFalse(r[0])
        self.assertEqual(r[1], Polynomial(2, terms={Powers((1, 0)): +1.0}), r[1])
    def test_sine_times_sine(self):
        s = Taylor.Cached(Taylor.Sine(2, 0))
        r = s*s
        # sin(x)**2 has only even-order terms.
        self.assertFalse(r[0])
        self.assertFalse(r[1])
        self.assertEqual(r[2], Polynomial(2, terms={Powers((2, 0)): +1.0}))
        self.assertFalse(r[3])
        self.assertEqual(r[4], Polynomial(2, terms={Powers((4, 0)): 2.0*(-1.0/6.0)}), r[4])
        self.assertFalse(r[5])
        self.assertEqual(r[6], Polynomial(2, terms={Powers((6, 0)): +2.0*1.0/120.0+1.0/36.0}), r[6])
class Bernoulli(unittest.TestCase):
    """Check the first ten Bernoulli numbers (B1 = -1/2 convention).

    Uses assertEqual instead of the deprecated assertEquals alias
    (removed in Python 3.12)."""
    def test_values(self):
        b = Taylor.bernoulli
        self.assertEqual(b(0), +1.0)
        self.assertEqual(b(1), -1.0/2.0)
        self.assertEqual(b(2), +1.0/6.0)
        self.assertEqual(b(3), +0.0)
        self.assertEqual(b(4), -1.0/30.0)
        self.assertEqual(b(5), +0.0)
        self.assertEqual(b(6), +1.0/42.0)
        self.assertEqual(b(7), +0.0)
        self.assertEqual(b(8), -1.0/30.0)
        self.assertEqual(b(9), +0.0)
class Tanh(unittest.TestCase):
    """Taylor expansion of tanh: leading terms only (deprecated assert_ alias
    replaced)."""
    def test_terms(self):
        t = Taylor.Tanh(1, 0)
        # Constant term vanishes; the first-order coefficient is 1.
        self.assertFalse(t[0])
        self.assertEqual(t[1], Polynomial(1, terms={Powers((1,)): 1.0}), t[1])
def suite():
    """Aggregate all test cases in this module into a single suite.

    unittest.makeSuite() is deprecated and removed in Python 3.13; the
    TestLoader API is the documented replacement (same 'test' prefix).
    """
    loader = unittest.TestLoader()
    cases = (Bernoulli, Tanh, Product, Sum, Sine, Cosine)
    return unittest.TestSuite(loader.loadTestsFromTestCase(case) for case in cases)
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
antoinecarme/pyaf | tests/artificial/transf_Integration/trend_ConstantTrend/cycle_5/ar_12/test_artificial_1024_Integration_ConstantTrend_5_12_20.py | Python | bsd-3-clause | 272 | 0.084559 | import | pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 1024 , FREQ = 'D', seed = 0, trendtype = "ConstantTrend", cycle_length = 5, transform = "Integration", sigma = 0.0, exog_count = 20, ar_order = 12 | ); |
yihuang/storymaker | story/models.py | Python | gpl-3.0 | 837 | 0 | from django.db import models
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
class Story(models.Model):
    """A collaborative story, pointing at the root node of its node tree."""
    title = models.CharField(max_length=256)
    created_by = models.ForeignKey(User)
    # Root node of the story tree; null until the first node exists.
    root = models.ForeignKey('StoryNode', related_name='+', null=True)

    def get_absolute_url(self):
        # BUG FIX: the method name was garbled ("get_absolute_ur | l").
        return reverse('story:story', kwargs={'id': self.id})
class StoryNode(models.Model):
    """One node (continuation) in a story tree."""
    title = models.CharField(max_length=256)
    content = models.TextField()
    # Parent node; None for the root of a story.
    parent = models.ForeignKey('StoryNode', null=True)
    # BUG FIX: "mo | dels.ForeignKey" was garbled in the original.
    story = models.ForeignKey(Story)
    stars = models.IntegerField(default=0)
    created_by = models.ForeignKey(User)
    created_at = models.DateTimeField(auto_now_add=True)

    def get_absolute_url(self):
        return reverse('story:node', kwargs={'id': self.id})
|
pichuang/OpenNet | mininet-patch/examples/cluster/gre.py | Python | gpl-2.0 | 2,041 | 0.006859 | #!/usr/bin/python
"""
gre.py
mininet1 mininet2
-------- --------
| | | |
| s1=========s2 |
| | | | | |
| h1 | | h2 |
| | | |
-------- --------
=== : cross-link
| : link
Testing enviroment (cat /etc/hosts) :
192.168.59.100 mininet1
192.168.59.101 mininet2
"""
# from mininet.examples.cluster import MininetCluster
from mininet.log import setLogLevel
from mininet.node import Controller, RemoteController
from mininet.link import Link, Intf
from mininet.util import quietRun, errRun
from mininet.cluster.node import *
from mininet.cluster.net import *
from mininet.cluster.placer import *
from mininet.cluster.link import *
from mininet.cluster.clean import *
from mininet.cluster.cli import ClusterCLI as CLI
def demo():
    """Build the two-server cluster topology and join s1/s2 with a GRE tunnel.

    s1/h1 live on mininet1, s2/h2 on mininet2; the cross-link is a GRE port
    added to each switch via ovs-vsctl.
    """
    CONTROLLER_IP="192.168.59.100"
    CONTROLLER_PORT=6633
    servers = [ 'mininet1', 'mininet2' ]
    net = MininetCluster( controller=RemoteController, servers=servers)
    c0 = net.addController( 'c0', controller=RemoteController, ip=CONTROLLER_IP, port=CONTROLLER_PORT)
    # In mininet1
    s1 = net.addSwitch('s1')
    h1 = net.addHost('h1', ip="10.0.0.1")
    net.addLink(s1, h1)
    # In mininet2
    s2 = net.addSwitch('s2', server="mininet2")
    h2 = net.addHost('h2', ip="10.0.0.2", server="mininet2")
    net.addLink(s2, h2)
    # BUG FIX: "net.st | art()" was garbled in the original.
    net.start()
    # Cross-link between mininet1 and mininet2
    s1_endpoint_address = "192.168.59.100"
    s2_endpoint_address = "192.168.59.101"
    key = "99"
    # BUG FIX: "optio | ns:local_ip" inside the first ovs-vsctl command was
    # garbled; both commands now use the correct "options:local_ip" key.
    s1.cmdPrint("ovs-vsctl add-port s1 s1-eth2 -- set interface s1-eth2 type=gre options:local_ip=" + s1_endpoint_address + " options:remote_ip=" + s2_endpoint_address + " options:key=" + key)
    s2.cmdPrint("ovs-vsctl add-port s2 s2-eth2 -- set interface s2-eth2 type=gre options:local_ip=" + s2_endpoint_address + " options:remote_ip=" + s1_endpoint_address + " options:key=" + key)
    info("Recommand command:\n")
    info("$ tcpdump -i eth0 proto GRE\n")
    CLI( net )
    net.stop()
|
nocarryr/AV-Asset-Manager | avam/object_history/urls.py | Python | gpl-3.0 | 208 | 0.004808 | fr | om django.conf.urls import url
from object_history import views
# Route /object_history/<pk>/ to the ObjectHistory detail view.
urlpatterns = [
    url(r'^object_history/(?P<pk>[0-9]+)/$',
        views.ObjectHistory.as_view(),
        name='object_history',
    ),
]
| |
hakonsbm/nest-simulator | pynest/nest/tests/test_errors.py | Python | gpl-2.0 | 2,240 | 0.000446 | # -*- coding: utf-8 -*-
#
# test_errors.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Tests for error handling
"""
import unittest
import nest
@nest.ll_api.check_stack
class ErrorTestCase(unittest.TestCase):
    """Tests if errors are handled correctly"""

    def test_Raise(self):
        """Error raising"""
        def raise_custom_exception(exc, msg):
            raise exc(msg)
        message = "test"
        exception = nest.kernel.NESTError
        self.assertRaisesRegex(
            exception, message, raise_custom_exception, exception, message)

    def test_StackUnderFlow(self):
        """Stack underflow"""
        nest.ResetKernel()
        self.assertRaisesRegex(
            nest.kernel.NESTError, "StackUnderflow", nest.ll_api.sr, 'clear ;')

    def test_DivisionByZero(self):
        """Division by zero"""
        nest.ResetKernel()
        self.assertRaisesRegex(
            nest.kernel.NESTError, "DivisionByZero", nest.ll_api.sr, '1 0 div')

    def test_UnknownNode(self):
        """Unknown node"""
        nest.ResetKernel()
        # BUG FIX: "nest.kern | el.NESTError" was garbled in the original.
        self.assertRaisesRegex(
            nest.kernel.NESTError, "UnknownNode", nest.Connect, (99, ), (99, ))

    def test_UnknownModel(self):
        """Unknown model name"""
        nest.ResetKernel()
        self.assertRaisesRegex(
            nest.kernel.NESTError, "UnknownModelName", nest.Create, -1)
def suite():
    """Build the suite of ErrorTestCase tests.

    unittest.makeSuite() is deprecated and removed in Python 3.13; the
    TestLoader API (same default 'test' prefix) is the replacement.
    """
    return unittest.TestLoader().loadTestsFromTestCase(ErrorTestCase)
def run():
    """Run the suite with verbose output."""
    # BUG FIX: "verbosity= | 2" was garbled in the original.
    runner = unittest.TextTestRunner(verbosity=2)
    runner.run(suite())
if __name__ == "__main__":
run()
|
nmiranda/dedupe | tests/test_crossvalidation.py | Python | mit | 2,046 | 0.017595 | import dedupe
import unittest
import numpy
class KFoldsTest(unittest.TestCase):
    """Argument handling of dedupe.crossvalidation.kFolds."""
    def test_normal_k(self) :
        # 6 folds over 6 items -> exactly one fold per item.
        l = list(dedupe.crossvalidation.kFolds(numpy.array(range(6)), 6))
        assert len(l) == 6
    def test_small_k(self) :
        # Fewer than 2 folds is rejected.
        self.assertRaises(ValueError,
                          lambda : list(dedupe.crossvalidation.kFolds(numpy.array(range(6)), 1)))
    def test_small_training(self) :
        # Not enough data to form 2 folds.
        self.assertRaises(ValueError,
                          lambda : list(dedupe.crossvalidation.kFolds(numpy.array(range(1)), 2)))
    def test_large_k(self) :
        # Expected: k larger than the data yields only len(data) folds.
        l = list(dedupe.crossvalidation.kFolds(numpy.array(range(4)), 10))
        assert len(l) == 4
class scoreTest(unittest.TestCase) :
    """Behaviour of dedupe.crossvalidation.scorePredictions."""
    def test_no_true(self) :
        # No true positives at all -> score 0.
        score = dedupe.crossvalidation.scorePredictions(numpy.zeros(5),
                                                        numpy.ones(5))
        assert score == 0
    def test_no_predicted(self) :
        # Nothing predicted positive -> score 0.
        score = dedupe.crossvalidation.scorePredictions(numpy.ones(5),
                                                        numpy.zeros(5))
        assert score == 0
    def test_all_predicted(self) :
        # Perfect predictions -> score 1.
        score = dedupe.crossvalidation.scorePredictions(numpy.ones(5),
                                                        numpy.ones(5))
        assert score == 1
    def test_mismatched_predictions(self) :
        # BUG FIX: this method was also named test_all_predicted, which
        # shadowed the test above so it never ran; renamed so both execute.
        score = dedupe.crossvalidation.scorePredictions(numpy.array([1,0,1,0]),
                                                        numpy.array([1,1,0,0]))
        assert score == 0
class scoreReduction(unittest.TestCase) :
    """dedupe.crossvalidation.reduceScores: averaging that ignores Nones."""
    def test_nones(self) :
        # All-None input reduces to 0.
        avg_score = dedupe.crossvalidation.reduceScores([None, None])
        assert avg_score == 0
    def test_some_nones(self) :
        # Nones are dropped before averaging.
        avg_score = dedupe.crossvalidation.reduceScores([1, None])
        assert avg_score == 1
    def test_no_nones(self) :
        avg_score = dedupe.crossvalidation.reduceScores([1, 0])
        assert avg_score == 0.5
if __name__ == "__main__":
unittest.main()
|
Code4SA/pmg-cms-2 | pmg/models/posts.py | Python | apache-2.0 | 2,048 | 0.000488 | from builtins import str
from sqlalchemy import func, sql
from sqlalchemy.orm import validates
from .base import FileLinkMixin, ApiResource
from pmg import db
class Post(ApiResource, db.Model):
    """A site post with optional attached files; exposed through the API."""
    __tablename__ = "post"
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String, nullable=False)
    # URL slug; leading/trailing slashes stripped by validate_slug below.
    slug = db.Column(db.String, nullable=False, unique=True, index=True)
    featured = db.Column(
        db.Boolean(),
        default=False,
        server_default=sql.expression.false(),
        nullable=False,
        index=True,
    )
    body = db.Column(db.Text)
    date = db.Column(
        db.DateTime(timezone=True),
        index=True,
        unique=False,
        nullable=False,
        server_default=func.now(),
    )
    files = db.relationship("PostFile", lazy="joined")
    created_at = db.Column(
        db.DateTime(timezone=True),
        index=True,
        unique=False,
        nullable=False,
        server_default=func.now(),
    )
    updated_at = db.Column(
        db.DateTime(timezone=True),
        server_default=func.now(),
        # BUG FIX: "func.current | _timestamp()" was garbled in the original.
        onupdate=func.current_timestamp(),
    )

    def get_preview_image(self):
        """Return the first attached file (used as the preview), or None."""
        if self.files:
            return self.files[0].file
        else:
            return None

    @validates("slug")
    def validate_slug(self, key, value):
        # Normalise slugs so they never start or end with a slash.
        return value.strip("/")

    def __str__(self):
        return str(self.title)
class PostFile(FileLinkMixin, db.Model):
    """Link table attaching a File to a Post (cascade-deleted with either)."""
    __tablename__ = "post_files"
    id = db.Column(db.Integer, primary_key=True)
    # BUG FIX: "d | b.Column" was garbled in the original.
    post_id = db.Column(
        db.Integer,
        db.ForeignKey("post.id", ondelete="CASCADE"),
        index=True,
        nullable=False,
    )
    post = db.relationship("Post")
    file_id = db.Column(
        db.Integer,
        db.ForeignKey("file.id", ondelete="CASCADE"),
        index=True,
        nullable=False,
    )
    file = db.relationship("File", lazy="joined")
# Register all the resource types. This ensures they show up in the API and are searchable
ApiResource.register(Post)
|
axbaretto/beam | sdks/python/apache_beam/runners/worker/sdk_worker_main_test.py | Python | apache-2.0 | 4,025 | 0.003727 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a co | py of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writ | ing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tests for apache_beam.runners.worker.sdk_worker_main."""
# pytype: skip-file
import logging
import unittest
from hamcrest import all_of
from hamcrest import assert_that
from hamcrest import has_entry
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.value_provider import RuntimeValueProvider
from apache_beam.runners.worker import sdk_worker_main
from apache_beam.runners.worker import worker_status
class SdkWorkerMainTest(unittest.TestCase):
  """Tests for option parsing and harness creation in sdk_worker_main."""

  # Used for testing newly added flags.
  class MockOptions(PipelineOptions):
    @classmethod
    def _add_argparse_args(cls, parser):
      # Includes deliberately malformed "eam:"/"beam:" prefixed names to
      # exercise the urn-style option parsing below.
      parser.add_argument('--eam:option:m_option:v', help='mock option')
      parser.add_argument('--eam:option:m_option:v1', help='mock option')
      parser.add_argument('--beam:option:m_option:v', help='mock option')
      parser.add_argument('--m_flag', action='store_true', help='mock flag')
      parser.add_argument('--m_option', help='mock option')
      parser.add_argument(
          '--m_m_option', action='append', help='mock multi option')
  def test_status_server(self):
    # Wrapping the method to see if it appears in threadump
    def wrapped_method_for_test():
      threaddump = worker_status.thread_dump()
      self.assertRegex(threaddump, '.*wrapped_method_for_test.*')
    wrapped_method_for_test()
  def test_parse_pipeline_options(self):
    # Plain JSON "options" dict form.
    assert_that(
        sdk_worker_main._parse_pipeline_options(
            '{"options": {' + '"m_option": "/tmp/requirements.txt", ' +
            '"m_m_option":["beam_fn_api"]' + '}}').get_all_options(),
        all_of(
            has_entry('m_m_option', ['beam_fn_api']),
            has_entry('m_option', '/tmp/requirements.txt')))
    # Urn-style "beam:option:<name>:v1" keys are unpacked to plain names.
    assert_that(
        sdk_worker_main._parse_pipeline_options(
            '{"beam:option:m_option:v1": "/tmp/requirements.txt", ' +
            '"beam:option:m_m_option:v1":["beam_fn_api"]}').get_all_options(),
        all_of(
            has_entry('m_m_option', ['beam_fn_api']),
            has_entry('m_option', '/tmp/requirements.txt')))
    # Keys that only *look* like urns (wrong version/prefix) pass through.
    assert_that(
        sdk_worker_main._parse_pipeline_options(
            '{"options": {"beam:option:m_option:v":"mock_val"}}').
        get_all_options(),
        has_entry('beam:option:m_option:v', 'mock_val'))
    assert_that(
        sdk_worker_main._parse_pipeline_options(
            '{"options": {"eam:option:m_option:v1":"mock_val"}}').
        get_all_options(),
        has_entry('eam:option:m_option:v1', 'mock_val'))
    assert_that(
        sdk_worker_main._parse_pipeline_options(
            '{"options": {"eam:option:m_option:v":"mock_val"}}').
        get_all_options(),
        has_entry('eam:option:m_option:v', 'mock_val'))
  def test_runtime_values(self):
    # A dry-run harness creation must still publish runtime option values.
    test_runtime_provider = RuntimeValueProvider('test_param', int, None)
    sdk_worker_main.create_harness({
        'CONTROL_API_SERVICE_DESCRIPTOR': '',
        'PIPELINE_OPTIONS': '{"test_param": 37}',
    },
                                   dry_run=True)
    self.assertTrue(test_runtime_provider.is_accessible())
    self.assertEqual(test_runtime_provider.get(), 37)
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
unittest.main()
|
shweta97/pyta | examples/pylint/W0212_protected_access.py | Python | gpl-3.0 | 141 | 0.007092 | class MyClass:
def __init__(self | ):
self._num = 42
c = MyClass()
# Should not be accessing the protected attribute:
print(c. | _num)
|
jkorell/PTVS | Python/Tests/TestData/DjangoProject/urls.py | Python | apache-2.0 | 813 | 0.00861 | from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'DjangoApplication1.views.home', name='home'),
    # url(r'^DjangoApplication1/', include('DjangoApplication1.fob.urls')),
    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),
    url(r'^Oar/$', 'Oar.views.index'),
    url(r'^/$', 'oar.views.main'),
    url(r'^loop_nobom/$', 'Oar.views.loop_nobom'),
    url(r'^loop/$', 'Oar.views.loop'),
    # BUG FIX: this entry had a stray "|" prefix in the original.
    url(r'^loop2/$', 'Oar.views.loop2'),
)
|
KeoH/django-keoh-kstore | kstore/models/manufacturers.py | Python | bsd-2-clause | 104 | 0.019231 | #encoding:utf-8
from .abstracts import BaseCompanyModel
class Manufacturer(BaseCompanyModel):
    """Concrete manufacturer model; all fields come from BaseCompanyModel.

    BUG FIX: the original class line was garbled ("BaseCompanyMod | el")
    and the body had a stray "|" before pass; both are restored here.
    """
    pass
|
imyousuf/smart-patcher | src/smart-patcher.py | Python | gpl-3.0 | 5,526 | 0.007781 | #!/usr/bin/python
# This programs is intended to manage patches and apply them automatically
# through email in an automated fashion.
#
# Copyright (C) 2008 Imran M Yousuf (imran@smartitengineering.com)
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHO | UT ANY WARRANTY; without even the implied warran | ty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import poplib, email, re, sys, xmlConfigs, utils;
class ReferenceNode :
    """A node in the patch-email reference graph.

    Associates one email message with the message-ids it references
    (its ancestry) and the child nodes discovered under it.
    """
    def __init__(self, node, emailMessage, references=None, children=None, slotted=False):
        """Create a node.

        node -- message-id of this email.
        emailMessage -- the parsed email message object.
        references -- message-ids this email refers to (copied defensively).
        children -- initial children keyed by message-id (copied defensively).
        slotted -- whether the node has been placed in the tree.
                   BUG FIX: the old default was bool("false"), which is
                   True; the intended default is False (callers below
                   immediately reset it to False after construction).
        """
        self.node = node
        # None defaults avoid shared mutable default arguments; defensive
        # copies keep callers from mutating our state through their lists.
        self.children = dict(children) if children is not None else {}
        self.references = list(references) if references is not None else []
        self.slotted = slotted
        self.emailMessage = emailMessage
    def get_node(self):
        return self.node
    def get_children(self):
        return self.children
    def set_node(self, node):
        self.node = node
    def set_children(self, children):
        self.children = children
    def get_references(self):
        return self.references
    def is_slotted(self):
        return self.slotted
    def set_slotted(self, slotted):
        self.slotted = slotted
    def get_message(self):
        return self.emailMessage
    def __repr__(self):
        return self.node + "\nREF: " + str(self.references) + "\nChildren: " + str(self.children.keys()) + "\n"
def handleNode(currentNodeInAction, referenceNodeNow, referencesToCheck, patchMessageReferenceNode):
    """Walk one step down the reference chain while slotting a node.

    If the node being visited (referenceNodeNow) already has one of the
    remaining reference ids as a child, consume that id and return the
    child node so the caller can descend further.  Once no references
    remain, attach currentNodeInAction as a direct child of the node we
    stopped at and return None (implicitly), ending the walk.
    """
    # Iterate over a copy: the list is mutated inside the loop.
    for reference in referencesToCheck[:] :
        if reference in referenceNodeNow.get_children() :
            referencesToCheck.remove(reference)
            return patchMessageReferenceNode[reference]
    if len(referencesToCheck) == 0 :
        referenceNodeNow.get_children()[currentNodeInAction.get_node()] = currentNodeInAction
def makeChildren(patchMessageReferenceNode) :
    """Link every node in the flat message-id -> ReferenceNode map into a tree.

    For each node (in sorted message-id order) the walk starts at the node
    named by its first reference and repeatedly calls handleNode, which
    either descends to an already-known child or attaches the node once
    its reference list is exhausted.
    """
    ref_keys = patchMessageReferenceNode.keys()
    ref_keys.sort()
    for messageId in ref_keys:
        referenceNode = patchMessageReferenceNode[messageId]
        utils.verboseOutput(verbose, "Managing Message Id:", referenceNode.get_node())
        referenceIds = referenceNode.get_references()
        # Clone: handleNode consumes ids from this list as it descends.
        referenceIdsClone = referenceIds[:]
        utils.verboseOutput(verbose, "Cloned References: ", referenceIdsClone)
        if len(referenceIds) > 0 :
            nextNode = patchMessageReferenceNode[referenceIdsClone[0]]
            referenceIdsClone.remove(referenceIdsClone[0])
            # Descend until handleNode attaches the node (returns None).
            while nextNode != None :
                utils.verboseOutput(verbose, "Next Node: ", nextNode.get_node())
                utils.verboseOutput(verbose, "Curent Node: ", referenceNode.get_node())
                utils.verboseOutput(verbose, "REF: ", referenceIdsClone)
                nextNode = handleNode(referenceNode, nextNode, referenceIdsClone, patchMessageReferenceNode)
if __name__ == "__main__":
    arguments = sys.argv
    verbose = "false"
    pseudoArgs = arguments[:]
    # Consume the command line: every argument after the script name is
    # removed; -v/--verbose switches verbose output on.
    while len(pseudoArgs) > 1 :
        argument = pseudoArgs[1]
        if argument == "-v" or argument == "--verbose" :
            verbose = "true"
        pseudoArgs.remove(argument)
    utils.verboseOutput(verbose, "Checking POP3 for gmail")
    try:
        emailConfig = xmlConfigs.initializePopConfig("./email-configuration.xml")
        myPop = emailConfig.get_pop3_connection()
        numMessages = len(myPop.list()[1])
        patchMessages = dict()
        for i in range(numMessages):
            utils.verboseOutput(verbose, "Index: ", i)
            # POP3 RETR returns the message as a list of lines; rebuild it.
            totalContent = ""
            for content in myPop.retr(i+1)[1]:
                totalContent += content + '\n'
            msg = email.message_from_string(totalContent)
            # Only consider messages whose subject looks like "[..PATCH..] ..."
            if 'subject' in msg :
                subject = msg['subject']
                subjectPattern = "^\[.*PATCH.*\].+"
                subjectMatch = re.match(subjectPattern, subject)
                utils.verboseOutput(verbose, "Checking subject: ", subject)
                if subjectMatch == None :
                    continue
            else :
                continue
            messageId = ""
            if 'message-id' in msg:
                messageId = re.search("<(.*)>", msg['message-id']).group(1)
                utils.verboseOutput(verbose, 'Message-ID:', messageId)
            referenceIds = []
            if 'references' in msg:
                references = msg['references']
                referenceIds = re.findall("<(.*)>", references)
                utils.verboseOutput(verbose, "References: ", referenceIds)
            currentNode = ReferenceNode(messageId, msg, referenceIds)
            patchMessages[messageId] = currentNode
            # BUG FIX: bool("false") evaluates to True; the intent is False.
            currentNode.set_slotted(False)
        utils.verboseOutput(verbose, "**************Make Children**************")
        makeChildren(patchMessages)
        utils.verboseOutput(verbose, "--------------RESULT--------------")
        utils.verboseOutput(verbose, patchMessages)
    except Exception:
        # BUG FIX: was a bare "except:", which also swallows SystemExit and
        # KeyboardInterrupt; only real errors are reported now.
        utils.verboseOutput(verbose, "Error: ", sys.exc_info())
JasonWyse/FacRankSvm_c | table6.py | Python | bsd-3-clause | 2,609 | 0.040245 | from parameter import *
from parse_digit import *
from os import system
# Build the helper tools and create output directories (errors ignored).
cmd = "make -C tools >/dev/null 2>/dev/null;mkdir log model 2>/dev/null"
system(cmd)
# Remove any method/data set below whose results you are not interested in.
methodlist = ['random-forest','gbdt']
data = ['MQ2007','MQ2008','MSLR','YAHOO_SET1','YAHOO_SET2','MQ2007-list','MQ2008-list']
# Emit the LaTeX table header: three result columns per method.
print "\\begin{tabular}{l"+"|rrr"*len(methodlist) +"}"
if 'random-forest' in methodlist:
    print "& \\multicolumn{3}{|c}{Random forests}",
if 'gbdt' in methodlist:
    print "& \\multicolumn{3}{|c}{GBDT}",
print "\\\\"
print "& Training & Pairwise & "*len(methodlist), "\\\\"
print "Data set "+"& time (s) & accuracy & NDCG "*len(methodlist) +"\\\\"
print "\\hline"
for d in data:
o = []
for method in methodlist:
dp = log_path + d + '.' + method+ '.fewtrees.log'
try:
tmp_data = open(dp,'r').readlines()
except:
traindata = path + data_path[d]
testdata = path + test_path[d]
if method == 'random-forest':
cmd = "%s -f %s -F -z -p %s -k %s -t %s %s %s ./tmp_file >> %s 2>/dev/null"%(tree_exe,num_feature[d],num_processors, num_sampled_feature[d], tree_num_few[method],traindata,testdata,dp)
elif method == 'gbdt':
model = model_path + d + '.' + method + '.' + 'fewtrees.model'
cmd = "mpirun -np %s %s %s %s %s 4 100 0.1 -m >%s 2>> %s"%(8,gbrt_exe,traindata,num_instance[d],num_feature[d]+1,model,dp)
system('echo \'%s\' >> %s'%(cmd, dp))
system(cmd)
cmd = "cat %s|python %s ./tmp_exe"%(model,gbrt_compile_test)
system('echo \'%s\' >> %s'%(cmd, dp))
system(cmd)
cmd = "cat %s|./tmp_exe > ./tmp_file"%testdata
system('echo \'%s\' >> %s'%(cmd, dp))
system(cmd)
cmd = "tools/eval ./tmp_file %s >> %s;rm -f tmp_file ./tmp_exe*"%(testdata, dp)
system('echo \'%s\' >> %s'%(cmd, dp))
system(cmd)
tmp_data = open(dp,'r').readlines()
for l in tmp_data:
if 'time' in l:
time = l.split(' ')[-1].strip()
digit = FormatWithCommas("%5.1f",float(time))
digit = "$"+digit+"$"
o.append(digit)
if 'accuracy' in l:
acc = l.split(' ')[-2].strip().strip('%')
digit = "$%5.2f$"%float(acc)+"\\%"
o.append(digit)
if d == 'YAHOO_SET1' or d == ' | YAHOO_SET2':
if '(YAHOO) | ' in l:
ndcg = l.split(' ')[-2].strip()
digit = "$%1.4f$"%float(ndcg)
o.append(digit)
else:
if 'Mean' in l:
if d == 'MQ2007-list' or d == 'MQ2008-list':
digit = "NA"
else:
ndcg = l.split(' ')[-2].strip()
digit = "$%1.4f$"%float(ndcg)
o.append(digit)
print output_name[d],
for l in o:
print "& %s "%l,
print "\\\\"
print "\\end{tabular}"
|
guoyr/geo-caching | store_commands.py | Python | mit | 835 | 0.016766 | from twisted.protocols import amp
from constants import *
class SendSingleImageInfo(amp.Command):
    """AMP command: push the metadata of one cached image to a peer."""
    arguments = [(USER_UID_KEY, amp.String()), (CACHE_UID_KEY, amp.String()), (IMAGE_UID_KEY, amp.String())]
    response = [("success", amp.Boolean())]
#inform new master of master change
class InitiateMasterChange(amp.Command):
    """AMP command: tell the new master that it is taking over from old_master_key."""
    arguments = [(USER_UID_KEY, amp.String()), ("old_master_key", amp.String())]
    response = [("ack", amp.Boolean())]
#new master issue this command to old master
class SendAllImages(amp.Command):
    """AMP command: new master requests the full image list from the old master."""
    arguments = [(USER_UID_KEY, amp.String())]
    response = [("image_info_list", amp.ListOf(amp.String()))]
#new master issue to old master to indicate master transfer is complete
class FinishMasterTransfer(amp.Command):
    """AMP command: new master tells the old master the transfer is complete.

    BUG FIX: the original attribute lines were garbled ("argument | s" and
    a stray "|" before response); restored here.
    """
    arguments = [(USER_UID_KEY, amp.String())]
    response = [("success", amp.Boolean())]
|
derekjamescurtis/veritranspay | tests/response_bins_tests.py | Python | bsd-3-clause | 911 | 0.004391 | from unittest import TestCase
from veritranspay.response import BinResponse
class BinsRequestResponseTests(TestCase):
    """Parsing of a card-BIN lookup response payload into BinResponse."""
    def setUp(self):
        # example response data from
        # --
        self.response_json = {
            "data": {
                "country_name": "Indonesia",
                "country_code": "id",
                "brand": "visa",
                "bin_type": "credit",
                "bin_class": "gold",
                "bin": "455633",
                "bank_code": "bca",
                "bank": "bank central asia"
            }
        }
        # BUG FIX: "s | tatus_message" was garbled in the original.
        self.parsed_response = BinResponse(status_code=200, status_message='', **self.response_json)
    def test_status_code(self):
        # BUG FIX: "(200, | self..." was garbled in the original.
        self.assertEqual(200, self.parsed_response.status_code)
    def test_response_data(self):
        self.assertEqual(self.parsed_response.serialize().get('data'), self.response_json.get('data'))
|
vangj/py-bbn | pybbn/graph/dag.py | Python | apache-2.0 | 11,592 | 0.000863 | import json
import networkx as nx
from pybbn.graph.edge import Edge, EdgeType
from pybbn.graph.graph import Graph
from pybbn.graph.node import BbnNode
from pybbn.graph.variable import Variable
class Dag(Graph):
    """
    Directed acyclic graph.
    """

    def __init__(self):
        """
        Ctor.
        """
        Graph.__init__(self)

    def get_n2i(self):
        """
        Gets a map of node names to identifiers.
        :return: Dictionary.
        """
        return {node.variable.name: i for i, node in self.nodes.items()}

    def get_i2n(self):
        """
        Gets a map of node identifiers to names.
        :return: Dictionary.
        """
        return {i: node.variable.name for i, node in self.nodes.items()}

    def get_parents(self, id):
        """
        Gets the parent IDs of the specified node.
        :param id: Node id.
        :return: Array of parent ids.
        """
        # A node pa is a parent of id when the edge map records pa -> id.
        return [pa for pa in self.edge_map if id in self.edge_map[pa]]

    def get_children(self, node_id):
        """
        Gets the children IDs of the specified node.
        :param node_id: Node id.
        :return: Array of children ids.
        """
        return list(self.edge_map[node_id])

    def __shouldadd__(self, edge):
        """
        Checks if the specified directed edge should be added: it must be a
        directed edge, not a self-loop, not already present in either
        direction, and must not introduce a cycle.
        :param edge: Directed edge.
        :return: A boolean indicating if the edge should be added.
        """
        if EdgeType.DIRECTED != edge.type:
            return False
        parent, child = edge.i, edge.j
        if parent.id == child.id:
            return False
        if child.id in self.edge_map[parent.id] or parent.id in self.edge_map[child.id]:
            return False
        # Adding parent -> child creates a cycle iff a child -> parent
        # path already exists.
        return not PathDetector(self, child.id, parent.id).exists()

    def edge_exists(self, id1, id2):
        """
        Checks if a directed edge exists between the specified id. e.g. id1 -> id2
        :param id1: Node id.
        :param id2: Node id.
        :return: A boolean indicating if a directed edge id1 -> id2 exists.
        """
        return id2 in self.edge_map[id1] and id1 not in self.edge_map[id2]

    def to_nx_graph(self):
        """
        Converts this DAG to a NX DiGraph for visualization.
        :return: A tuple, where the first item is the NX DiGraph and the
            second item maps node ids to node names.
        """
        g = nx.DiGraph()
        labels = {}
        for _, node in self.nodes.items():
            g.add_node(node.id)
            labels[node.id] = node.variable.name
        for _, edge in self.edges.items():
            g.add_edges_from([(edge.i.id, edge.j.id, {})])
        return g, labels
class Bbn(Dag):
    """
    BBN: a DAG of BbnNodes that additionally tracks, per node, the list of
    its parents' ids.
    """

    def __init__(self):
        """
        Ctor.
        """
        Dag.__init__(self)
        # Maps node id -> list of parent node ids (in insertion order).
        self.parents = {}

    def get_parents_ordered(self, id):
        """
        Gets the IDs of the specified node's parents, sorted ascending.
        :param id: ID of node.
        :return: Sorted list of parent IDs (empty list if no parents).
        """
        return sorted(self.parents[id]) if id in self.parents else []

    def __edge_added__(self, edge):
        """
        Book-keeping hook: records edge.i as a parent of edge.j exactly once.
        :param edge: Edge that was just added.
        """
        if edge.j.id not in self.parents:
            self.parents[edge.j.id] = []
        if edge.i.id not in self.parents[edge.j.id]:
            self.parents[edge.j.id].append(edge.i.id)

    def __shouldadd__(self, edge):
        """
        Checks if the specified directed edge should be added.
        :param edge: Directed edge.
        :return: A boolean indicating if the directed edge should be added.
        """
        if isinstance(edge.i, BbnNode) and isinstance(edge.j, BbnNode):
            return True
        # Bug fix: __shouldadd__ is an instance method, so delegating to the
        # base class must pass ``self`` explicitly; the previous
        # ``Dag.__shouldadd__(edge)`` raised a TypeError when reached.
        return Dag.__shouldadd__(self, edge)

    @staticmethod
    def to_csv(bbn, path):
        """
        Writes the specified BBN in CSV format. Node lines look like
        ``id,name,value1,...,|,prob1,...`` and edge lines look like
        ``parent_id,child_id,directed``.
        :param bbn: BBN.
        :param path: Path to file.
        :return: None.
        """
        with open(path, 'w') as f:
            for node in bbn.get_nodes():
                v = node.variable
                vals = ','.join(v.values)
                probs = ','.join([str(p) for p in node.probs])
                s_node = f'{v.id},{v.name},{vals},|,{probs}'
                f.write(s_node)
                f.write('\n')
            for _, edge in bbn.edges.items():
                t = 'directed' if edge.type == EdgeType.DIRECTED else 'undirected'
                s_edge = f'{edge.i.id},{edge.j.id},{t}'
                f.write(s_edge)
                f.write('\n')

    @staticmethod
    def from_csv(path):
        """
        Reads a BBN from a CSV file produced by :meth:`to_csv`.
        :param path: Path to CSV file.
        :return: BBN.
        """
        with open(path, 'r') as f:
            nodes = {}
            edges = []
            for line in f:
                tokens = line.split(',')
                if 3 == len(tokens):
                    # Edge line. The third (type) token is ignored: every
                    # BBN edge is re-created as a directed edge below.
                    edge = int(tokens[0]), int(tokens[1])
                    edges.append(edge)
                else:
                    # Node line: variable description left of '|', probs right.
                    tokens = line.split('|')
                    v_part = [item.strip() for item in tokens[0].split(',') if len(item.strip()) > 0]
                    p_part = [item.strip() for item in tokens[1].split(',') if len(item.strip()) > 0]
                    i = int(v_part[0])
                    v = Variable(i, v_part[1], v_part[2:])
                    p = [float(p) for p in p_part]
                    node = BbnNode(v, p)
                    nodes[i] = node
            bbn = Bbn()
            for _, node in nodes.items():
                bbn.add_node(node)
            for edge in edges:
                pa_id, ch_id = edge
                pa = nodes[pa_id]
                ch = nodes[ch_id]
                bbn.add_edge(Edge(pa, ch, EdgeType.DIRECTED))
            return bbn

    @staticmethod
    def to_dict(bbn):
        """
        Gets a JSON serializable dictionary representation.
        :param bbn: BBN.
        :return: Dictionary.
        """
        return {
            'nodes': {n.id: n.to_dict() for n in bbn.get_nodes()},
            'edges': [{'pa': edge.i.id, 'ch': edge.j.id} for _, edge in bbn.edges.items()]
        }

    @staticmethod
    def from_dict(d):
        """
        Creates a BBN from a dictionary (deserialized JSON).
        :param d: Dictionary.
        :return: BBN.
        """
        def get_variable(d):
            return Variable(d['id'], d['name'], d['values'])

        def get_bbn_node(d):
            return BbnNode(get_variable(d['variable']), d['probs'])

        nodes = {k: get_bbn_node(n) for k, n in d['nodes'].items()}
        edges = d['edges']

        bbn = Bbn()
        for k, n in nodes.items():
            bbn.add_node(n)
        for e in edges:
            pa_id = e['pa']
            ch_id = e['ch']
            # JSON object keys are strings, so fall back to the string form
            # of the id when the integer key is absent.
            pa = nodes[pa_id] if pa_id in nodes else nodes[str(pa_id)]
            ch = nodes[ch_id] if ch_id in nodes else nodes[str(ch_id)]
            bbn.add_edge(Edge(pa, ch, EdgeType.DIRECTED))
        return bbn

    @staticmethod
    def to_json(bbn, path):
        """
        Serializes BBN to JSON.
        :param bbn: BBN.
        :param path: Path.
        :return: None.
        """
        s = json.dumps(Bbn.to_dict(bbn), indent=2)
        with open(path, 'w') as f:
            f.write(s)

    @staticmethod
    def from_json(path):
        """
        Deserializes BBN from JSON.
        :param path: Path.
        :return: BBN.
        """
        with open(path, 'r') as f:
            d = json.loads(f.read())
            bbn = Bbn.from_dict(d)
            return bbn
class PathDetector(object):
"""
Detects path between two nodes.
"""
def __init__(self, graph, start, stop):
"""
Ctor.
:param graph: DAG.
:param start: Start node id.
:param stop: Stop node id.
"""
self.graph = graph
self.start = start
self.stop = stop
self.seen = set()
def exists(self):
"""
Check |
huaweiswitch/neutron | neutron/tests/unit/openvswitch/test_ovs_neutron_agent.py | Python | apache-2.0 | 73,059 | 0.000246 | # Copyright (c) 2012 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import sys
import mock
import netaddr
from oslo.config import cfg
import testtools
from neutron.agent.linux import async_process
from neutron.agent.linux import ip_lib
from neutron.agent.linux import ovs_lib
from neutron.agent.linux import utils
from neutron.common import constants as n_const
from neutron.openstack.common import log
from neutron.plugins.common import constants as p_const
from neutron.plugins.openvswitch.agent import ovs_neutron_agent
from neutron.plugins.openvswitch.common import constants
from neutron.tests import base
NOTIFIER = 'neutron.plugins.ml2.rpc.AgentNotifierApi'
OVS_LINUX_KERN_VERS_WITHOUT_VXLAN = "3.12.0"
FAKE_MAC = '00:11:22:33:44:55'
FAKE_IP1 = '10.0.0.1'
FAKE_IP2 = '10.0.0.2'
class CreateAgentConfigMap(base.BaseTestCase):

    def test_create_agent_config_map_succeeds(self):
        # The stock configuration must always yield a valid config map.
        self.assertTrue(ovs_neutron_agent.create_agent_config_map(cfg.CONF))

    def test_create_agent_config_map_fails_for_invalid_tunnel_config(self):
        # Tunneling needs an ip address and there is no default; verify the
        # failure for both gre and vxlan tunnel types.
        for tunnel_type in (p_const.TYPE_GRE, p_const.TYPE_VXLAN):
            cfg.CONF.set_override('tunnel_types', [tunnel_type],
                                  group='AGENT')
            with testtools.ExpectedException(ValueError):
                ovs_neutron_agent.create_agent_config_map(cfg.CONF)

    def test_create_agent_config_map_enable_tunneling(self):
        # Setting only enable_tunneling must default tunnel_types to GRE.
        cfg.CONF.set_override('tunnel_types', None, group='AGENT')
        cfg.CONF.set_override('enable_tunneling', True, group='OVS')
        cfg.CONF.set_override('local_ip', '10.10.10.10', group='OVS')
        config_map = ovs_neutron_agent.create_agent_config_map(cfg.CONF)
        self.assertEqual(config_map['tunnel_types'], [p_const.TYPE_GRE])

    def test_create_agent_config_map_fails_no_local_ip(self):
        # An ip address is required for tunneling but there is no default.
        cfg.CONF.set_override('enable_tunneling', True, group='OVS')
        with testtools.ExpectedException(ValueError):
            ovs_neutron_agent.create_agent_config_map(cfg.CONF)

    def test_create_agent_config_map_fails_for_invalid_tunnel_type(self):
        cfg.CONF.set_override('tunnel_types', ['foobar'], group='AGENT')
        with testtools.ExpectedException(ValueError):
            ovs_neutron_agent.create_agent_config_map(cfg.CONF)

    def test_create_agent_config_map_multiple_tunnel_types(self):
        cfg.CONF.set_override('local_ip', '10.10.10.10', group='OVS')
        cfg.CONF.set_override('tunnel_types',
                              [p_const.TYPE_GRE, p_const.TYPE_VXLAN],
                              group='AGENT')
        config_map = ovs_neutron_agent.create_agent_config_map(cfg.CONF)
        self.assertEqual(config_map['tunnel_types'],
                         [p_const.TYPE_GRE, p_const.TYPE_VXLAN])

    def test_create_agent_config_map_enable_distributed_routing(self):
        self.addCleanup(cfg.CONF.reset)
        # The DVR flag must be carried through into the config map.
        cfg.CONF.set_override('enable_distributed_routing', True,
                              group='AGENT')
        config_map = ovs_neutron_agent.create_agent_config_map(cfg.CONF)
        self.assertEqual(config_map['enable_distributed_routing'], True)
class TestOvsNeutronAgent(base.BaseTestCase):
    def setUp(self):
        super(TestOvsNeutronAgent, self).setUp()
        # Replace the RPC notifier class with a Mock so no messages are
        # actually published during the tests.
        notifier_p = mock.patch(NOTIFIER)
        notifier_cls = notifier_p.start()
        self.notifier = mock.Mock()
        notifier_cls.return_value = self.notifier
        cfg.CONF.set_default('firewall_driver',
                             'neutron.agent.firewall.NoopFirewallDriver',
                             group='SECURITYGROUP')
        kwargs = ovs_neutron_agent.create_agent_config_map(cfg.CONF)
        # Stand-in for FixedIntervalLoopingCall that invokes the callback
        # once, synchronously, instead of scheduling it on a timer.
        class MockFixedIntervalLoopingCall(object):
            def __init__(self, f):
                self.f = f
            def start(self, interval=0):
                self.f()
        # Patch out every OVS/system interaction so the agent can be
        # instantiated without a running Open vSwitch.
        with contextlib.nested(
            mock.patch('neutron.plugins.openvswitch.agent.ovs_neutron_agent.'
                       'OVSNeutronAgent.setup_integration_br',
                       return_value=mock.Mock()),
            mock.patch('neutron.plugins.openvswitch.agent.ovs_neutron_agent.'
                       'OVSNeutronAgent.setup_ancillary_bridges',
                       return_value=[]),
            mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
                       'create'),
            mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
                       'set_secure_mode'),
            mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
                       'get_local_port_mac',
                       return_value='00:00:00:00:00:01'),
            mock.patch('neutron.agent.linux.utils.get_interface_mac',
                       return_value='00:00:00:00:00:01'),
            mock.patch('neutron.agent.linux.ovs_lib.'
                       'get_bridges'),
            mock.patch('neutron.openstack.common.loopingcall.'
                       'FixedIntervalLoopingCall',
                       new=MockFixedIntervalLoopingCall)):
            self.agent = ovs_neutron_agent.OVSNeutronAgent(**kwargs)
            # set back to true because initial report state will succeed due
            # to mocked out RPC calls
            self.agent.use_call = True
        self.agent.tun_br = mock.Mock()
        self.agent.sg_agent = mock.Mock()
    def _mock_port_bound(self, ofport=None, new_local_vlan=None,
                         old_local_vlan=None):
        # Helper: exercise OVSNeutronAgent.port_bound() with a mocked port
        # and assert the expected OVSDB reads/writes and flow deletions for
        # the given old/new VLAN tag combination.
        port = mock.Mock()
        port.ofport = ofport
        net_uuid = 'my-net-uuid'
        fixed_ips = [{'subnet_id': 'my-subnet-uuid',
                      'ip_address': '1.1.1.1'}]
        if old_local_vlan is not None:
            # Pre-populate the agent's VLAN map so port_bound() sees the
            # network as already provisioned with old_local_vlan.
            self.agent.local_vlan_map[net_uuid] = (
                ovs_neutron_agent.LocalVLANMapping(
                    old_local_vlan, None, None, None))
        with contextlib.nested(
            mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
                       'set_db_attribute', return_value=True),
            mock.patch('neutron.agent.linux.ovs_lib.OVSBridge.'
                       'db_get_val', return_value=str(old_local_vlan)),
            mock.patch.object(self.agent.int_br, 'delete_flows')
        ) as (set_ovs_db_func, get_ovs_db_func, delete_flows_func):
            self.agent.port_bound(port, net_uuid, 'local', None, None,
                                  fixed_ips, "compute:None", False)
            # port_bound() must always read the current tag exactly once.
            get_ovs_db_func.assert_called_once_with("Port", mock.ANY, "tag")
            if new_local_vlan != old_local_vlan:
                # A retag writes the new tag and drops stale flows, except
                # when the port has the invalid ofport -1.
                set_ovs_db_func.assert_called_once_with(
                    "Port", mock.ANY, "tag", str(new_local_vlan))
                if ofport != -1:
                    delete_flows_func.assert_called_once_with(in_port=port.ofport)
                else:
                    self.assertFalse(delete_flows_func.called)
            else:
                # Unchanged tag: no OVSDB write and no flow deletion.
                self.assertFalse(set_ovs_db_func.called)
                self.assertFalse(delete_flows_func.called)
def _setup_for_dvr_test(self, ofport=10):
self._port = mock.Mock()
self._port.ofport = ofport
self._port.vif_id = "1234-5678-90"
self.agent.enable_distributed_routing |
inikdom/rnn-speech | models/AcousticModel.py | Python | mit | 45,676 | 0.004335 | # coding=utf-8
"""
Based on the paper:
http://arxiv.org/pdf/1601.06581v2.pdf
And some improvements from :
https://arxiv.org/pdf/1609.05935v2.pdf
This model is:
Acoustic RNN trained with ctc loss
"""
import tensorflow as tf
from tensorflow.python.client import timeline
import numpy as np
import time
import os
from datetime import datetime
import logging
from random import randint
import util.audioprocessor as audioprocessor
import util.dataprocessor as dataprocessor
class AcousticModel(object):
    def __init__(self, num_layers, hidden_size, batch_size, max_input_seq_length,
                 max_target_seq_length, input_dim, normalization, num_labels):
        """
        Initialize the acoustic rnn model parameters
        Parameters
        ----------
        :param num_layers: number of lstm layers
        :param hidden_size: size of hidden layers
        :param batch_size: number of training examples fed at once
        :param max_input_seq_length: maximum length of input vector sequence
        :param max_target_seq_length: maximum length of output vector sequence
        :param input_dim: dimension of input vector
        :param normalization: boolean indicating whether or not to normalize data in an input batch
        :param num_labels: the numbers of output labels
        """
        # Store model's parameters
        self.num_layers = num_layers
        self.hidden_size = hidden_size
        self.batch_size = batch_size
        self.max_input_seq_length = max_input_seq_length
        self.max_target_seq_length = max_target_seq_length
        self.input_dim = input_dim
        self.normalization = normalization
        self.num_labels = num_labels
        # Create object's variables for tensorflow ops
        # (all ops are None until one of the create_*_rnn methods runs)
        self.rnn_state_zero_op = None
        self.rnn_keep_state_op = None
        self.saver_op = None
        # Create object's variable for result output
        self.prediction = None
        # Create object's variables for placeholders
        self.input_keep_prob_ph = self.output_keep_prob_ph = None
        self.inputs_ph = self.input_seq_lengths_ph = self.labels_ph = None
        # Create object's variables for dataset's iterator input
        self.iterator_get_next_op = None
        # NOTE: this tf.Variable is created eagerly, so merely constructing
        # the model mutates the current default TensorFlow graph.
        self.is_training_var = tf.Variable(initial_value=False, trainable=False, name="is_training_var", dtype=tf.bool)
        # Create object's variable for hidden state
        self.rnn_tuple_state = None
        # Create object's variables for training
        self.input_keep_prob = self.output_keep_prob = None
        self.global_step = None
        self.learning_rate_var = None
        # Create object variables for tensorflow training's ops
        self.learning_rate_decay_op = None
        self.accumulated_mean_loss = self.acc_mean_loss_op = self.acc_mean_loss_zero_op = None
        self.accumulated_error_rate = self.acc_error_rate_op = self.acc_error_rate_zero_op = None
        self.mini_batch = self.increase_mini_batch_op = self.mini_batch_zero_op = None
        self.acc_gradients_zero_op = self.accumulate_gradients_op = None
        self.train_step_op = None
        # Create object's variables for tensorboard
        self.tensorboard_dir = None
        self.timeline_enabled = False
        self.train_summaries_op = None
        self.test_summaries_op = None
        self.summary_writer_op = None
        # Create object's variables for status checking
        self.rnn_created = False
    def create_forward_rnn(self):
        """
        Create the forward-only RNN
        Parameters
        -------
        :return: the logits
        """
        if self.rnn_created:
            # NOTE(review): logging.fatal only logs a message and does not
            # raise, so execution continues past this guard -- confirm
            # whether an exception was intended here.
            logging.fatal("Trying to create the acoustic RNN but it is already.")
        # Set placeholders for input
        self.inputs_ph = tf.placeholder(tf.float32, shape=[self.max_input_seq_length, None, self.input_dim],
                                        name="inputs_ph")
        self.input_seq_lengths_ph = tf.placeholder(tf.int32, shape=[None], name="input_seq_lengths_ph")
        # Build the RNN (the final True presumably selects forward-only
        # mode in _build_base_rnn -- verify against its signature)
        self.global_step, logits, self.prediction, self.rnn_keep_state_op, self.rnn_state_zero_op,\
            _, _, self.rnn_tuple_state = self._build_base_rnn(self.inputs_ph, self.input_seq_lengths_ph, True)
        # Add the saving and restore operation
        self.saver_op = self._add_saving_op()
        return logits
def create_training_rnn(self, input_keep_prob, output_keep_prob, grad_clip, learning_rate, lr_decay_factor,
use_iterator=False):
"""
Create the training RNN
Parameters
----------
:param input_keep_prob: probability of keeping input signal for a cell during training
:param output_keep_prob: probability of keeping output signal from a cell during training
:param grad_clip: max gradient size (prevent exploding gradients)
:param learning_rate: learning rate parameter fed to optimizer
:param | lr_decay_factor: decay factor of the learning rate
:param use_iterator: if True then plug an iterator.get_next() operation for the input of the model, if None
placeholders are created instead
"""
| if self.rnn_created:
logging.fatal("Trying to create the acoustic RNN but it is already.")
# Store model parameters
self.input_keep_prob = input_keep_prob
self.output_keep_prob = output_keep_prob
if use_iterator is True:
mfcc_batch, input_lengths, label_batch = self.iterator_get_next_op
# Pad if the batch is not complete
padded_mfcc_batch = tf.pad(mfcc_batch, [[0, self.batch_size - tf.size(input_lengths)], [0, 0], [0, 0]])
# Transpose padded_mfcc_batch in order to get time serie as first dimension
# [batch_size, time_serie, input_dim] ====> [time_serie, batch_size, input_dim]
inputs = tf.transpose(padded_mfcc_batch, perm=[1, 0, 2])
# Pad input_seq_lengths if the batch is not complete
input_seq_lengths = tf.pad(input_lengths, [[0, self.batch_size - tf.size(input_lengths)]])
# Label tensor must be provided as a sparse tensor.
idx = tf.where(tf.not_equal(label_batch, 0))
sparse_labels = tf.SparseTensor(idx, tf.gather_nd(label_batch, idx),
[self.batch_size, self.max_target_seq_length])
# Pad sparse_labels if the batch is not complete
sparse_labels, _ = tf.sparse_fill_empty_rows(sparse_labels, self.num_labels - 1)
else:
# Set placeholders for input
self.inputs_ph = tf.placeholder(tf.float32, shape=[self.max_input_seq_length, None, self.input_dim],
name="inputs_ph")
self.input_seq_lengths_ph = tf.placeholder(tf.int32, shape=[None], name="input_seq_lengths_ph")
self.labels_ph = tf.placeholder(tf.int32, shape=[None, self.max_target_seq_length],
name="labels_ph")
inputs = self.inputs_ph
input_seq_lengths = self.input_seq_lengths_ph
label_batch = self.labels_ph
# Label tensor must be provided as a sparse tensor.
# First get indexes from non-zero positions
idx = tf.where(tf.not_equal(label_batch, 0))
# Then build a sparse tensor from indexes
sparse_labels = tf.SparseTensor(idx, tf.gather_nd(label_batch, idx),
[self.batch_size, self.max_target_seq_length])
self.global_step, logits, prediction, self.rnn_keep_state_op, self.rnn_state_zero_op, self.input_keep_prob_ph,\
self.output_keep_prob_ph, self.rnn_tuple_state = self._build_base_rnn(inputs, input_seq_lengths, False)
# Add the train part to the network
self.learning_rate_var = self._add_training_on_rnn(logits, grad_clip, learning_rate, lr_decay_factor,
sparse_labels, input_seq_lengths, prediction)
# Add the saving and restore op |
pronexo-odoo/odoo-argentina | l10n_ar_wsafip_fe/__openerp__.py | Python | agpl-3.0 | 2,467 | 0.004866 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2012 OpenERP - Team de Localización Argentina.
# https://launchpad.net/~openerp-l10n-ar-localization
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#'test/invoice.yml',
#'test/inv_2prod.yml',
#'test/inv_2iva.yml',
#'test/query_invoices.yml'
{
    # Module identity
    'name': 'Argentina - Web Services de Factura Electr\xc3\xb3nica del AFIP',
    'version': '2.7.244',
    'category': 'Localization/Argentina',
    'author': 'OpenERP - Team de Localizaci\xc3\xb3n Argentina',
    'website': 'https://launchpad.net/~openerp-l10n-ar-localization',
    'license': 'AGPL-3',
    'description': '\n\nAPI e GUI para acceder a las Web Services de Factura Electr\xc3\xb3nica de la AFIP\n\n',
    # Activation flags
    'active': False,
    'installable': True,
    # Dependencies and data files (load order of the lists is significant)
    'depends': ['l10n_ar_wsafip', 'l10n_ar_invoice'],
    'init_xml': ['data/afip.wsfe_error.csv'],
    'demo_xml': [],
    'update_xml': [
        'data/wsafip_server.xml',
        'data/invoice_view.xml',
        'data/invoice_workflow.xml',
        'data/journal_view.xml',
        'data/wsfe_error_view.xml',
        'data/wsafip_fe_config.xml',
        'security/ir.model.access.csv',
        'wizard/query_invoices_view.xml',
        'wizard/validate_invoices_view.xml',
    ],
    # YAML test scenarios
    'test': [
        'test/test_key.yml',
        'test/partners.yml',
        'test/products.yml',
        'test/com_ri1.yml',
        'test/com_ri2.yml',
        'test/com_rm1.yml',
        'test/journal.yml',
    ],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
MelissaChan/MachineLearning | naiveBayes/bayes.py | Python | mit | 4,473 | 0.025242 | # __author__ = MelissaChan
# -*- coding: utf-8 -*-
# 16-2-14 下午4:37
# Convert word lists to feature vectors
# Build the vocabulary list
def createVocabList(dataSet):
    """Return the vocabulary: every unique word across all documents."""
    vocab = set()
    for doc in dataSet:
        vocab.update(doc)  # set union accumulates unique tokens
    return list(vocab)
# Convert a document to a set-of-words vector
def setOfWords2Vec(vocablist, inputset):
    """Convert a document into a binary (set-of-words) vector.

    Each slot of the returned vector is 1 if the corresponding vocabulary
    word appears in ``inputset`` (no matter how many times), else 0.
    Unknown words are reported on stdout and ignored.
    """
    returnVec = [0] * len(vocablist)
    for word in inputset:
        if word in vocablist:
            returnVec[vocablist.index(word)] = 1  # presence only, not count
        else:
            # Fix: parenthesized print runs identically on Python 2 and 3;
            # the former print *statement* was Python-2-only.
            print("the word: %s is not in my vocabulary!" % word)
    return returnVec
def bagOfWord2Vec(vocablist, inputset):
    """Convert a document into a bag-of-words (count) vector.

    Unlike setOfWords2Vec, each slot holds the number of times the
    vocabulary word occurs in ``inputset``.
    """
    returnVec = [0] * len(vocablist)
    for word in inputset:
        # Fix: guard against out-of-vocabulary words; the original raised
        # ValueError here, inconsistently with setOfWords2Vec which skips
        # (and reports) unknown words.
        if word in vocablist:
            returnVec[vocablist.index(word)] += 1
    return returnVec
# Toy data set
def loadDataSet():
    """Return a toy corpus and its labels (1 = abusive post, 0 = normal)."""
    posts = [
        'my dog has flea problems please help',
        'maybe not take him to dog park stupid',
        'my dalmatian is so cute I love him',
        'stop posting stupid worthless garbage',
        'mr licks ate my steak how to stop him',
        'quit buying worthless stupid dog food',
    ]
    postingList = [sentence.split() for sentence in posts]
    classVec = [0, 1, 0, 1, 0, 1]
    return postingList, classVec
# Training function
from numpy import *
def trainNB0(trainMatrix, trainCategory):
    """Estimate naive-Bayes parameters from training vectors.

    :param trainMatrix: 2-D array, one word-count vector per document
    :param trainCategory: 1-D array of labels (1 = abusive, 0 = normal)
    :return: (log P(word|class 0), log P(word|class 1), P(class 1))
    """
    num_docs = len(trainMatrix)
    num_words = len(trainMatrix[0])
    p_abusive = sum(trainCategory) / float(num_docs)
    # Laplace smoothing: counts start at 1 and denominators at 2 so that a
    # word unseen in one class never produces a zero probability.
    p0_counts, p1_counts = ones(num_words), ones(num_words)
    p0_total, p1_total = 2.0, 2.0
    for row, label in zip(trainMatrix, trainCategory):
        if label == 1:
            p1_counts += row
            p1_total += sum(row)
        else:
            p0_counts += row
            p0_total += sum(row)
    # Log-probabilities avoid floating-point underflow when many per-word
    # terms are later summed (multiplied in probability space).
    p0vec = log(p0_counts / p0_total)
    p1vec = log(p1_counts / p1_total)
    return p0vec, p1vec, p_abusive
# Classification function
def classify(vec2Classify, p0vec, p1vec, pClass1):
    """Return 1 (abusive) or 0 by comparing the two log-posteriors.

    log P(class|doc) is proportional to dot(word_vec, log P(w|class))
    plus log P(class); ties go to class 0.
    """
    log_p1 = sum(vec2Classify * p1vec) + log(pClass1)
    log_p0 = sum(vec2Classify * p0vec) + log(1.0 - pClass1)
    return 1 if log_p1 > log_p0 else 0
# Smoke test: runs at import time -- trains on the toy corpus and builds
# feature vectors for two sample posts (the classification prints below
# were left commented out by the author).
postlist,classvec = loadDataSet()
myVocabList = createVocabList(postlist)
# print myVocabList
trainMat = []
for postinDoc in postlist:
    trainMat.append(setOfWords2Vec(myVocabList,postinDoc))
p0v,p1v,pAb = trainNB0(array(trainMat),array(classvec))
testEntry = ['love','my','dalmatian']
testEntry2 = ['stupid','my','my']
thisDoc = array(setOfWords2Vec(myVocabList,testEntry))
thisDoc2 = array(setOfWords2Vec(myVocabList,testEntry2))
# print testEntry2,'classified as: ',classify(thisDoc2,p0v,p1v,pAb)
# print testEntry,'classified as: ',classify(thisDoc,p0v,p1v,pAb)
# print p0v
# print p1v
# Spam filter
# Text parsing
def textParse(bigString):
    """Tokenize raw text: split on runs of non-word characters, lowercase,
    and keep only tokens longer than two characters (drops punctuation
    debris and very short words).
    """
    import re
    # Fix: use r'\W+' instead of r'\W*'. A pattern that can match the empty
    # string splits between every character on Python 3.7+, destroying the
    # tokens; on Python 2 empty matches were silently skipped, so the
    # result is unchanged there.
    listOfTokens = re.split(r'\W+', bigString)
    return [tok.lower() for tok in listOfTokens if len(tok) > 2]
# The filter
def spamTest(spam_dir='/home/melissa/桌面/email/spam',
             ham_dir='/home/melissa/桌面/email/ham'):
    """Train and evaluate the spam filter with hold-out validation.

    Reads 25 spam and 25 ham mails (files ``1.txt`` .. ``25.txt``) from the
    given directories, holds out 10 random messages as the test set, trains
    a naive-Bayes model on the remaining 40 and prints the error rate.

    :param spam_dir: directory with the spam examples (defaults preserve
        the previously hard-coded location for backward compatibility)
    :param ham_dir: directory with the ham examples
    """
    # Load and tokenize the corpus: label 1 = spam, 0 = ham.
    docList = []; classList = []; fullText = []
    for i in range(1, 26):
        wordList = textParse(open('%s/%d.txt' % (spam_dir, i)).read())
        docList.append(wordList)
        fullText.extend(wordList)
        classList.append(1)
        wordList = textParse(open('%s/%d.txt' % (ham_dir, i)).read())
        docList.append(wordList)
        fullText.extend(wordList)
        classList.append(0)
    # Randomly carve 10 of the 50 documents out as the held-out test set.
    vocabList = createVocabList(docList)
    # Fix: materialize the range -- on Python 3 ``range`` objects do not
    # support ``del``; Python 2 behavior is identical.
    trainingSet = list(range(50)); testSet = []
    for i in range(10):
        randIndex = int(random.uniform(0, len(trainingSet)))
        testSet.append(trainingSet[randIndex])
        del(trainingSet[randIndex])
    trainMat = []; trainClasses = []
    for docIndex in trainingSet:
        trainMat.append(bagOfWord2Vec(vocabList, docList[docIndex]))
        trainClasses.append(classList[docIndex])
    p0V, p1V, pSpam = trainNB0(array(trainMat), array(trainClasses))
    # Classify the held-out documents and report the error rate.
    errorCount = 0
    for docIndex in testSet:
        wordVector = bagOfWord2Vec(vocabList, docList[docIndex])
        if classify(array(wordVector), p0V, p1V, pSpam) != classList[docIndex]:
            errorCount += 1
            print("classification error %s" % docList[docIndex])
    print('the error rate is: %s' % (float(errorCount) / len(testSet)))
# Run the evaluation 10 times at import time; the printed error rate varies
# with the random train/test split.  NOTE(review): consider guarding this
# with ``if __name__ == '__main__':`` so importing the module stays
# side-effect free -- confirm no caller relies on the current behavior.
for i in range(10):
    spamTest()
originaltebas/chmembers | app/informes/views.py | Python | mit | 42,730 | 0.000094 |
# app/informes/views.py
# coding: utf-8
from flask import redirect, render_template, url_for, request
from flask_login import current_user, login_required
from app.informes import informes
from app.informes.forms import FiltroInformePersonas
from app import db
from app.models import Miembro, EstadoCivil, TipoFamilia, Familia
from app.models import relacion_miembros_roles, Direccion
from app.models import TipoMiembro, GrupoCasero, Rol, RolFamiliar
from sqlalchemy import func
# Dentro de los informes hay algunos que tienen acceso
# de editor y otros que tienen solo administrador (Seg y Asis)
def check_edit_or_admin():
    """
    Redirect to the hub page when the current user is neither an editor
    nor an admin (get_urole() >= 1 admits both, per the ``== 2`` admin
    check in check_only_admin below).
    """
    # NOTE(review): callers invoke this helper as a bare statement, so the
    # redirect response returned here is discarded rather than sent to the
    # client -- confirm whether flask.abort()/raising was intended.
    if not current_user.get_urole() >= 1:
        return redirect(url_for("home.hub"))
def check_only_admin():
    """
    Redirect to the hub page unless the current user is an admin
    (get_urole() == 2).
    """
    # NOTE(review): the returned redirect is discarded by callers that do
    # not return this helper's value -- confirm intended behavior.
    if not current_user.get_urole() == 2:
        return redirect(url_for("home.hub"))
@informes.route('/informes/personas',
methods=['GET', 'POST'])
@login_required
def informe_personas():
"""
Listado de personas
"""
check_edit_or_admin()
form = FiltroInformePersonas()
form.EstadoCivil.choices = [(0, "Sin Filtros")] +\
[(row.id, row.nombre_estado)
for row in EstadoCivil.query.all()]
form.TipoMiembro.choices = [(0, "Sin Filtros")] +\
[(row.id, row.nombre_tipomiembro)
for row in TipoMiembro.query.all()]
form.RolFamiliar.choices = [(0, "Sin Filtros")] +\
[(row.id, row.nombre_rolfam)
for row in RolFamiliar.query.all()]
form.TipoFamilia.choices = [(0, "Sin Filtros")] +\
[(row.id, row.tipo_familia)
for row in TipoFamilia.query.all()]
form.GrupoCasero.choices = [(0, "Sin Filtros")] +\
[(row.id, row.nombre_grupo)
for row in GrupoCasero.query.all()]
if request.method == "POST":
if form.validate_on_submit():
'''
cuando entrar filtros
'''
roles = db.session.query(Rol).join(
relacion_miembros_roles,
relacion_miembros_roles.c.id_rol ==
Rol.id)\
.join(
Miembro,
Miembro.id ==
relacion_miembros_roles.c.id_miembro)\
.add_columns(
Miembro.id,
Rol.nombre_rol
)
nro_roles = db.session.query(Miembro.id,
func.count(Rol.id).label('contar'))\
.join(relacion_miembros_roles,
Miembro.id ==
relacion_miembros_roles.c.id_miembro)\
.join(Rol,
Rol.id ==
relacion_miembros_roles.c.id_rol)\
.group_by(Miembro).subquery()
query = db.session.query(Miembro)\
.outerjoin(Direccion,
Miembro.id_direccion ==
Direccion.id)\
.outerjoin(TipoMiembro,
Miembro.id_tipomiembro ==
TipoMiembro.id)\
.outerjoin(nro_roles,
Miembro.id ==
nro_roles.c.id)\
.outerjoin(Familia,
Miembro.id_familia ==
Familia.id)\
.outerjoin(GrupoCasero,
Miembro.id_grupocasero ==
GrupoCasero.id)\
.outerjoin(EstadoCivil,
| Miembro.id_estadocivil ==
EstadoCivil.id)\
.outerjoin(RolFamiliar,
| Miembro.id_rolfamiliar ==
RolFamiliar.id)\
.add_columns(
Miembro.id,
Miembro.fullname,
Miembro.email,
Miembro.telefono_fijo,
Miembro.telefono_movil,
Miembro.fecha_nac,
Miembro.fecha_inicio_icecha,
Miembro.fecha_miembro,
Miembro.fecha_bautismo,
EstadoCivil.nombre_estado,
RolFamiliar.nombre_rolfam,
Familia.apellidos_familia,
GrupoCasero.nombre_grupo,
TipoMiembro.nombre_tipomiembro,
Direccion.tipo_via,
Direccion.nombre_via,
Direccion.nro_via,
Direccion.portalescalotros_via,
Direccion.cp_via,
Direccion.ciudad_via,
Direccion.provincia_via,
Direccion.pais_via,
nro_roles.c.contar)
if form.EstadoCivil.data != 0:
user_attribute = getattr(Miembro, 'id_estadocivil')
user_filter = user_attribute == form.EstadoCivil.data
query = query.filter(user_filter)
if form.TipoFamilia.data != 0:
user_attribute = getattr(Familia, 'id_tipofamilia')
user_filter = user_attribute == form.TipoFamilia.data
query = query.filter(user_filter)
if form.RolFamiliar.data != 0:
user_attribute = getattr(Miembro, 'id_rolfamiliar')
user_filter = user_attribute == form.RolFamiliar.data
query = query.filter(user_filter)
if form.TipoMiembro.data != 0:
user_attribute = getattr(Miembro, 'id_tipomiembro')
user_filter = user_attribute == form.TipoMiembro.data
query = query.filter(user_filter)
if form.GrupoCasero.data != 0:
user_attribute = getattr(Miembro, 'id_grupocasero')
user_filter = user_attribute == form.GrupoCasero.data
query_miembros = query.all()
else:
# get
roles = db.session.query(Rol).join(relacion_miembros_roles,
relacion_miembros_roles.c.id_rol ==
Rol.id)\
.join(Miembro,
Miembro.id ==
relacion_miembros_roles.c.id_miembro)\
.add_columns(
Miembro.id,
Rol.nombre_rol
)
nro_roles = db.session.query(Miembro.id,
func.count(Rol.id).label('contar'))\
.outerjoin(relacion_miembros_roles,
Miembro.id ==
relacion_miembros_roles.c.id_miembro)\
|
christianurich/VIBe2UrbanSim | 3rdparty/opus/src/opus_gui/models_manager/controllers/submodel_editor.py | Python | gpl-2.0 | 42,990 | 0.004862 | # Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
from lxml import etree
from opus_gui.util import common_dialogs
import copy
from lxml.etree import SubElement
from PyQt4 import QtGui, QtCore
from opus_gui.util.convenience import create_qt_action
from opus_core.configurations.xml_configuration import get_variable_name
from opus_gui.util.convenience import dictionary_to_menu, hide_widget_on_value_change
from opus_gui.general_manager.general_manager_functions import get_built_in_variable_nodes, get_built_in_constant_node
from opus_gui.general_manager.general_manager_functions import get_variable_nodes_per_dataset
from opus_gui.util.icon_library import IconLibrary
from opus_gui.main.controllers.dialogs.message_box import MessageBox
from opus_gui.models_manager.views.ui_submodel_editor import Ui_SubModelEditor
from opus_gui.models_manager.models.submodel_structure_item import SubmodelStructureItem
from opus_gui.models_manager.models.variable_selector_table_model import VariableSelectorTableModel
class SubModelEditor(QtGui.QDialog, Ui_SubModelEditor):
'''
Submodel Editing dialog.
The editor support three different structures of submodels:
I call these structures Plain Structures, Equation Structures and Nested Structures
The Plain Structures are submodels that only have a variable list.
The Equation Structures are submodels that have one or more <equation>, with each equation
having it's own variable list.
The Nested Structures have one or more levels of <nest>:s. Each nest can either have an
<equation> (that in turn have a variable list) or another nest.
Nests can not have variable lists themselves.
The assignment of variables happen on either the different <equation>:s (in the case of
Equation Structures and Nested Structures) or on the submodel itself if it has a Plain Structure
The GUI dialog is made somewhat simpler if the submodel has a Plain Structure, as some
functionality is not needed in this case.
'''
def __init__(self, project, parent_widget = None):
QtGui.QDialog.__init__(self, parent_widget)
self.setupUi(self)
self.project = project
self.submodel_node = None # the submodel that we are editing (a copy of actual submodel node)
self.active_variables_node = None
self.selector_table_model = VariableSelectorTableModel(project)
self.tree_structure_editor.header().setStretchLastSection(True)
self.tree_structure_editor.header().setMinimumWidth(50)
self.frame_name_warning.setVisible(False)
self.pb_remove_variable.setVisible(False)
# hide the name warning when the user edit the name
hide_widget_on_value_change(self.lbl_name_warning, self.le_name)
S = QtCore.SIGNAL # temporarily use a shorter name for all the connections below
self.connect(self.selector_table_model, S('layoutChanged()'), self._selector_model_column_resize)
signal = S("currentItemChanged(QTreeWidgetItem*, QTreeWidgetItem*)")
self.connect(self.tree_structure_selector, signal, self._change_structure_node)
signal = S('currentIndexChanged(int)')
self.connect(self.cbo_dataset_filter, signal, self._update_available_variables)
# Setup Variable Selector Table
self.table_selected_variables.setModel(self.selector_table_mod | el)
self.table_selected_variables.horizontalHeader().setStretchLastSection(True)
self.table_selected_variables.verticalHeader().hide()
| self.table_selected_variables.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
signal = S("customContextMenuRequested(const QPoint &)")
self.connect(self.table_selected_variables, signal, self._right_click_variables)
f_create_nest = lambda x = 'nest': self.tree_structure_editor.create_structure_node(x)
f_create_equation = lambda x = 'equation': self.tree_structure_editor.create_structure_node(x)
self.connect(self.pb_create_nest, S('released()'), f_create_nest)
self.connect(self.pb_create_equation, S('released()'), f_create_equation)
self.connect(self.buttonBox, S('rejected()'), self.reject)
self.connect(self.buttonBox, S('accepted()'), self.validate_submodel_and_accept)
# the label "OK" can be confusing when switching between the structure
# editor and the variable selector. Some users clicked "OK" to confirm the structure changes
# Therefore we set a more explicit label.
self.buttonBox.button(self.buttonBox.Ok).setText('Save and Close')
signal = S('structure_changed')
self.connect(self.tree_structure_editor, signal, self._update_submodel_structure_trees)
signal = S('clicked()')
self.connect(self.pb_update_model_structure, signal, self.update_model_nested_structure)
def _lookup_model_node_for(self, node):
''' seek up the tree structure for the <model> parent of the submodel node '''
while node is not None:
if node.tag == 'model':
return node
node = node.getparent()
return None
def _change_structure_node(self, new_item, old_item):
self._set_variable_list_node(new_item.variable_list() if new_item else None)
    def _show_name_warning(self, text):
        '''Display *text* in the name-warning banner and move focus back to the
        name field with its content pre-selected for easy correction.'''
        self.lbl_name_warning.setText(text)
        self.frame_name_warning.setVisible(True)
        self.le_name.selectAll()
        self.le_name.setFocus()
    def _set_variable_list_node(self, variable_list_node):
        ''' populate the list of selected variables with the variable_spec nodes of the given
        variable_list_node (pass None to clear and disable the selector) '''
        # "save" the changes to the previously edited variable_list before changing active node
        self._apply_selected_variables(self.active_variables_node)
        self.active_variables_node = variable_list_node
        self.selector_table_model.clear()
        if variable_list_node is not None:
            for variable_spec_node in variable_list_node:
                self.selector_table_model.add_variable_spec_node(variable_spec_node)
            self.table_selected_variables.setEnabled(True)
            self._selector_model_column_resize()
            self.pb_show_picker.setEnabled(True)
        else:
            # no active variable list (e.g. a structural node is selected) --
            # disable the editing widgets so the user cannot add variables here
            self.table_selected_variables.setEnabled(False)
            self.pb_show_picker.setEnabled(False)
def _apply_selected_variables(self, variables_node):
if variables_node is None:
return
self.selector_table_model.apply_selected_variables(variables_node)
    def _update_submodel_structure_trees(self):
        ''' updates both of the tree widgets to show the structure of self.submodel_node '''
        # rebuild both trees from scratch; the editor tree is the editable one
        self.tree_structure_selector.clear()
        self.tree_structure_editor.clear()
        self._populate_structure_tree(self.submodel_node, False, self.tree_structure_selector)
        self._populate_structure_tree(self.submodel_node, True, self.tree_structure_editor)
        for tree_widget in [self.tree_structure_editor, self.tree_structure_selector]:
            tree_widget.resizeColumnToContents(0)
            tree_widget.resizeColumnToContents(1)
        # make the GUI a little simpler if the submodel is "plain" (i.e has no structural elements)
        # by automatically hiding the "structure selector" tree
        if self._in_simple_mode():
            self.split_struct_variables.setSizes([0, 10]) # hide structure selector
            # auto select the only variable_list
            self._set_variable_list_node(self.submodel_node.find('variable_list'))
        else:
            # make sure that the structure widget is visible
            if not self.pb_show_picker.isChecked():
                self.stack_struct_picker.setCurrentIndex(1)
            self.split_struct_variables.setSizes([10, 10])
            self._set_variable_list_node(None)
            # auto select the first structural element
            item = self.tree_structure_selector.topLevelItem(0)
            self.tree_structure_selector.setCurrentItem(item)
def _populate_structure_tree(self, |
unifycore/ryu | ryu/app/ofctl_rest.py | Python | apache-2.0 | 8,459 | 0.000236 | # Copyright (C) 2012 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ast
import json
import logging

from webob import Response

from ryu.base import app_manager
from ryu.controller import ofp_event
from ryu.controller import dpset
from ryu.controller.handler import MAIN_DISPATCHER
from ryu.controller.handler import set_ev_cls
from ryu.ofproto import ofproto_v1_0
from ryu.ofproto import ofproto_v1_3
from ryu.lib import ofctl_v1_0
from ryu.lib import ofctl_v1_3
from ryu.app.wsgi import ControllerBase, WSGIApplication
LOG = logging.getLogger('ryu.app.ofctl_rest')
# REST API
#
## Retrieve the switch stats
#
# get the list of all switches
# GET /stats/switches
#
# get the desc stats of the switch
# GET /stats/desc/<dpid>
#
# get flows stats of the switch
# GET /stats/flow/<dpid>
#
# get ports stats of the switch
# GET /stats/port/<dpid>
#
## Update the switch stats
#
# add a flow entry
# POST /stats/flowentry/add
#
# modify all matching flow entries
# POST /stats/flowentry/modify
#
# delete all matching flow entries
# POST /stats/flowentry/delete
#
# delete all flow entries of the switch
# DELETE /stats/flowentry/clear/<dpid>
#
class StatsController(ControllerBase):
    """WSGI controller implementing the /stats REST endpoints.

    ``data`` supplies two shared objects: the DPSet (registry of connected
    switches) and the ``waiters`` dict used by RestStatsApi to rendezvous
    with asynchronous stats replies.
    """

    def __init__(self, req, link, data, **config):
        super(StatsController, self).__init__(req, link, data, **config)
        self.dpset = data['dpset']
        self.waiters = data['waiters']

    def get_dpids(self, req, **_kwargs):
        """GET /stats/switches -- JSON list of all datapath ids."""
        dps = list(self.dpset.dps.keys())
        body = json.dumps(dps)
        return Response(content_type='application/json', body=body)

    def get_desc_stats(self, req, dpid, **_kwargs):
        """GET /stats/desc/<dpid> -- description stats of one switch."""
        dp = self.dpset.get(int(dpid))
        if dp is None:
            return Response(status=404)
        if dp.ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION:
            desc = ofctl_v1_0.get_desc_stats(dp, self.waiters)
        elif dp.ofproto.OFP_VERSION == ofproto_v1_3.OFP_VERSION:
            desc = ofctl_v1_3.get_desc_stats(dp, self.waiters)
        else:
            LOG.debug('Unsupported OF protocol')
            return Response(status=501)
        body = json.dumps(desc)
        return Response(content_type='application/json', body=body)

    def get_flow_stats(self, req, dpid, **_kwargs):
        """GET /stats/flow/<dpid> -- flow stats of one switch."""
        dp = self.dpset.get(int(dpid))
        if dp is None:
            return Response(status=404)
        if dp.ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION:
            flows = ofctl_v1_0.get_flow_stats(dp, self.waiters)
        elif dp.ofproto.OFP_VERSION == ofproto_v1_3.OFP_VERSION:
            flows = ofctl_v1_3.get_flow_stats(dp, self.waiters)
        else:
            LOG.debug('Unsupported OF protocol')
            return Response(status=501)
        body = json.dumps(flows)
        return Response(content_type='application/json', body=body)

    def get_port_stats(self, req, dpid, **_kwargs):
        """GET /stats/port/<dpid> -- port stats of one switch."""
        dp = self.dpset.get(int(dpid))
        if dp is None:
            return Response(status=404)
        if dp.ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION:
            ports = ofctl_v1_0.get_port_stats(dp, self.waiters)
        elif dp.ofproto.OFP_VERSION == ofproto_v1_3.OFP_VERSION:
            ports = ofctl_v1_3.get_port_stats(dp, self.waiters)
        else:
            LOG.debug('Unsupported OF protocol')
            return Response(status=501)
        body = json.dumps(ports)
        return Response(content_type='application/json', body=body)

    def mod_flow_entry(self, req, cmd, **_kwargs):
        """POST /stats/flowentry/{add|modify|delete} -- change flow entries.

        The request body is a dict literal describing the flow.  It is parsed
        with ``ast.literal_eval`` (literals only) instead of ``eval`` so that
        a malicious request body cannot execute arbitrary code.
        """
        try:
            # literal_eval raises SyntaxError for malformed input and
            # ValueError for syntactically valid but non-literal expressions.
            flow = ast.literal_eval(req.body)
        except (SyntaxError, ValueError):
            LOG.debug('invalid syntax %s', req.body)
            return Response(status=400)
        dpid = flow.get('dpid')
        dp = self.dpset.get(int(dpid))
        if dp is None:
            return Response(status=404)
        if cmd == 'add':
            cmd = dp.ofproto.OFPFC_ADD
        elif cmd == 'modify':
            cmd = dp.ofproto.OFPFC_MODIFY
        elif cmd == 'delete':
            cmd = dp.ofproto.OFPFC_DELETE
        else:
            return Response(status=404)
        if dp.ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION:
            ofctl_v1_0.mod_flow_entry(dp, flow, cmd)
        elif dp.ofproto.OFP_VERSION == ofproto_v1_3.OFP_VERSION:
            ofctl_v1_3.mod_flow_entry(dp, flow, cmd)
        else:
            LOG.debug('Unsupported OF protocol')
            return Response(status=501)
        return Response(status=200)

    def delete_flow_entry(self, req, dpid, **_kwargs):
        """DELETE /stats/flowentry/clear/<dpid> -- delete all flow entries."""
        dp = self.dpset.get(int(dpid))
        if dp is None:
            return Response(status=404)
        if dp.ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION:
            ofctl_v1_0.delete_flow_entry(dp)
        elif dp.ofproto.OFP_VERSION == ofproto_v1_3.OFP_VERSION:
            ofctl_v1_3.mod_flow_entry(dp, {}, dp.ofproto.OFPFC_DELETE)
        else:
            LOG.debug('Unsupported OF protocol')
            return Response(status=501)
        return Response(status=200)
class RestStatsApi(app_manager.RyuApp):
    """Ryu application exposing switch statistics over a REST API."""
    # OpenFlow versions this app can talk to.
    OFP_VERSIONS = [ofproto_v1_0.OFP_VERSION,
                    ofproto_v1_3.OFP_VERSION]
    # Contexts injected by the app manager (repaired a corrupted line here).
    _CONTEXTS = {
        'dpset': dpset.DPSet,
        'wsgi': WSGIApplication
    }
def __init__(self, *args, **kwargs):
super(RestStatsApi, self).__init__(*args, **kwargs)
self.dpset = kwargs['dpset']
wsgi = kwargs['wsgi']
self.waiters = {}
self.data = {}
self.data['dpset'] = self.dpset
self.data['waiters'] = self.waiters
mapper = wsgi.mapper
wsgi.registory['StatsController' | ] = self.data
path = '/stats'
uri = path + '/switches'
mapper.connect('stats', uri,
controller=StatsController, action='get_dpids',
conditions=dict(method=['GET']))
uri = path + '/desc/{dpid}'
mapper.connect('stats', uri,
controller=StatsController, action='get_desc_stats',
conditions=dict(method=['GET']))
uri = path + '/flow/{dpid}'
mapper.connect('stats', uri,
controller=StatsController, action='get_flow_stats',
conditions=dict(method=['GET']))
uri = path + '/port/{dpid}'
mapper.connect('stats', uri,
controller=StatsController, action='get_port_stats',
conditions=dict(method=['GET']))
uri = path + '/flowentry/{cmd}'
mapper.connect('stats', uri,
controller=StatsController, action='mod_flow_entry',
conditions=dict(method=['POST']))
uri = path + '/flowentry/clear/{dpid}'
mapper.connect('stats', uri,
controller=StatsController, action='delete_flow_entry',
conditions=dict(method=['DELETE']))
    def stats_reply_handler(self, ev):
        # Collect the (possibly multi-part) stats reply for a pending request
        # and wake the waiting request thread once the final part arrives.
        msg = ev.msg
        dp = msg.datapath
        if dp.id not in self.waiters:
            return
        if msg.xid not in self.waiters[dp.id]:
            return
        lock, msgs = self.waiters[dp.id][msg.xid]
        msgs.append(msg)
        # The "more replies follow" flag is spelled differently per OF version.
        flags = 0
        if dp.ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION:
            flags = dp.ofproto.OFPSF_REPLY_MORE
        elif dp.ofproto.OFP_VERSION == ofproto_v1_3.OFP_VERSION:
            flags = dp.ofproto.OFPMPF_REPLY_MORE
        if msg.flags & flags:
            # more segments of this reply are still in flight; keep waiting
            return
        del self.waiters[dp.id][msg.xid]
        lock.set()
    @set_ev_cls(ofp_event.EventOFPDescStatsReply, MAIN_DISPATCHER)
    def desc_stats_reply_handler(self, ev):
        """Forward desc-stats replies to the common reply collector."""
        self.stats_reply_handler(ev)
@set_ev_cls(of |
JaeGyu/PythonEx_1 | flask21_for_angular.py | Python | mit | 217 | 0.013825 | from flask import Flask, jsonify
from flask_restplus import Api, Resource, fields
app = Flask(__name__)  # WSGI application instance
api = Api(app)  # Flask-RESTPlus API wrapper (instantiated but not otherwise used here)
@app.route("/")
def index():
    """Root endpoint; returns an empty response body (placeholder)."""
    return ""
if __name__ == "__main__":
    # NOTE(review): debug=True enables the interactive debugger -- development only.
    app.run(debug=True)
|
xydinesh/euwe | setup.py | Python | apache-2.0 | 1,302 | 0 | import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.txt')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.r | ead()
requires = [
'pyramid',
'pyramid_chameleon',
'pyramid_mako',
'pyramid_debugtoolbar',
'pyramid_tm',
'SQLAlchemy',
'transaction',
'zope.sqlalchemy',
'waitress',
'WebTest',
'selenium',
'nose',
'pyramid_persona',
]
setup(name='euwe',
version='0.0',
descri | ption='euwe',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Dinesh Weerapurage',
author_email='xydinesh@gmail.com',
url='',
keywords='web wsgi bfg pylons pyramid',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
test_suite='euwe',
install_requires=requires,
entry_points="""\
[paste.app_factory]
main = euwe:main
[console_scripts]
initialize_euwe_db = euwe.scripts.initializedb:main
""",
)
|
Uberlearner/uberlearner | uberlearner/accounts/api/api.py | Python | mit | 1,436 | 0.005571 | from avatar.templatetags.avatar_tags import avatar_url
from django.conf import settings
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from tastypie import fields
from tastypie.resources import ModelResource
from accounts.models import UserProfile
from main.api.authentication import UberAuthentication
from main.api.serializers import UberSerializer
class UserResource(ModelResource):
    """Read-only tastypie endpoint exposing User rows at /users/."""
    #profile = fields.ForeignKey('accounts.api.UserProfileResource', 'profile', full=True)
    class Meta:
        queryset = User.objects.all()
        authentication = UberAuthentication()
        #authorization = CourseAuthorization()
        resource_name = 'users'
        fields = ['username', 'first_name', 'last_name', 'last_login', 'profile']
        allowed_methods = ['get']
        include_absolute_url = True
        serializer = UberSerializer()

    def dehydrate(self, bundle):
        """Add derived, display-oriented fields to each serialized user.

        (Repaired corrupted tokens: the method name and the URL pattern name
        'account_user_profile_with_username' had been garbled in the source.)
        """
        bundle.data['absolute_url'] = reverse('account_user_profile_with_username', kwargs={'username': bundle.obj.username})
        bundle.data['best_name'] = bundle.obj.profile.get_best_name()
        bundle.data['tiny_thumbnail'] = avatar_url(bundle.obj, size=settings.AVATAR_SIZE_IN_ENROLLMENTS_GRID)
        return bundle
class UserProfileResource(ModelResource):
    """Tastypie endpoint exposing UserProfile rows at /profiles/."""
    class Meta:
        queryset = UserProfile.objects.all()
        authentication = UberAuthentication()
        resource_name = 'profiles'
|
the-fascinator/fascinator-portal | src/main/config/portal/default/default/scripts/actions/authtest.py | Python | gpl-2.0 | 1,050 | 0.001905 | from com.googlecode.fascinator.common import JsonSimple
class AuthtestData:
    """JSONP endpoint reporting whether the current session is authenticated.

    (Repaired two corrupted lines: the second 'if jsonpCallback is None'
    guard and the final writer.println call had been garbled in the source.)
    """
    def __init__(self):
        pass

    def __activate__(self, context):
        """Write '<callback>({"isAuthenticated": ...})' to the response.

        Responds 403 when no JSONP callback parameter is supplied, since this
        interface only answers JSONP requests.
        """
        request = context["request"]
        response = context["response"]
        writer = response.getPrintWriter("text/javascript; charset=UTF-8")
        result = JsonSimple()
        ## Look for the JSONP callback to use
        jsonpCallback = request.getParameter("callback")
        if jsonpCallback is None:
            jsonpCallback = request.getParameter("jsonp_callback")
        if jsonpCallback is None:
            response.setStatus(403)
            writer.println("Error: This interface only responds to JSONP")
            writer.close()
            return
        if context["page"].authentication.is_logged_in():
            result.getJsonObject().put("isAuthenticated", "true")
        else:
            result.getJsonObject().put("isAuthenticated", "false")
        writer.println(jsonpCallback + "(" + result.toString() + ")")
        writer.close()
|
deependhulla/technomail-debian | files/html_oldx/groupoffice/modules/chat/converse.js-0.8.6/docs/source/conf.py | Python | gpl-3.0 | 9,866 | 0.005575 | # -*- coding: utf-8 -*-
#
# Converse.js documentation build configuration file, created by
# sphinx-quickstart on Fri Apr 26 20:48:03 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.todo']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Converse.js'
copyright = u'2014, JC Brand'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.8.6'
# The full version, including alpha/beta/rc tags.
release = '0.8.6'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
import sphinx_bootstrap_theme
html_theme = 'bootstrap'
html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# (Optional) Logo. Should be small enough to fit the navbar (ideally 24x24).
# Path should be relative to the ``_static`` files directory.
html_logo = "_static/conversejs_small.png"
# Theme options are theme-specific and customize the look and feel of a
# theme further.
html_theme_options = {
    # Navigation bar title. (Default: ``project`` value)
    'navbar_title': "Converse.js",

    # Tab name for entire site. (Default: "Site")
    'navbar_site_name': "Table of Contents",

    # A list of tuples containing pages or urls to link to.
    # Valid tuples should be in the following forms:
    # (name, page) # a link to a page
    # (name, "/aa/bb", 1) # a link to an arbitrary relative url
    # (name, "http://example.com", True) # arbitrary absolute url
    # Note the "1" or "True" value above as the third argument to indicate
    # an arbitrary url.
    'navbar_links': [
        ("Homepage", "https://conversejs.org", True)
    ],

    # Render the next and previous page links in navbar. (Default: true)
    'navbar_sidebarrel': True,

    # Render the current pages TOC in the navbar. (Default: true)
    'navbar_pagenav': True,

    # Tab name for the current pages TOC. (Default: "Page")
    'navbar_pagenav_name': "Current Page",

    # Global TOC depth for "site" navbar tab. (Default: 1)
    # Switching to -1 shows all levels.
    'globaltoc_depth': 2,

    # Include hidden TOCs in Site navbar?
    #
    # Note: If this is "false", you cannot have mixed ``:hidden:`` and
    # non-hidden ``toctree`` directives in the same page, or else the build
    # will break.
    #
    # Values: "true" (default) or "false"
    'globaltoc_includehidden': "true",

    # HTML navbar class (Default: "navbar") to attach to <div> element.
    # For black navbar, do "navbar navbar-inverse"
    'navbar_class': "navbar",

    # Fix navigation bar to top of page?
    # Values: "true" (default) or "false"
    'navbar_fixed_top': "true",

    # Location of link to source.
    # Options are "nav" (default), "footer" or anything else to exclude.
    'source_link_position': "footer",

    # Bootswatch (http://bootswatch.com/) theme.
    # Options are nothing (default) or the name of a valid theme
    # such as "amelia" or "cosmo".
    # 'bootswatch_theme': "yeti",

    # Choose Bootstrap version.
    # Values: "3" (default) or "2" (in quotes)
    'bootstrap_version': "3",
}
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = "_static/favicon.ico"
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Conversejsdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [how |
dongsenfo/pymatgen | pymatgen/alchemy/tests/test_transmuters.py | Python | mit | 4,616 | 0.001733 | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import warnings
import unittest
import os

# (Repaired two corrupted lines below: the OrderDisorderedStructureTransformation
# import and the "Created on" docstring had been garbled in the source.)
from pymatgen.alchemy.transmuters import CifTransmuter, PoscarTransmuter
from pymatgen.alchemy.filters import ContainsSpecieFilter
from pymatgen.transformations.standard_transformations import \
    SubstitutionTransformation, RemoveSpeciesTransformation, \
    OrderDisorderedStructureTransformation
from pymatgen.transformations.advanced_transformations import \
    SuperTransformation

'''
Created on Mar 5, 2012
'''

__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__date__ = "Mar 5, 2012"

# Directory containing the shared test fixture files.
test_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..",
                        'test_files')
class CifTransmuterTest(unittest.TestCase):
    """Tests for CifTransmuter: applying transformations to CIF structures."""

    def setUp(self):
        # silence pymatgen warnings during the test run
        warnings.simplefilter("ignore")

    def tearDown(self):
        warnings.simplefilter("default")

    def test_init(self):
        # Substitute Fe (any oxidation state) with Mn in every structure.
        trans = [SubstitutionTransformation({"Fe": "Mn", "Fe2+": "Mn2+"})]
        tsc = CifTransmuter.from_filenames([os.path.join(test_dir,
                                                         "MultiStructure.cif")],
                                           trans)
        self.assertEqual(len(tsc), 2)
        expected_ans = {"Mn", "O", "Li", "P"}
        for s in tsc:
            els = {el.symbol
                   for el in s.final_structure.composition.elements}
            self.assertEqual(expected_ans, els)
class PoscarTransmuterTest(unittest.TestCase):
    """Tests for PoscarTransmuter and the transformed-structure collection API."""

    def test_init(self):
        trans = [SubstitutionTransformation({"Fe": "Mn"})]
        tsc = PoscarTransmuter.from_filenames([os.path.join(test_dir,
                                                            "POSCAR"),
                                               os.path.join(test_dir,
                                                            "POSCAR")],
                                              trans)
        self.assertEqual(len(tsc), 2)
        expected_ans = {"Mn", "O", "P"}
        for s in tsc:
            els = {el.symbol
                   for el in s.final_structure.composition.elements}
            self.assertEqual(expected_ans, els)

    def test_transmuter(self):
        tsc = PoscarTransmuter.from_filenames(
            [os.path.join(test_dir, "POSCAR")])
        tsc.append_transformation(RemoveSpeciesTransformation('O'))
        self.assertEqual(len(tsc[0].final_structure), 8)
        tsc.append_transformation(SubstitutionTransformation({"Fe":
                                                              {"Fe2+": 0.25,
                                                               "Mn3+": .75},
                                                              "P": "P5+"}))
        tsc.append_transformation(OrderDisorderedStructureTransformation(),
                                  extend_collection=50)
        self.assertEqual(len(tsc), 4)
        t = SuperTransformation([SubstitutionTransformation({"Fe2+": "Mg2+"}),
                                 SubstitutionTransformation({"Fe2+": "Zn2+"}),
                                 SubstitutionTransformation({"Fe2+": "Be2+"})])
        tsc.append_transformation(t, extend_collection=True)
        self.assertEqual(len(tsc), 12)
        for x in tsc:
            # should be 4 transformations + the starting structure
            self.assertEqual(len(x), 5, 'something might be wrong with the number of transformations in the history')
        # test the filter
        tsc.apply_filter(ContainsSpecieFilter(['Zn2+', 'Be2+', 'Mn4+'],
                                              strict_compare=True, AND=False))
        self.assertEqual(len(tsc), 8)
        self.assertEqual(tsc.transformed_structures[0].as_dict()[
            'history'][-1]['@class'], 'ContainsSpecieFilter')
        tsc.apply_filter(ContainsSpecieFilter(['Be2+']))
        self.assertEqual(len(tsc), 4)
        # Test set_parameter and add_tags.
        tsc.set_parameter("para1", "hello")
        self.assertEqual(tsc.transformed_structures[0]
                         .as_dict()['other_parameters']['para1'], 'hello')
        tsc.add_tags(["world", "universe"])
        self.assertEqual(tsc.transformed_structures[0]
                         .as_dict()['other_parameters']['tags'],
                         ["world", "universe"])
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
skosukhin/spack | lib/spack/spack/cmd/debug.py | Python | lgpl-2.1 | 3,522 | 0 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import re
from datetime import datetime
from glob import glob
import llnl.util.tty as tty
from llnl.util.filesystem import working_dir
import spack
from spack.util.executable import which
description = "debugging commands for troubleshooting Spack"
section = "developer"
level = "long"
def setup_parser(subparser):
    """Register the ``debug`` subcommands on *subparser*."""
    subcommands = subparser.add_subparsers(
        metavar='SUBCOMMAND', dest='debug_command')
    subcommands.add_parser(
        'create-db-tarball',
        help="create a tarball of Spack's installation metadata")
def _debug_tarball_suffix():
    """Build a unique suffix for the debug tarball name.

    Combines the current git branch and commit (when Spack runs from a git
    checkout) with a timestamp; falls back to 'nobranch' markers otherwise.
    (Repaired corrupted tokens: the 'git = which(...)' assignment and the
    first 'rev-parse' call had been garbled in the source.)
    """
    now = datetime.now()
    suffix = now.strftime('%Y-%m-%d-%H%M%S')
    git = which('git')
    if not git:
        return 'nobranch-nogit-%s' % suffix
    with working_dir(spack.spack_root):
        if not os.path.isdir('.git'):
            return 'nobranch.nogit.%s' % suffix
        # Get symbolic branch name and strip any special chars (mainly '/')
        symbolic = git(
            'rev-parse', '--abbrev-ref', '--short', 'HEAD', output=str).strip()
        symbolic = re.sub(r'[^\w.-]', '-', symbolic)
        # Get the commit hash too.
        commit = git(
            'rev-parse', '--short', 'HEAD', output=str).strip()
        # In detached-HEAD state the "branch" resolves to the commit itself.
        if symbolic == commit:
            return "nobranch.%s.%s" % (commit, suffix)
        else:
            return "%s.%s.%s" % (symbolic, commit, suffix)
def create_db_tarball(args):
    """Create a gzipped tarball of the install DB index and all spec.yaml files.

    Fixes: removed a dead ``transform_args = []`` initialization that was
    immediately overwritten, and renamed the local ``args`` list which
    shadowed the ``args`` parameter.
    """
    tar = which('tar')
    tarball_name = "spack-db.%s.tar.gz" % _debug_tarball_suffix()
    tarball_path = os.path.abspath(tarball_name)
    base = os.path.basename(spack.store.root)
    # GNU tar and BSD tar spell the path-rewriting option differently.
    if 'GNU' in tar('--version', output=str):
        transform_args = ['--transform', 's/^%s/%s/' % (base, tarball_name)]
    else:
        transform_args = ['-s', '/^%s/%s/' % (base, tarball_name)]
    wd = os.path.dirname(spack.store.root)
    with working_dir(wd):
        # the DB index plus every installed package's spec.yaml
        files = [spack.store.db._index_path]
        files += glob('%s/*/*/*/.spack/spec.yaml' % base)
        files = [os.path.relpath(f) for f in files]
        tar_args = ['-czf', tarball_path]
        tar_args += transform_args
        tar_args += files
        tar(*tar_args)
    tty.msg('Created %s' % tarball_name)
def debug(parser, args):
    """Entry point for ``spack debug``: dispatch to the chosen subcommand."""
    subcommands = {'create-db-tarball': create_db_tarball}
    subcommands[args.debug_command](args)
|
cgimenop/Excel2Testlink | ExcelParser/lib/openpyxl/chart/tests/test_reference.py | Python | mit | 2,862 | 0.004542 | from __future__ import absolute_import
# Copyright (c) 2010-2015 openpyxl
import pytest
from openpyxl.xml.functions import fromstring, tostring
from openpyxl.tests.helper import compare_xml
@pytest.fixture
def Reference():
    """Fixture providing the Reference class under test."""
    from ..reference import Reference
    return Reference
@pytest.fixture
def Worksheet():
    """Fixture providing a minimal worksheet stand-in (title attribute only).

    (Repaired a corrupted ``__init__`` method name from the source.)
    """
    class DummyWorksheet:

        def __init__(self, title="dummy"):
            self.title = title

    return DummyWorksheet
class TestReference:
    """Unit tests for chart cell references (construction and iteration)."""

    def test_ctor(self, Reference, Worksheet):
        # A full rectangular range renders as absolute A1-style coordinates.
        ref = Reference(
            worksheet=Worksheet(),
            min_col=1,
            min_row=1,
            max_col=10,
            max_row=12
        )
        assert str(ref) == "dummy!$A$1:$J$12"

    def test_single_cell(self, Reference, Worksheet):
        # Without max bounds the reference collapses to a single cell.
        ref = Reference(Worksheet(), min_col=1, min_row=1)
        assert str(ref) == "dummy!$A$1"

    def test_from_string(self, Reference):
        # A range string must round-trip through the parsed bounds.
        ref = Reference(range_string="Sheet1!$A$1:$A$10")
        assert (ref.min_col, ref.min_row, ref.max_col, ref.max_row) == (1,1, 1,10)
        assert str(ref) == "Sheet1!$A$1:$A$10"

    def test_cols(self, Reference):
        # Column-wise iteration yields one tuple of cells per column.
        ref = Reference(range_string="Sheet!A1:B2")
        assert list(ref.cols) == [
            ('A1', 'A2'),
            ('B1', 'B2')
        ]

    def test_rows(self, Reference):
        # Row-wise iteration yields one tuple of cells per row.
        ref = Reference(range_string="Sheet!A1:B2")
        assert list(ref.rows) == [
            ('A1', 'B1'),
            ('A2', 'B2')
        ]

    @pytest.mark.parametrize("range_string, cells",
                             [
                                 ("Sheet!A1:A5", ['A1', 'A2', 'A3', 'A4', 'A5']),
                                 ("Sheet!A1:E1", ['A1', 'B1', 'C1', 'D1', 'E1']),
                             ]
                             )
    def test_cells(self, Reference, range_string, cells):
        # Flat cell iteration works for both vertical and horizontal ranges.
        ref = Reference(range_string=range_string)
        assert list(ref.cells) == cells

    @pytest.mark.parametrize("range_string, cell, min_col, min_row",
                             [
                                 ("Sheet1!A1:A10", 'A1', 1, 2),
                                 ("Sheet!A1:E1", 'A1', 2, 1),
                             ]
                             )
    def test_pop(self, Reference, range_string, cell, min_col, min_row):
        # pop() removes the first cell and advances the range's start bound.
        ref = Reference(range_string=range_string)
        assert cell == ref.pop()
        assert (ref.min_col, ref.min_row) == (min_col, min_row)

    @pytest.mark.parametrize("range_string, length",
                             [
                                 ("Sheet1!A1:A10", 10),
                                 ("Sheet!A1:E1", 5),
                             ]
                             )
    def test_length(self, Reference, range_string, length):
        # len() reports the number of cells spanned by the reference.
        ref = Reference(range_string=range_string)
        assert len(ref) == length
|
iftekeriba/softlayer-python | SoftLayer/CLI/server/list.py | Python | mit | 2,923 | 0 | """List hardware servers."""
# :license: MIT, see LICENSE for more details.
import SoftLayer
from SoftLayer.CLI import columns as column_helper
from SoftLayer.CLI import environment
from SoftLayer.CLI import formatting
from SoftLayer.CLI import helpers
import click
# pylint: disable=unnecessary-lambda
# All output columns the user may request; each maps a display name to a
# property path (or callable) into the SoftLayer_Hardware_Server datatype.
COLUMNS = [
    column_helper.Column('guid', ('globalIdentifier',)),
    column_helper.Column('primary_ip', ('primaryIpAddress',)),
    column_helper.Column('backend_ip', ('primaryBackendIpAddress',)),
    column_helper.Column('datacenter', ('datacenter', 'name')),
    column_helper.Column(
        'action',
        lambda server: formatting.active_txn(server),
        # Extra object mask needed so the active transaction is fetched.
        mask='''
        mask(SoftLayer_Hardware_Server)[activeTransaction[
            id,transactionStatus[name,friendlyName]
        ]]'''),
    column_helper.Column('power_state', ('powerState', 'name')),
    column_helper.Column(
        'created_by',
        ('billingItem', 'orderItem', 'order', 'userRecord', 'username')),
    column_helper.Column(
        'tags',
        lambda server: formatting.tags(server.get('tagReferences')),
        mask="tagReferences.tag.name"),
]

# Columns shown when the user does not pass --columns explicitly.
DEFAULT_COLUMNS = [
    'id',
    'hostname',
    'primary_ip',
    'backend_ip',
    'datacenter',
    'action',
]
@click.command()
@click.option('--cpu', '-c', help='Filter by number of CPU cores')
@click.option('--domain', '-D', help='Filter by domain')
@click.option('--datacenter', '-d', help='Filter by datacenter')
@click.option('--hostname', '-H', help='Filter by hostname')
@click.option('--memory', '-m', help='Filter by memory in gigabytes')
@click.option('--network', '-n', help='Filter by network port speed in Mbps')
@helpers.multi_option('--tag', help='Filter by tags')
@click.option('--sortby', help='Column to sort by', default='hostname')
@click.option('--columns',
              callback=column_helper.get_formatter(COLUMNS),
              help='Columns to display. Options: %s'
                   % ', '.join(column.name for column in COLUMNS),
              default=','.join(DEFAULT_COLUMNS))
@environment.pass_env
def cli(env, sortby, cpu, domain, datacenter, hostname, memory, network, tag,
        columns):
    """List hardware servers."""
    hardware_manager = SoftLayer.HardwareManager(env.client)
    # Every CLI filter maps straight onto a list_hardware() keyword; the
    # object mask is derived from the columns the user asked for.
    matching = hardware_manager.list_hardware(hostname=hostname,
                                              domain=domain,
                                              cpus=cpu,
                                              memory=memory,
                                              datacenter=datacenter,
                                              nic_speed=network,
                                              tags=tag,
                                              mask=columns.mask())

    table = formatting.Table(columns.columns)
    table.sortby = sortby
    for hardware in matching:
        # Render empty cells as the formatter's blank marker.
        row = [cell or formatting.blank() for cell in columns.row(hardware)]
        table.add_row(row)
    env.fout(table)
|
dkopecek/amplify | third-party/quex-0.65.2/quex/engine/analyzer/mega_state/template/gain_transition_map.py | Python | gpl-2.0 | 4,059 | 0.0069 | # vim:set encoding=utf8:
# (C) 2010-2012 Frank-Rene Schäfer
from quex.engine.analyzer.mega_state.target import TargetByStateKey
from quex.engine.analyzer.transition_map import TransitionMap
from itertools import chain
def do(ATm, AStateN, ASchemeN, BTm, BStateN, BSchemeN):
    """Estimate the gain achieved by combining two transition maps into a
    single one.

    *Tm      -- transition map.
    *StateN  -- number of implemented states.
    *SchemeN -- number of different target schemes in the transition map.

    RETURNS: cost(A) + cost(B) - cost(A combined with B)
    """
    # What the two transition maps cost when implemented separately ...
    separate_cost = __transition_map_cost(AStateN, len(ATm), ASchemeN) \
                    + __transition_map_cost(BStateN, len(BTm), BSchemeN)

    # ... versus the cost of one combined transition map.
    combined_cost = _transition_cost_combined(ATm, BTm, AStateN + BStateN)

    return separate_cost - combined_cost
def _transition_cost_combined(TM_A, TM_B, ImplementedStateN):
    """Compute the storage consumption of the transition map resulting
    from combining 'TM_A' and 'TM_B'.

    ImplementedStateN -- total number of states implemented by the
                         combined transition map.
    """
    # Count the number of unique schemes and the total interval number
    scheme_set = set()
    uniform_target_n = 0
    interval_n = 0
    for begin, end, a_target, b_target in TransitionMap.izip(TM_A, TM_B):
        interval_n += 1
        # BUG FIX: the second operand used to be 'a_target.uniform_door_id'
        # again, which compared the value with itself and so was always
        # true.  The intent (see 'update_scheme_set()', which compares
        # TA against TB) is to detect that BOTH targets share the same
        # uniform door id; otherwise a scheme is required.
        if a_target.uniform_door_id is not None \
           and a_target.uniform_door_id == b_target.uniform_door_id:
            uniform_target_n += 1
        else:
            update_scheme_set(scheme_set, a_target, b_target)

    # The number of different schemes:
    scheme_n = len(scheme_set)

    return __transition_map_cost(ImplementedStateN, interval_n, scheme_n)
def __transition_map_cost(ImplementedStateN, IntervalN, SchemeN):
    """ImplementedStateN -- Number of states implemented in the scheme.
    IntervalN         -- Number of intervals in the transition map.
    SchemeN           -- Number of DIFFERENT schemes in the transition map.

    Find a number proportional to the 'cost' of the transition map.
    Example:

        interval 1 --> [1, 3, 5, 1]
        interval 2 --> drop_out
        interval 3 --> [1, 3, 5, 1]
        interval 4 --> 5
        interval 5 --> [1, 3, 5, 1]
        interval 6 --> [2, 1, 1, 2]

    This transition map has 5 borders and 5 targets.  A border and a
    single target each cost '1'.  Every DIFFERENT scheme additionally
    costs 'number of scheme elements', i.e. the number of implemented
    states; identical schemes are counted once.
    """
    # N intervals are separated by N - 1 borders.
    border_cost = IntervalN - 1
    # One target entry per interval, plus the per-state storage of each
    # distinct scheme.
    target_cost = IntervalN + SchemeN * ImplementedStateN
    return border_cost + target_cost
def update_scheme_set(scheme_set, TA, TB):
    """This function is used to count the number of different schemes in a
    combination of transition maps. The number of different schemes is used
    to determine the cost a combination of transition maps.

    NOTE: The use of 'hash' has the potential to miss a non-equal occurrence.
          The value is only for metrics. So its no great deal.

    RETURNS: True  -- if size remains the same
             False -- if size increases (scheme was new)
    """
    assert isinstance(TA, TargetByStateKey)
    assert isinstance(TB, TargetByStateKey)

    # The 'common drop_out case' is covered by 'uniform_door_id'
    # NOTE(review): this early 'False' reports without adding to the set --
    # apparently intentional since the caller ignores the return value and
    # counts the uniform case itself; confirm.
    if TA.uniform_door_id is not None:
        if TA.uniform_door_id == TB.uniform_door_id:
            return False

    # Fold both targets' door-id schemes into one fingerprint, so equal
    # (TA, TB) pairs collapse onto the same set entry.
    my_hash = 0x5A5A5A5A
    prime = 1299827  # Use a huge prime number for deterministic randomization
    for i, x in enumerate(chain(TA.iterable_door_id_scheme(),
                                TB.iterable_door_id_scheme())):
        my_hash ^= hash(x) * i
        my_hash ^= prime

    # A size increase means the scheme fingerprint was new.
    size_before = len(scheme_set)
    scheme_set.add(my_hash)
    return size_before == len(scheme_set)
|
shubhdev/edx-platform | lms/djangoapps/discussion_api/tests/test_views.py | Python | agpl-3.0 | 28,826 | 0.001249 | """
Tests for Discussion API views
"""
from datetime import datetime
import json
from urlparse import urlparse
import httpretty
import mock
from pytz import UTC
from django.core.urlresolvers import reverse
from rest_framework.test import APIClient
from discussion_api.tests.utils import (
CommentsServiceMockMixin,
make_minimal_cs_comment,
make_minimal_cs_thread,
)
from student.tests.factories import CourseEnrollmentFactory, UserFactory
from util.testing import UrlResetMixin
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
class DiscussionAPIViewTestMixin(CommentsServiceMockMixin, UrlResetMixin):
    """
    Mixin for common code in tests of Discussion API views. This includes
    creation of common structures (e.g. a course, user, and enrollment), logging
    in the test client, utility functions, and a test case for unauthenticated
    requests. Subclasses must set self.url in their setUp methods.
    """
    client_class = APIClient

    @mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
    def setUp(self):
        super(DiscussionAPIViewTestMixin, self).setUp()
        self.maxDiff = None  # pylint: disable=invalid-name
        # A minimal course with a single discussion topic for tests to target.
        self.course = CourseFactory.create(
            org="x",
            course="y",
            run="z",
            start=datetime.now(UTC),
            discussion_topics={"Test Topic": {"id": "test_topic"}}
        )
        # An enrolled user, logged in so views see an authenticated request.
        self.password = "password"
        self.user = UserFactory.create(password=self.password)
        CourseEnrollmentFactory.create(user=self.user, course_id=self.course.id)
        self.client.login(username=self.user.username, password=self.password)

    def assert_response_correct(self, response, expected_status, expected_content):
        """
        Assert that the response has the given status code and parsed content
        """
        self.assertEqual(response.status_code, expected_status)
        parsed_content = json.loads(response.content)
        self.assertEqual(parsed_content, expected_content)

    def test_not_authenticated(self):
        # Every Discussion API view requires authentication; a logged-out
        # request to the subclass's self.url must get a 401.
        self.client.logout()
        response = self.client.get(self.url)
        self.assert_response_correct(
            response,
            401,
            {"developer_message": "Authentication credentials were not provided."}
        )
class CourseViewTest(DiscussionAPIViewTestMixin, ModuleStoreTestCase):
    """Tests for CourseView"""

    def setUp(self):
        super(CourseViewTest, self).setUp()
        self.url = reverse("discussion_course", kwargs={"course_id": unicode(self.course.id)})

    def test_404(self):
        # A non-existent course id must yield a 404 payload.
        bad_url = reverse("course_topics", kwargs={"course_id": "non/existent/course"})
        response = self.client.get(bad_url)
        self.assert_response_correct(response, 404, {"developer_message": "Not found."})

    def test_get_success(self):
        # The course metadata payload carries the discussion-related URLs.
        expected = {
            "id": unicode(self.course.id),
            "blackouts": [],
            "thread_list_url": "http://testserver/api/discussion/v1/threads/?course_id=x%2Fy%2Fz",
            "topics_url": "http://testserver/api/discussion/v1/course_topics/x/y/z",
        }
        response = self.client.get(self.url)
        self.assert_response_correct(response, 200, expected)
class CourseTopicsViewTest(DiscussionAPIViewTestMixin, ModuleStoreTestCase):
    """Tests for CourseTopicsView"""

    def setUp(self):
        super(CourseTopicsViewTest, self).setUp()
        self.url = reverse("course_topics", kwargs={"course_id": unicode(self.course.id)})

    def test_404(self):
        # A non-existent course id must yield a 404 payload.
        bad_url = reverse("course_topics", kwargs={"course_id": "non/existent/course"})
        response = self.client.get(bad_url)
        self.assert_response_correct(response, 404, {"developer_message": "Not found."})

    def test_get_success(self):
        # The single configured topic appears as a non-courseware topic.
        expected_topic = {
            "id": "test_topic",
            "name": "Test Topic",
            "children": [],
            "thread_list_url":
                "http://testserver/api/discussion/v1/threads/?course_id=x%2Fy%2Fz&topic_id=test_topic",
        }
        response = self.client.get(self.url)
        self.assert_response_correct(
            response,
            200,
            {
                "courseware_topics": [],
                "non_courseware_topics": [expected_topic],
            }
        )
@httpretty.activate
class ThreadViewSetListTest(DiscussionAPIViewTestMixin, ModuleStoreTestCase):
"""Tests for ThreadViewSet list"""
def setUp(self):
super(ThreadViewSetListTest, self).setUp()
self.author = UserFactory.create()
self.url = reverse("thread-list")
def test_course_id_missing(self):
response = self.client.get(self.url)
self.assert_response_correct(
response,
400,
{"field_errors": {"course_id": {"developer_message": "This field is required."}}}
)
def test_404(self):
response = self.client.get(self.url, {"course_id": unicode("non/existent/course")})
self.assert_response_correct(
response,
404,
{"developer_message": "Not found."}
)
def test_basic(self):
self.register_get_user_response(self.user, upvoted_ids=["test_thread"])
source_threads = [{
"id": "test_thread",
"course_id": unicode(self.course.id),
"commentable_id": "test_topic",
"group_id": None,
"user_id": str(self.author.id),
"username": self.author.username,
"anonymous": False,
"anonymous_to_peers": False,
"created_at": "2015-04-28T00:00:00Z",
"updated_at": "2015-04-28T11:11:11Z",
"thread_type": "discussion",
"title": "Test Title",
"body": "Test body",
"pinned": False,
"closed": False,
"abuse_flaggers": [],
"votes": {"up_count": 4},
"comments_count": 5,
"unread_comments_count": 3,
}]
expected_threads = [{
"id": "test_thread",
"course_id": unicode(self.course.id),
"topic_id": "test_topic",
"group_id": None,
"group_name": None,
"author": self.author.username,
"author_label": None,
"created_at": "2015-04-28T00:00:00Z",
"updated_at": "2015-04-28T11:11:11Z",
"type": "discussion",
"title": "Test Title",
"raw_body": "Test body",
"pinned": False,
"closed": False,
"following": False,
"abuse_flagged": False,
"voted": True,
"vote_count": 4,
"comment_count": 5,
"unread_comment_count": 3,
"comment_list_url": "http://testserver/api/discussion/v1/comments/?thread_id=test_thread",
"endorsed_comment_list_url": None,
"non_endorsed_comment_list_url": None,
}]
self.register_get_threads_response(source_threads, page=1, num_pages=2)
response = self.client.get(self.url, {"course_id": unicode(self.course.id)})
self.assert_response_correct(
response,
200,
{
"results": expected_threads,
"next": "http://testserver/api/discussion/v1/threads/?course_id=x%2Fy%2Fz&page=2",
"previous": None,
}
)
self.assert_last_query_params({
"course_id": [unicode(self.course.id)],
"sort_key": ["date"],
"sort_order": ["desc"],
"page": ["1"],
"per_page": ["10"],
"recursive": ["False"],
})
def test_pagination(self):
self.register_get_user_response(self.user)
self.register_get_threads_response([], page=1, num_pages=1)
response = self.client.get(
self.url,
{"course_id": unicode( |
weichen2046/IntellijPluginDevDemo | enterprise-repo/enterprepo/enterprepo/urls.py | Python | apache-2.0 | 1,167 | 0.000857 | """enterprepo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Ad | d a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including anoth | er URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic.base import TemplateView
urlpatterns = [
url(r'^pluginrepo/', include('pluginrepo.urls')),
url(r'^pluginjar/', include('pluginjar.urls')),
url(r'^admin/', admin.site.urls),
url(r'^api/v1/', include('apiv1.urls')),
# for static files
url(r'^.*\.(woff2|woff|ttf|js|map|png|jpg|jpeg)', include('staticwrapper.urls')),
url(r'^.*$', TemplateView.as_view(template_name="home.html"), name="home"),
]
|
arantebillywilson/python-snippets | microblog/flask/lib/python3.5/site-packages/openid/oidutil.py | Python | mit | 6,651 | 0.00015 | """This module contains general utility code that is used throughout
the library.
For users of this library, the C{L{log}} function is probably the most
interesting.
"""
__all__ = [
'log', 'appendArgs', 'toBase64', 'fromBase64', 'autoSubmitHTML',
'toUnicode'
]
import binascii
import logging
# import urllib.parse as urlparse
from urllib.parse import urlencode
xxe_safe_elementtree_modules = [
'defusedxml.cElementTree',
'defusedxml.ElementTree',
]
elementtree_modules = [
'xml.etree.cElementTree',
'xml.etree.ElementTree',
'cElementTree',
'elementtree.ElementTree',
]
def toUnicode(value):
    """Return the given argument as a unicode object.

    @param value: A UTF-8 encoded byte string or a unicode (coercable) object
    @type value: bytes or str

    @returns: Unicode object representing the input value.
    """
    # Byte strings are assumed UTF-8; everything else goes through str().
    return value.decode('utf-8') if isinstance(value, bytes) else str(value)
def autoSubmitHTML(form, title='OpenID transaction in progress'):
    """Wrap *form* in an HTML page that hides and submits it on load.

    Both arguments may be UTF-8 encoded ``bytes`` or text.
    """
    def _text(value):
        # Tolerate UTF-8 byte strings for either argument.
        return str(value, encoding="utf-8") if isinstance(value, bytes) else value

    return """
<html>
<head>
  <title>%s</title>
</head>
<body onload="document.forms[0].submit();">
%s
<script>
var elements = document.forms[0].elements;
for (var i = 0; i < elements.length; i++) {
  elements[i].style.display = "none";
}
</script>
</body>
</html>
""" % (_text(title), _text(form))
def importSafeElementTree(module_names=None):
    """Find a working ElementTree implementation that is not vulnerable
    to XXE, using `defusedxml`.

    >>> XXESafeElementTree = importSafeElementTree()

    @param module_names: The names of modules to try to use as
        a safe ElementTree. Defaults to C{L{xxe_safe_elementtree_modules}}

    @returns: An ElementTree module that is not vulnerable to XXE.
    """
    names = xxe_safe_elementtree_modules if module_names is None else module_names
    try:
        return importElementTree(names)
    except ImportError:
        raise ImportError('Unable to find a ElementTree module '
                          'that is not vulnerable to XXE. '
                          'Tried importing %r' % (names, ))
def importElementTree(module_names=None):
    """Find a working ElementTree implementation, trying the standard
    places that such a thing might show up.

    >>> ElementTree = importElementTree()

    @param module_names: The names of modules to try to use as
        ElementTree. Defaults to C{L{elementtree_modules}}

    @returns: An ElementTree module

    @raises ImportError: if no candidate both imports and parses XML.
    """
    if module_names is None:
        module_names = elementtree_modules

    for mod_name in module_names:
        try:
            ElementTree = __import__(mod_name, None, None, ['unused'])
        except ImportError:
            continue

        # Make sure it can actually parse XML
        try:
            ElementTree.XML('<unused/>')
        except (SystemExit, MemoryError, AssertionError):
            raise
        except:
            # BUG FIX: this used to be
            #     '...%r...: %s' % mod_name
            # i.e. two conversion specifiers with a single non-tuple
            # argument, which raised TypeError as soon as this branch
            # ran.  Let logging do the (lazy) formatting instead.
            logging.exception(
                'Not using ElementTree library %r because it failed to '
                'parse a trivial document', mod_name)
        else:
            return ElementTree

    raise ImportError('No ElementTree library found. '
                      'You may need to install one. '
                      'Tried importing %r' % (module_names, ))
def log(message, level=0):
    """Handle a log message from the OpenID library.

    This is a legacy function which redirects to logging.error.
    The logging module should be used instead of this

    @param message: A string containing a debugging message from the
        OpenID library
    @type message: str

    @param level: The severity of the log message. This parameter is
        currently unused, but in the future, the library may indicate
        more important information with a higher level value.
    @type level: int or None

    @returns: Nothing.
    """
    # Deliberately logs at ERROR so stray callers of this deprecated API
    # get noticed; 'level' is accepted only for backward compatibility.
    logging.error("This is a legacy log message, please use the "
                  "logging module. Message: %s", message)
def appendArgs(url, args):
    """Append query arguments to a HTTP(s) URL. If the URL already has
    query arguments, these arguments will be added, and the existing
    arguments will be preserved. Duplicate arguments will not be
    detected or collapsed (both will appear in the output).

    @param url: The url to which the arguments will be appended
    @type url: str

    @param args: The query arguments to add to the URL. If a
        dictionary is passed, the items will be sorted before
        appending them to the URL. If a sequence of pairs is passed,
        the order of the sequence will be preserved.
    @type args: A dictionary from string to string, or a sequence of
        pairs of strings.

    @returns: The URL with the parameters added
    @rtype: str
    """
    if hasattr(args, 'items'):
        # Dictionaries get a deterministic (sorted) ordering.
        pairs = sorted(args.items())
    else:
        pairs = list(args)

    if not isinstance(url, str):
        url = str(url, encoding="utf-8")

    if not pairs:
        return url

    separator = '&' if '?' in url else '?'

    # Map unicode to UTF-8 if present. Do not make any assumptions
    # about the encodings of plain bytes (str).
    def _utf8(value):
        return value if isinstance(value, bytes) else value.encode('utf-8')

    encoded_pairs = [(_utf8(key), _utf8(val)) for key, val in pairs]
    return '%s%s%s' % (url, separator, urlencode(encoded_pairs))
def toBase64(s):
    """Represent string / bytes s as base64, omitting newlines"""
    data = s.encode("utf-8") if isinstance(s, str) else s
    # b2a_base64 appends a trailing newline; strip it off.
    return binascii.b2a_base64(data)[:-1]
def fromBase64(s):
    """Decode a base64 value (str or bytes), raising ValueError on bad input."""
    data = s.encode("utf-8") if isinstance(s, str) else s
    try:
        return binascii.a2b_base64(data)
    except binascii.Error as why:
        # Convert to a common exception type
        raise ValueError(str(why))
class Symbol(object):
    """An object that compares equal to others of the same type that have
    the same name.  Symbols are distinct from str or unicode objects.
    """

    def __init__(self, name):
        self.name = name

    def __eq__(self, other):
        # Only symbols of the exact same type with the same name are equal.
        if type(self) is not type(other):
            return False
        return self.name == other.name

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        # Hash on (class, name) so equal symbols hash equally.
        return hash((self.__class__, self.name))

    def __repr__(self):
        return '<Symbol %s>' % (self.name, )
|
kingmotley/SickRage | sickbeard/providers/xthor.py | Python | gpl-3.0 | 7,253 | 0.003314 | # coding=utf-8
# Author: adaur <adaur.underground@gmail.com>
# Rewrite: Dustyn Gibson (miigotu) <miigotu@gmail.com>
# URL: https://sickrage.github.io
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import re
from requests.utils import dict_from_cookiejar
from sickbeard import logger, tvcache
from sickbeard.bs4_parser import BS4Parser
from sickrage.helper.common import convert_size, try_int
from sickrage.providers.torrent.TorrentProvider import TorrentProvider
class XthorProvider(TorrentProvider):  # pylint: disable=too-many-instance-attributes
    """Torrent provider scraping the French private tracker Xthor."""

    def __init__(self):

        # Provider Init
        TorrentProvider.__init__(self, "Xthor")

        # Credentials
        self.username = None
        self.password = None

        # Torrent Stats
        self.minseed = None
        self.minleech = None
        self.freeleech = None

        # URLs
        self.url = 'https://xthor.bz'
        self.urls = {
            'login': self.url + '/takelogin.php',
            'search': self.url + '/browse.php?'
        }

        # Proper Strings
        self.proper_strings = ['PROPER']

        # Cache
        self.cache = tvcache.TVCache(self, min_time=30)

    def login(self):
        # An existing session cookie means we are already authenticated.
        if any(dict_from_cookiejar(self.session.cookies).values()):
            return True

        login_params = {
            'username': self.username,
            'password': self.password,
            'submitme': 'X'
        }

        response = self.get_url(self.urls['login'], post_data=login_params, returns='text')
        if not response:
            logger.log(u"Unable to connect to provider", logger.WARNING)
            return False

        # The donation link only appears on pages served to logged-in users.
        if not re.search('donate.php', response):
            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
            return False

        return True

    def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-locals, too-many-branches, too-many-statements
        results = []
        if not self.login():
            return results

        """
        Séries / Pack TV 13
        Séries / TV FR 14
        Séries / HD FR 15
        Séries / TV VOSTFR 16
        Séries / HD VOSTFR 17
        Mangas (Anime) 32
        Sport 34
        """

        # Search Params (the cNN keys enable the category ids listed above)
        search_params = {
            'only_free': try_int(self.freeleech),
            'searchin': 'title',
            'incldead': 0,
            'type': 'desc',
            'c13': 1, 'c14': 1, 'c15': 1,
            'c16': 1, 'c17': 1, 'c32': 1
        }

        # Units
        units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']

        def process_column_header(td):
            # Prefer the link's title attribute, then its text, then the
            # cell's own text.
            result = ''
            if td.a:
                result = td.a.get('title', td.a.get_text(strip=True))
            if not result:
                result = td.get_text(strip=True)
            return result

        for mode in search_strings:
            items = []
            logger.log(u"Search Mode: {0}".format(mode), logger.DEBUG)

            # Sorting: 1: Name, 3: Comments, 5: Size, 6: Completed, 7: Seeders, 8: Leechers (4: Time ?)
            search_params['sort'] = (7, 4)[mode == 'RSS']

            for search_string in search_strings[mode]:

                if mode != 'RSS':
                    logger.log(u"Search string: {0}".format
                               (search_string.decode("utf-8")), logger.DEBUG)

                search_params['search'] = search_string
                data = self.get_url(self.urls['search'], params=search_params, returns='text')
                if not data:
                    logger.log(u"No data returned from provider", logger.DEBUG)
                    continue

                with BS4Parser(data, 'html5lib') as html:
                    torrent_table = html.find("table", class_="table2 table-bordered2")
                    torrent_rows = []
                    if torrent_table:
                        torrent_rows = torrent_table("tr")

                    # Continue only if at least one Release is found
                    if len(torrent_rows) < 2:
                        logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                        continue

                    # Catégorie, Nom du Torrent, (Download), (Bookmark), Com., Taille, Complété, Seeders, Leechers
                    labels = [process_column_header(label) for label in torrent_rows[0]('td')]

                    # Skip column headers
                    for row in torrent_rows[1:]:
                        cells = row('td')
                        if len(cells) < len(labels):
                            continue

                        try:
                            title = cells[labels.index('Nom du Torrent')].get_text(strip=True)
                            download_url = self.url + '/' + row.find("a", href=re.compile("download.php"))['href']
                            if not all([title, download_url]):
                                continue

                            seeders = try_int(cells[labels.index('Seeders')].get_text(strip=True))
                            leechers = try_int(cells[labels.index('Leechers')].get_text(strip=True))

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode != 'RSS':
                                    logger.log(u"Discarding torrent because it doesn't meet the"
                                               u" minimum seeders or leechers: {0} (S:{1} L:{2})".format
                                               (title, seeders, leechers), logger.DEBUG)
                                continue

                            torrent_size = cells[labels.index('Taille')].get_text()
                            size = convert_size(torrent_size, units=units) or -1

                            item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'hash': ''}
                            if mode != 'RSS':
                                logger.log(u"Found result: {0} with {1} seeders and {2} leechers".format
                                           (title, seeders, leechers), logger.DEBUG)

                            items.append(item)
                        except StandardError:
                            # Malformed rows are simply skipped.
                            continue

            # For each search mode sort all the items by seeders if available
            items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True)
            results += items

        return results
provider = XthorProvider()
|
sinnwerkstatt/landmatrix | apps/landmatrix/models/deal_submodels.py | Python | agpl-3.0 | 5,262 | 0.00133 | # from django.contrib.gis.db import models as gismodels
# from django.contrib.postgres.fields import JSONField
# from django.db import models
# from django.utils.translation import ugettext as _
#
# from apps.landmatrix.models import Deal
# from apps.landmatrix.models.mixins import (
# OldContractMixin,
# OldDataSourceMixin,
# OldLocationMixin,
# FromDictMixin,
# )
# from apps.landmatrix.models.versions import Version, register_version
#
#
# class DealSubmodelManager(models.Manager):
# def visible(self, user=None):
# qs = self.get_queryset()
# if user and (user.is_staff or user.is_superuser):
# return qs.all()
# return qs.filter(deal__status__in=(2, 3), deal__confidential=False)
#
#
# class LocationVersion(Version):
# pass
#
#
# @register_version(LocationVersion)
# class Location(models.Model, FromDictMixin, OldLocationMixin):
# name = models.CharField(max_length=2000, blank=True)
# description = models.CharField(max_length=2000, blank=True)
# point = gismodels.PointField(blank=True, null=True)
# facility_name = models.CharField(max_length=2000, blank=True)
# ACCURACY_CHOICES = (
# ("COUNTRY", _("Country")),
# ("ADMINISTRATIVE_REGION", _("Administrative region")),
# ("APPROXIMATE_LOCATION", _("Approximate location")),
# ("EXACT_LOCATION", _("Exact location")),
# ("COORDINATES", _("Coordinates")),
# )
# level_of_accuracy = models.CharField(
# _("Spatial accuracy level"),
# choices=ACCURACY_CHOICES,
# max_length=100,
# blank=True,
# null=True,
# )
# comment = models.TextField(blank=True)
#
# # contract_area = gismodels.MultiPolygonField(blank=True, null=True)
# # intended_area = gismodels.MultiPolygonField(blank=True, null=True)
# # production_area = gismodels.MultiPolygonField(blank=True, null=True)
# areas = JSONField(blank=True, null=True)
#
# deal = models.ForeignKey(Deal, on_delete=models.CASCADE, related_name="locations")
# old_group_id = models.IntegerField(null=True, blank=True)
#
# objects = DealSubmodelManager()
#
# def __str__(self):
# return f"(#{self.deal_id}) {self.name}"
#
#
# class ContractVersion(Version):
# pass
#
#
# @register_version(ContractVersion)
# class Contract(models.Model, OldContractMixin):
# number = models.CharField(_("Contract number"), max_length=255, blank=True)
# date = models.DateField(blank=True, null=True)
# expiration_date = models.DateField(blank=True, null=True)
# agreement_duration = models.IntegerField(
# _("Duration of the agreement (in years)"), blank=True, null=True
# )
# comment = models.TextField(blank=True)
#
# deal = models.ForeignKey(Deal, on_delete=models.CASCADE, related_name="contracts")
# old_group_id = models.IntegerField(null=True, blank=True)
#
# objects = DealSubmodelManager()
#
# def __str__(self):
# return f"(#{self.deal_id}) {self.number}"
#
#
# class DataSourceVersion(Version):
# pass
#
#
# @register_version(DataSourceVersion)
# class DataSource(models.Model, OldDataSourceMixin):
# TYPE_CHOICES = (
# ("MEDIA_REPORT", _("Media report")),
# ("RESEARCH_PAPER_OR_POLICY_REPORT", _("Research Paper / Policy Report")),
# ("GOVERNMENT_SOURCES", _("Government sources")),
# ("COMPANY_SOURCES", _("Company sources")),
# ("CONTRACT", _("Contract")),
# ("CONTRACT_FARMING_AGREEMENT", _("Contract (contract farming agreement)")),
# ("PERSONAL_INFORMATION", _("Personal information")),
# ("CROWDSOURCING", _("Crowdsourcing")),
# ("OTHER", _("Other (Please specify in comment field)")),
# )
# type = models.CharField(choices=TYPE_CHOICES, max_length=100, blank=True)
# url = models.URLField(max_length=5000, blank=True, null=True)
# file = models.FileField(
# _("File"),
# upload_to="uploads",
# max_length=5000,
# help_text=_("Maximum file size: 10MB"),
# blank=True,
# null=True,
# )
# file_not_public = models.BooleanField(_("Keep PDF not public"), default=False)
# publication_title = models.CharField(max_length=5000, blank=True)
# date = models.DateFiel | d(blank=True, null=True)
# name = models.CharField(_("Nam | e"), max_length=500, blank=True)
# company = models.CharField(_("Company"), max_length=500, blank=True)
# email = models.EmailField(_("Email"), blank=True)
# phone = models.CharField(_("Phone"), max_length=500, blank=True)
#
# includes_in_country_verified_information = models.NullBooleanField(
# _("Includes in-country-verified information")
# )
# open_land_contracts_id = models.CharField(
# _("Open Contracting ID"), max_length=500, blank=True
# )
# comment = models.TextField(_("Comment on data source"), blank=True)
#
# deal = models.ForeignKey(Deal, on_delete=models.CASCADE, related_name="datasources")
# old_group_id = models.IntegerField(null=True, blank=True)
#
# objects = DealSubmodelManager()
#
# def __str__(self):
# return f"(#{self.deal_id}) {self.get_type_display()}"
#
# class Meta:
# ordering = ["date"]
|
ntt-sic/neutron | neutron/plugins/nicira/extensions/nvp_qos.py | Python | apache-2.0 | 7,517 | 0.000133 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Nicira, Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Aaron Rosen, Nicira Networks, Inc.
from abc import abstractmethod
from neutron.api import extensions
from neutron.api.v2 import attributes as attr
from neutron.api.v2 import base
from neutron.common import exceptions as qexception
from neutron import manager
# For policy.json/Auth
qos_queue_create = "create_qos_queue"
qos_queue_delete = "delete_qos_queue"
qos_queue_get = "get_qos_queue"
qos_queue_list = "get_qos_queues"
# Error conditions for the NVP QoS extension. Each subclasses a neutron
# base exception (InUse / InvalidInput / NotFound) so the API layer maps
# it to the corresponding HTTP status code.
class DefaultQueueCreateNotAdmin(qexception.InUse):
    """Raised when a non-admin tries to create the queue named 'default'."""
    message = _("Need to be admin in order to create queue called default")
class DefaultQueueAlreadyExists(qexception.InUse):
    """Raised when a second default queue would be created."""
    message = _("Default queue already exists.")
class QueueInvalidDscp(qexception.InvalidInput):
    """Raised when a DSCP value is outside the 6-bit range [0, 63]."""
    message = _("Invalid value for dscp %(data)s must be integer value"
                " between 0 and 63.")
class QueueMinGreaterMax(qexception.InvalidInput):
    """Raised when the minimum bandwidth rate exceeds the maximum."""
    message = _("Invalid bandwidth rate, min greater than max.")
class QueueInvalidBandwidth(qexception.InvalidInput):
    """Raised when a bandwidth rate is not a non-negative integer."""
    message = _("Invalid bandwidth rate, %(data)s must be a non negative"
                " integer.")
class QueueNotFound(qexception.NotFound):
    """Raised when a queue id does not match any known queue."""
    message = _("Queue %(id)s does not exist")
class QueueInUseByPort(qexception.InUse):
    """Raised when deleting a queue that is still attached to a port."""
    message = _("Unable to delete queue attached to port.")
class QueuePortBindingNotFound(qexception.NotFound):
    """Raised when a port has no associated logical queue."""
    message = _("Port is not associated with lqueue")
def convert_to_unsigned_int_or_none(val):
    """Coerce *val* to a non-negative int, passing None through untouched.

    Raises qexception.InvalidInput when *val* cannot be interpreted as a
    non-negative integer.
    """
    if val is None:
        return None
    try:
        converted = int(val)
        if converted < 0:
            # Report the converted value, mirroring the original behavior
            # where the message formats the post-int() value.
            val = converted
            raise ValueError
    except (ValueError, TypeError):
        raise qexception.InvalidInput(
            error_message=_("'%s' must be a non negative integer.") % val)
    return converted
def convert_to_unsigned_int_or_none_max_63(val):
    """Like convert_to_unsigned_int_or_none, but additionally require the
    value (when not None) to fit the 6-bit DSCP range [0, 63].

    Raises QueueInvalidDscp when the value exceeds 63.
    """
    val = convert_to_unsigned_int_or_none(val)
    # Guard None explicitly: `None > 63` is a TypeError on Python 3 (and
    # was only silently False on Python 2).
    if val is not None and val > 63:
        raise QueueInvalidDscp(data=val)
    return val
# As per NVP API, if a queue is trusted, DSCP must be omitted; if a queue is
# untrusted, DSCP must be specified. Whichever default values we choose for
# the tuple (qos_marking, dscp), there will be at least one combination of a
# request with conflicting values: for instance, with the following default:
#
# qos_marking = 'untrusted', dscp = '0'
#
# requests with qos_marking = 'trusted' and a default dscp will fail. Since
# it is convoluted to ask the admin to specify a None value for dscp when
# qos_marking is 'trusted', it is best to ignore the dscp value, regardless
# of whether it has been specified or not. This preserves the chosen default
# and keeps backward compatibility with the API. A warning will be logged, as
# the server is overriding a potentially conflicting request from the admin
RESOURCE_ATTRIBUTE_MAP = {
'qos_queues': {
'id': {'allow_post': False, 'allow_put': False,
'is_visible': True},
'default': {'allow_post': True, 'allow_put': False,
'convert_to': attr.convert_to_boolean,
'is_visible': True, 'default': False},
'name': {'allow_post': True, 'allow_put': False,
'validate': {'type:string': None},
'is_visible': True, 'default': ''},
'min': {'allow_post': True, 'allow_put': False,
'is_visible': True, 'default': '0',
'convert_to': convert_to_unsigned_int_or_none},
'max': {'allow_post': True, 'allow_put': False,
'is_visible': True, 'default': None,
'convert_to': convert_to_unsigned_int_or_none},
'qos_marking': {'allow_post': True, 'allow_put': False,
'validate': {'type:values': ['untrusted', 'trusted']},
'default': 'untrusted', 'is_visible': True},
'dscp': {'allow_post': True, 'allow_put': False,
'is_visible': True, 'default': '0',
'convert_to': convert_to_unsigned_int_or_none_max_63},
'tenant_id': {'allow_post': True, 'allow_put': False,
'required_by_policy': True,
'validate': {'type:string': None},
'is_visible': True},
},
}
QUEUE = 'queue_id'
RXTX_FACTOR = 'rxtx_factor'
EXTENDED_ATTRIBUTES_2_0 = {
'ports': {
RXTX_FACTOR: {'allow_post': True,
# FIXME(arosen): the nvp plugin currently does not
# implement updating rxtx factor on port.
'allow_put': True,
'is_visible': False,
'default': 1,
'enforce_policy': True,
'convert_to': convert_to_unsigned_int_or_none},
QUEUE: {'allow_post': False,
'allow_put': False,
'is_visible': True,
'default': False,
'enforce_policy': True}},
'networks': {QUEUE: {'allow_post': True,
'allow_put': True,
'is_visible': True,
'default': False,
'enforce_policy': True}}
}
class Nvp_qos(object):
    """Port Queue extension."""
    @classmethod
    def get_name(cls):
        """Return the human-readable extension name."""
        return "nvp-qos"
    @classmethod
    def get_alias(cls):
        """Return the alias used to enable this extension."""
        return "nvp-qos"
    @classmethod
    def get_description(cls):
        """Return a short description for the extension listing."""
        return "NVP QoS extension."
    @classmethod
    def get_namespace(cls):
        """Return the XML namespace URL of the extension."""
        return "http://docs.openstack.org/ext/nvp-qos/api/v2.0"
    @classmethod
    def get_updated(cls):
        """Return the timestamp of the last extension update."""
        return "2012-10-05T10:00:00-00:00"
    @classmethod
    def get_resources(cls):
        """Returns Ext Resources."""
        exts = []
        plugin = manager.NeutronManager.get_plugin()
        resource_name = 'qos_queue'
        collection_name = resource_name.replace('_', '-') + "s"
        params = RESOURCE_ATTRIBUTE_MAP.get(resource_name + "s", dict())
        controller = base.create_resource(collection_name,
                                          resource_name,
                                          plugin, params, allow_bulk=False)
        ex = extensions.ResourceExtension(collection_name,
                                          controller)
        exts.append(ex)
        return exts
    def get_extended_resources(self, version):
        """Return per-resource attribute extensions for *version*.

        Only API version "2.0" is extended; any other version gets an
        empty mapping.
        """
        if version == "2.0":
            # Merge via copy+update rather than dict(a.items() + b.items());
            # the latter only works on Python 2 where items() is a list.
            # RESOURCE_ATTRIBUTE_MAP still wins on key collisions, as before.
            attrs = dict(EXTENDED_ATTRIBUTES_2_0)
            attrs.update(RESOURCE_ATTRIBUTE_MAP)
            return attrs
        else:
            return {}
class QueuePluginBase(object):
    """Abstract interface a plugin must implement to support QoS queues."""
    @abstractmethod
    def create_qos_queue(self, context, queue):
        """Create a queue from the request body in *queue*."""
        pass
    @abstractmethod
    def delete_qos_queue(self, context, id):
        """Delete the queue identified by *id*."""
        pass
    @abstractmethod
    def get_qos_queue(self, context, id, fields=None):
        """Return a single queue, optionally restricted to *fields*."""
        pass
    @abstractmethod
    def get_qos_queues(self, context, filters=None, fields=None):
        """Return all queues matching *filters*."""
        pass
|
snownothing/Python | web/server.py | Python | mit | 651 | 0.004608 | # !/usr/bin/env python
# coding: utf-8
__author__ = 'Moch'
import tornado.ioloop
im | port tornado.options
import tornado.httpserver
from application import application
from tornado.options import define, options
define("port", default=8000, help="run on the given port", type=int)
def main():
    """Parse command-line options, bind the HTTP server and enter the IOLoop.

    Blocks until the IOLoop is stopped (e.g. by Control-C).
    """
    tornado.options.parse_command_line()
    http_server = tornado.httpserver.HTTPServer(application)
    http_server.listen(options.port)
    # listen() without an address binds all interfaces; loopback is what a
    # developer will actually use. Fixed "runing" -> "running" typo.
    print("Development server is running at http://127.0.0.1:{}".format(options.port))
    print("Quit the server with Control-C")
    tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
    main()
|
nolanliou/tensorflow | tensorflow/contrib/quantize/python/quantize.py | Python | apache-2.0 | 11,989 | 0.004754 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Logic to update a TensorFlow model graph with quantization operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import re
from tensorflow.contrib import graph_editor
from tensorflow.contrib.quantize.python import graph_matcher
from tensorflow.contrib.quantize.python import input_to_ops
from tensorflow.contrib.quantize.python import quant_ops
from tensorflow.python.framework import ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.training import training_util
# Quantizable operation types that are supported by the quantization rewrite.
_QUANTIZABLE_TYPES = {'Conv2D', 'MatMul', 'DepthwiseConv2dNative'}
# Activations that are supported by the quantization rewrite.
_ACTIVATION_TYPES = {'Relu', 'Relu6', 'Identity'}
# Weight types that are supported by the quantization rewrite.
# TODO(suharshs): Add support for ResourceVariable.
_WEIGHT_TYPES = {'Variable', 'VariableV2'}
def Quantize(graph,
             weight_bits=8,
             activation_bits=8,
             ema_decay=0.999,
             quant_delay=None,
             vars_collection=ops.GraphKeys.MOVING_AVERAGE_VARIABLES,
             is_training=True):
  """Updates graph with quantization operations.

  Args:
    graph: Graph to modify.
    weight_bits: Number of bits to use for quantizing weights.
    activation_bits: Number of bits to use for quantizing activations.
    ema_decay: (Optional) Float, EMA decay parameter. EMA is used to update
      quantization intervals for quantizing activations (see here about EMA:
      https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average).
    quant_delay: (Optional, default None) Int, count of global steps for which
      to delay quantization. This helps weights stabilize at the start of
      training.
    vars_collection: (Optional) Collection where to store the variables for
      quantization interval ends.
    is_training: (Optional) Whether quantizing training graph or eval graph.

  Raises:
    ValueError: When quantization fails.
  """
  input_to_ops_map = input_to_ops.InputToOps(graph)
  for layer_match in _FindLayersToQuantize(graph):
    # Quantize the weights.
    context = _GetContextFromOp(layer_match.layer_op)
    _InsertQuantOp(
        context,
        layer_match.weight_tensor.op, [layer_match.layer_op],
        name='weights_quant',
        moving_avg=False,
        bits=weight_bits,
        ema_decay=ema_decay,
        quant_delay=quant_delay,
        is_training=is_training,
        narrow_range=True,
        vars_collection=vars_collection)
    # Quantize the activations.
    consumer_ops = input_to_ops_map.ConsumerOperations(
        layer_match.activation_op)
    add_context = context
    if layer_match.bypass_op:
      # Drop the innermost name-scope component so the bypass add quant is
      # created in the enclosing scope.
      add_context = re.search(r'^(.*)/([^/]+)', context).group(1)
    _InsertQuantOp(
        add_context,
        layer_match.activation_op,
        consumer_ops,
        name='act_quant',
        moving_avg=True,
        init_min=0.0,
        ema_decay=ema_decay,
        quant_delay=quant_delay,
        bits=activation_bits,
        vars_collection=vars_collection)
    # Quantize the inputs and output to the bypass (if it exists). The input
    # to the bypass is the bias add, and the output is the activation.
    if layer_match.bypass_op is not None:
      _InsertQuantOp(
          context,
          layer_match.bias_add_op, [layer_match.bypass_op],
          name='conv_quant',
          moving_avg=True,
          ema_decay=ema_decay,
          quant_delay=quant_delay,
          vars_collection=vars_collection,
          bits=activation_bits)
      _InsertQuantOp(
          add_context,
          layer_match.bypass_op,
          input_to_ops_map.ConsumerOperations(layer_match.bypass_op),
          name='add_quant',
          moving_avg=True,
          bits=activation_bits)
def _FindLayersToQuantize(graph):
  """Matches layers in graph to quantize.

  Builds an op-pattern for "quantizable layer (+ optional folded batch norm)
  -> bias add -> optional bypass add -> activation" and yields one
  _LayerMatch per occurrence found in *graph*.

  Args:
    graph: Graph to perform match on.
  Yields:
    _LayerMatches.
  """
  input_pattern = graph_matcher.OpTypePattern('*')
  weight_var_pattern = graph_matcher.OpTypePattern('|'.join(_WEIGHT_TYPES))
  weight_pattern = graph_matcher.OpTypePattern(
      'Identity', inputs=[weight_var_pattern])
  # Folded batch-norm weights appear as a Mul rather than an Identity read.
  folded_weight_pattern = graph_matcher.OpTypePattern('Mul')
  # The weights inputs to the layer operation can either be from the Variable or
  # the folded weight (Mul).
  layer_pattern = graph_matcher.OpTypePattern(
      '|'.join(_QUANTIZABLE_TYPES),
      inputs=[
          input_pattern,
          graph_matcher.OneofPattern([weight_pattern, folded_weight_pattern])
      ])
  folded_bias_mul_pattern = graph_matcher.OpTypePattern(
      'Mul', inputs=[graph_matcher.OpTypePattern('*'), layer_pattern])
  post_layer_op_correction_pattern = graph_matcher.OpTypePattern(
      'Add', inputs=[folded_bias_mul_pattern,
                     graph_matcher.OpTypePattern('*')])
  folded_bias_add_pattern = graph_matcher.OpTypePattern(
      'Add',
      inputs=[
          post_layer_op_correction_pattern,
          graph_matcher.OpTypePattern('*')
      ])
  bias_add_pattern = graph_matcher.OpTypePattern(
      'Add|BiasAdd', inputs=[layer_pattern, '*'])
  # The bias can come from the bias add or the folded bias add.
  # A residual/bypass Add may have the biased layer on either side, so two
  # mirrored patterns are needed.
  bypass_pattern_a = graph_matcher.OpTypePattern(
      'Add',
      inputs=[
          graph_matcher.OneofPattern(
              [bias_add_pattern, folded_bias_add_pattern]), '*'
      ])
  bypass_pattern_b = graph_matcher.OpTypePattern(
      'Add',
      inputs=[
          '*',
          graph_matcher.OneofPattern(
              [bias_add_pattern, folded_bias_add_pattern])
      ])
  # The input to the activation can come from bias add, fold bias add or the
  # bypasses.
  activation_pattern = graph_matcher.OpTypePattern(
      '|'.join(_ACTIVATION_TYPES),
      inputs=[
          graph_matcher.OneofPattern([
              bias_add_pattern, folded_bias_add_pattern, bypass_pattern_a,
              bypass_pattern_b
          ])
      ])
  layer_matcher = graph_matcher.GraphMatcher(activation_pattern)
  for match_result in layer_matcher.match_graph(graph):
    layer_op = match_result.get_op(layer_pattern)
    weight_tensor = match_result.get_tensor(weight_pattern)
    if weight_tensor is None:
      # No Identity-wrapped Variable matched: fall back to the folded weight.
      weight_tensor = match_result.get_tensor(folded_weight_pattern)
    activation_op = match_result.get_op(activation_pattern)
    bias_add_op = match_result.get_op(bias_add_pattern)
    if bias_add_op is None:
      bias_add_op = match_result.get_op(folded_bias_add_pattern)
    bypass_op = match_result.get_op(bypass_pattern_a)
    if bypass_op is None:
      bypass_op = match_result.get_op(bypass_pattern_b)
    yield _LayerMatch(layer_op, weight_tensor, activation_op, bypass_op,
                      bias_add_op)
class _LayerMatch(object):
"""Contains all information related to a matched Layer."""
def __init__(self, layer_op, weight_tensor, activation_op, bypass_op,
bias_add_op):
self._layer_op = layer_op
self._weight_tensor = weight_tensor
self._activation_op = activation_op
self._bypass_op = bypass_op
self._bias_add_op = bias_add_op
@property
def layer_op(self):
return self._layer_op
@property
def weight_tensor(self):
return self._weight_tensor
@property
def activation_op(self):
return self._activation_op
@property
def bypass_op(se |
google/tangent | tests/test_optimization.py | Python | apache-2.0 | 2,734 | 0.008413 | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import gast
import pytest
from tangent import optimization
from tangent import quoting
def test_assignment_propagation():
  """`y = x; z = y` collapses so only one assignment plus the return remain."""
  def f(x):
    y = x
    z = y
    return z
  node = quoting.parse_function(f)
  node = optimization.assignment_propagation(node)
  # node is a Module; node.body[0] is the FunctionDef for f.
  assert len(node.body[0].body) == 2
def test_dce():
  """Dead code elimination drops `y = 2 * x`, whose result is never used."""
  def f(x):
    y = 2 * x
    return x
  node = quoting.parse_function(f)
  node = optimization.dead_code_elimination(node)
  # Only the return statement should be left in the function body.
  assert isinstance(node.body[0].body[0], gast.Return)
def test_fixed_point():
  """optimize() re-applies its passes until nothing changes, so both dead
  assignments disappear (removing z makes y dead on the next iteration)."""
  def f(x):
    y = g(x)
    z = h(y)
    return x
  node = quoting.parse_function(f)
  node = optimization.optimize(node)
  assert isinstance(node.body[0].body[0], gast.Return)
def test_constant_folding():
  """Constant folding rewrites arithmetic on literals; f_opt is the expected
  source of f after folding, compared line by line."""
  def f(x):
    x = 1 * x
    x = 0 * x
    x = x * 1
    x = x * 0
    x = x * 2
    x = 2 * x
    x = 2 * 3
    x = 1 + x
    x = 0 + x
    x = x + 1
    x = x + 0
    x = x + 2
    x = 2 + x
    x = 2 + 3
    x = 1 - x
    x = 0 - x
    x = x - 1
    x = x - 0
    x = x - 2
    x = 2 - x
    x = 2 - 3
    x = 1 / x
    x = 0 / x
    x = x / 1
    x = x / 0
    x = x / 2
    x = 2 / x
    x = 2 / 8
    x = 1 ** x
    x = 0 ** x
    x = x ** 1
    x = x ** 0
    x = x ** 2
    x = 2 ** x
    x = 2 ** 3

  def f_opt(x):
    x = x
    x = 0
    x = x
    x = 0
    x = x * 2
    x = 2 * x
    x = 6
    x = 1 + x
    x = x
    x = x + 1
    x = x
    x = x + 2
    x = 2 + x
    x = 5
    x = 1 - x
    x = -x
    x = x - 1
    x = x
    x = x - 2
    x = 2 - x
    x = -1
    x = 1 / x
    x = 0 / x
    x = x
    x = x / 0
    x = x / 2
    x = 2 / x
    x = 0.25
    x = 1
    x = 0
    x = x
    x = 1
    x = x ** 2
    x = 2 ** x
    x = 8

  node = quoting.parse_function(f)
  node = optimization.constant_folding(node)
  node_opt = quoting.parse_function(f_opt)
  lines = quoting.to_source(node).strip().split('\n')[1:]
  lines_opt = quoting.to_source(node_opt).strip().split('\n')[1:]
  # In Python 2 integer division could be on, in which case 2 / 8 folds to 0.
  # Index 27 is the folded `x = 2 / 8` statement; to_source indents the body
  # by four spaces.
  if 1 / 2 == 0:
    lines_opt[27] = '    x = 0'
  assert lines == lines_opt
if __name__ == '__main__':
assert not pytest.main([__file__])
|
titeuf87/evennia | evennia/players/admin.py | Python | bsd-3-clause | 9,894 | 0.001516 | #
# This sets up how models are displayed
# in the web admin interface.
#
from builtins import object
from django import forms
from django.conf import settings
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.auth.forms import UserChangeForm, UserCreationForm
from evennia.players.models import PlayerDB
from evennia.typeclasses.admin import AttributeInline, TagInline
from evennia.utils import create
# handle the custom User editor
class PlayerDBChangeForm(UserChangeForm):
"""
Modify the playerdb class.
"""
class Meta(object):
model = PlayerDB
fields = '__all__'
username = forms.RegexField(
label="Username",
max_length=30,
regex=r'^[\w. @+-]+$',
widget=forms.TextInput(
attrs={'size': '30'}),
error_messages={
'invalid': "This value may contain only letters, spaces, numbers "
"and @/./+/-/_ characters."},
help_text="30 characters or fewer. Letters, spaces, digits and "
"@/./+/-/_ only.")
def clean_username(self):
"""
Clean the username and check its existence.
"""
username = self.cleaned_data['username']
if username.upper() == self.instance.username.upper():
return username
elif PlayerDB.objects.filter(username__iexact=username):
raise forms.ValidationError('A player with that name '
'already exists.')
return self.cleaned_data['username']
class PlayerDBCreationForm(UserCreationForm):
"""
Create a new PlayerDB instance.
"""
class Meta(object):
model = PlayerDB
fields = '__all__'
username = forms.RegexField(
label="Username",
max_length=30,
regex=r'^[\w. @+-]+$',
widget=forms.TextInput(
attrs={'size': '30'}),
error_messages={
'invalid': "This value may contain only letters, spaces, numbers "
"and @/./+/-/_ characters."},
help_text="30 characters or fewer. Letters, spaces, digits and "
"@/./+/-/_ only.")
def clean_username(self):
"""
Cleanup username.
"""
username = self.cleaned_data['username']
if PlayerDB.objects.filter(username__iexact=username):
raise forms.ValidationError('A player with that name already '
'exists.')
return username
class PlayerForm(forms.ModelForm):
"""
Defines how to display Players
"""
class Meta(object):
model = PlayerDB
fields = '__all__'
db_key = forms.RegexField(
label="Username",
initial="PlayerDummy",
max_length=30,
regex=r'^[\w. @+-]+$',
required=False,
widget=forms.TextInput(attrs={'size': '30'}),
error_messages={
'invalid': "This value may contain only letters, spaces, numbers"
" and @/./+/-/_ characters."},
help_text="This should be the same as the connected Player's key "
"name. 30 characters or fewer. Letters, spaces, digits and "
"@/./+/-/_ only.")
db_typeclass_path = forms.CharField(
label="Typeclass",
initial=settings.BASE_PLAYER_TYPECLASS,
widget=forms.TextInput(
attrs={'size': '78'}),
help_text="Required. Defines what 'type' of entity this is. This "
"variable holds a Python path to a module with a valid "
"Evennia Typeclass. Defaults to "
"settings.BASE_PLAYER_TYPECLASS.")
db_permissions = forms.CharField(
label="Permissions",
initial=settings.PERMISSION_PLAYER_DEFAULT,
required=False,
widget=forms.TextInput(
attrs={'size': '78'}),
help_text="In-game permissions. A comma-separated list of text "
"strings checked by certain locks. They are often used for "
"hierarchies, such as letting a Player have permission "
"'Wizards', 'Builders' etc. A Player permission can be "
"overloaded by the permissions of a controlled Character. "
"Normal players use 'Players' by default.")
db_lock_storage = forms.CharField(
label="Locks",
widget=forms.Textarea(attrs={'cols': '100', 'rows': '2'}),
required=False,
help_text="In-game lock definition string. If not given, defaults "
"will be used. This string should be on the form "
"<i>type:lockfunction(args);type2:lockfunction2(args);...")
db_cmdset_storage = forms.CharField(
label="cmdset",
initial=settings.CMDSET_PLAYER,
widget=forms.TextInput(attrs={'size': '78'}),
required=False,
help_text="python path to player cmdset class (set in "
"settings.CMDSET_PLAYER by default)")
class PlayerInline(admin.StackedInline):
"""
Inline creation of Player
"""
model = PlayerDB
template = "admin/players/stacked.html"
form = PlayerForm
fieldsets = (
("In-game Permissions and Locks",
{'fields': ('db_lock_storage',),
#{'fields': ('db_permissions', 'db_lock_storage'),
'description': "<i>These are permissions/locks for in-game use. "
"They are unrelated to website access rights.</i>"}),
("In-game Player data",
| {'fields': ('db_typeclass_path', 'db_cmdset_storage'),
'description': "<i>These | fields define in-game-specific properties "
"for the Player object in-game.</i>"}))
extra = 1
max_num = 1
class PlayerTagInline(TagInline):
    """
    Inline Player Tags.
    """
    # Through model of the PlayerDB.db_tags many-to-many relation.
    model = PlayerDB.db_tags.through
    # FK field on the through model pointing back to the player row.
    related_field = "playerdb"
class PlayerAttributeInline(AttributeInline):
    """
    Inline Player Attributes.
    """
    # Through model of the PlayerDB.db_attributes many-to-many relation.
    model = PlayerDB.db_attributes.through
    # FK field on the through model pointing back to the player row.
    related_field = "playerdb"
class PlayerDBAdmin(BaseUserAdmin):
"""
This is the main creation screen for Users/players
"""
list_display = ('username', 'email', 'is_staff', 'is_superuser')
form = PlayerDBChangeForm
add_form = PlayerDBCreationForm
inlines = [PlayerTagInline, PlayerAttributeInline]
fieldsets = (
(None, {'fields': ('username', 'password', 'email')}),
('Website profile', {
'fields': ('first_name', 'last_name'),
'description': "<i>These are not used "
"in the default system.</i>"}),
('Website dates', {
'fields': ('last_login', 'date_joined'),
'description': '<i>Relevant only to the website.</i>'}),
('Website Permissions', {
'fields': ('is_active', 'is_staff', 'is_superuser',
'user_permissions', 'groups'),
'description': "<i>These are permissions/permission groups for "
"accessing the admin site. They are unrelated to "
"in-game access rights.</i>"}),
('Game Options', {
'fields': ('db_typeclass_path', 'db_cmdset_storage',
'db_lock_storage'),
'description': '<i>These are attributes that are more relevant '
'to gameplay.</i>'}))
# ('Game Options', {'fields': (
# 'db_typeclass_path', 'db_cmdset_storage',
# 'db_permissions', 'db_lock_storage'),
# 'description': '<i>These are attributes that are '
# 'more relevant to gameplay.</i>'}))
add_fieldsets = (
(None,
{'fields': ('username', 'password1', 'password2', 'email'),
'description': "<i>These account details are shared by the admin "
"system and the game.</i>"},),)
def save_model(self, request, obj, form, change):
"""
Custom save actions.
Args:
request (Request): Incoming re |
toumorokoshi/miura | miura/tests/test_init.py | Python | mit | 202 | 0 | from miura import _parse_filter_string |
class TestParseFilterString(object):
def parse_filter_string_no_equals(self):
""" _parse_filter_string with no equals s | hould raise an exception """
|
cournape/ensetuptools | setuptools/utils.py | Python | bsd-3-clause | 2,491 | 0.001204 | """
This module is not part of the original setuptools code.
It was created because much of the code in order modules was (and still
is) unorganized and simple utility functions were defined as class methods
for no obious reason, uselessly cluttering some of the large classes.
This module is a place such functions.
"""
import os
import sys
import subprocess
import shutil
import zipfile
from os import path
from distutils import log
try:
from os import chmod as _chmod
except ImportError:
# Jython compatibility
def _chmod(*args):
pass
def chmod(path, mode):
    """Best-effort chmod: log the attempt and swallow OS errors.

    On platforms without os.chmod (e.g. Jython) the `_chmod` fallback above
    is a no-op, so this function never raises.
    """
    log.debug("changing mode of %s to %o", path, mode)
    try:
        _chmod(path, mode)
    except os.error as e:
        # `except os.error, e` modernized to the `as` form, which is valid
        # on Python 2.6+ and required on Python 3.
        log.debug("chmod failed: %s", e)
def rm_rf(file_or_dir):
    """
    Removes the file or directory (if it exists),
    returns 0 on success, 1 on failure.
    """
    if not path.exists(file_or_dir):
        # Already absent counts as success.
        return 0
    retcode = 0
    try:
        if path.isdir(file_or_dir):
            shutil.rmtree(file_or_dir)
        else:
            os.remove(file_or_dir)
    except (IOError, OSError) as err:
        # `except ..., err` modernized to the `as` form (Python 2.6+/3);
        # also repairs the garbled `retcod | e = 1` line.
        log.error("Error: could not remove %s: %s", file_or_dir, err)
        retcode = 1
    return retcode
def execute_script(py_path):
    """
    Run the Python script located at py_path in a subprocess, logging a
    warning when the interpreter exits with a non-zero status.
    """
    returncode = subprocess.call([sys.executable, py_path])
    if returncode == 0:
        return
    log.warn("WARNING: executing Python script %r returned %i",
             py_path, returncode)
def sto | re_file_from_zip(zipfile_path, name, path):
"""
Given the path to a zipfile and the name of a file within the zipfile,
store the content of the file, into location path.
If the name does not exist within the zipfile, and don't create a file,
obviously, but don't throw an exceoption.
"""
z = zipfile.ZipFile(zipfile_path)
if name in z.namelist():
fo = open(path, 'wb')
fo.write(z.read(name))
fo.close()
z.close()
def samefile(p1, p2):
    """
    Return True when p1 and p2 refer to the same file.

    Similar to os.path.samefile, which is only available on Macintosh and
    Unix; falls back to comparing normalized path strings when either path
    does not exist or the platform lacks os.path.samefile.
    """
    can_stat = (hasattr(os.path, 'samefile')
                and os.path.exists(p1)
                and os.path.exists(p2))
    if can_stat:
        return os.path.samefile(p1, p2)
    def canonical(p):
        return os.path.normpath(os.path.normcase(p))
    return bool(canonical(p1) == canonical(p2))
|
it-projects-llc/website-addons | website_sale_birthdate/__manifest__.py | Python | mit | 382 | 0 | {
"name": "website_sale_birthdate",
"author": "IT-Projects LLC, Ivan Yelizariev",
| "license": "Other OSI approved licence", # MIT
| "support": "apps@itpp.dev",
"website": "https://yelizariev.github.io",
"category": "eCommerce",
"vesion": "13.0.1.0",
"depends": ["website_sale", "partner_person"],
"data": ["views.xml"],
"installable": False,
}
|
google/init2winit | init2winit/optimizer_lib/__init__.py | Python | apache-2.0 | 603 | 0.001658 | # coding=utf-8
# Copyright 2022 The init2winit Authors.
#
# Licensed under the Apach | e License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WI | THOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
lhupfeldt/multiconf | test/include_exclude2_test.py | Python | bsd-3-clause | 13,628 | 0.003082 | # Copyright (c) 2012 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
# pylint: disable=E0611
from pytest import raises
from multiconf import mc_config, ConfigItem, ConfigException, ConfigExcludedAttributeError, MC_REQUIRED, McInvalidValue
from multiconf.decorators import required, named_as, nested_repeatables
from multiconf.envs import EnvFactory
from .utils.utils import config_error, next_line_num, file_line
from .utils.compare_json import compare_json
from .utils.tstclasses import ItemWithAA, RepeatableItemWithAA
def ce(line_num, *lines):
    # Shorthand: build the expected config_error message for this test file.
    return config_error(__file__, line_num, *lines)
ef = EnvFactory()
dev1 = ef.Env('dev1')
dev2 = ef.Env('dev2')
dev3 = ef.Env('dev3')
dev4 = ef.Env('dev4')
dev5 = ef.Env('dev5')
g_dev12 = ef.EnvGroup('g_dev12', dev1, dev2)
g_dev34 = ef.EnvGroup('g_dev23', dev3, dev4)
g_dev12_3 = ef.EnvGroup('g_dev12_3', g_dev12, dev3)
g_dev2_34 = ef.EnvGroup('g_dev2_34', dev2, g_dev34)
pp = ef.Env('pp')
prod = ef.Env('prod')
g_ppr = ef.EnvGroup('g_ppr', pp, prod)
class item(ConfigItem):
    """Simple ConfigItem used by the include/exclude tests below.

    `anattr` defaults to MC_REQUIRED, so every non-excluded env must
    provide a value for it; `anotherattr` and `b` are plain optional
    attributes.
    """
    def __init__(self, anattr=MC_REQUIRED, mc_include=None, mc_exclude=None):
        super().__init__(mc_include=mc_include, mc_exclude=mc_exclude)
        self.anattr = anattr
        self.anotherattr = None
        self.b = None
@named_as('item')
@required('anitem')
class decorated_item(ConfigItem):
    """ConfigItem exposed as 'item' that must contain a child named 'anitem'.

    Exercises the @required decorator together with include/exclude.
    """
    xx = 3
    def __init__(self, mc_include=None, mc_exclude=None):
        super().__init__(mc_include=mc_include, mc_exclude=mc_exclude)
        self.anotherattr = MC_REQUIRED
_include_exclude_for_configitem_expected_json = """{
"__class__": "McConfigRoot",
"__id__": 0000,
"env": {
"__class__": "Env",
"name": "prod"
},
"item": false,
"item #Excluded: <class 'test.include_exclude2_test.item'>": true
}"""
def test_exclude_include_overlapping_groups_excluded_resolved_with_mc_required():
@mc_config(ef, load_now=True)
def config(_):
"""Covers exclude res | olve branch"""
with item(mc_include=[g_dev12, g_dev12_3, pp, dev2], mc_exclude=[g_dev34, g_dev2_34, dev3]) as it:
it.setattr('anattr', pp=1, g_dev12_3=2)
it.setattr('b', pp=1, dev2=0)
it.setattr('anotherattr', default=111, dev5=7)
cr = c | onfig(prod)
assert not cr.item
assert compare_json(cr, _include_exclude_for_configitem_expected_json, test_excluded=True)
cr = config(dev1)
assert cr.item
cr = config(dev2)
assert cr.item
assert cr.item.b == 0
cr = config(dev3)
assert not cr.item
cr = config(dev4)
assert not cr.item
cr = config(pp)
assert cr.item
assert cr.item.anattr == 1
assert cr.item.b == 1
assert cr.item.anotherattr == 111
_include_exclude_for_configitem_required_decorator_expected_json = """{
"__class__": "McConfigRoot",
"__id__": 0000,
"env": {
"__class__": "Env",
"name": "prod"
},
"item": false,
"item #Excluded: <class 'test.include_exclude2_test.decorated_item'>": true
}"""
def test_exclude_include_overlapping_groups_excluded_resolved_with_required_decorator():
class anitem(ConfigItem):
xx = 222
@mc_config(ef, load_now=True)
def config(_):
"""Covers exclude resolve branch"""
with decorated_item(mc_include=[g_dev12, g_dev12_3, pp, dev2], mc_exclude=[g_dev34, g_dev2_34, dev3]) as it:
anitem()
it.setattr('anotherattr', default=111, dev5=7)
cr = config(prod)
assert not cr.item
assert compare_json(cr, _include_exclude_for_configitem_required_decorator_expected_json, test_excluded=True)
cr = config(dev1)
assert cr.item
cr = config(dev2)
assert cr.item
cr = config(dev3)
assert not cr.item
cr = config(dev4)
assert not cr.item
cr = config(pp)
assert cr.item.xx == 3
assert cr.item.anitem.xx == 222
assert cr.item.anotherattr == 111
def test_exclude_include_overlapping_groups_included_resolved():
@mc_config(ef, load_now=True)
def config(_):
"""Covers include resolve branch"""
with item(mc_include=[dev3, g_dev12, g_dev12_3, pp, dev2], mc_exclude=[g_dev34, g_dev2_34]) as it:
it.setattr('anattr', pp=1, g_dev12_3=2, dev5=117, g_ppr=4)
cr = config(prod)
assert not cr.item
assert compare_json(cr, _include_exclude_for_configitem_expected_json, test_excluded=True)
cr = config(dev1)
assert cr.item
cr = config(dev2)
assert cr.item
cr = config(dev3)
assert cr.item
cr = config(dev4)
assert not cr.item
cr = config(pp)
assert cr.item
assert cr.item.anattr == 1
_exclude_include_overlapping_groups_excluded_unresolved_expected_ex1 = """
ConfigException: Env('dev2') is specified in both include and exclude, with no single most specific group or direct env:
- from exclude: EnvGroup('g_dev2_34') {
Env('dev2'),
EnvGroup('g_dev23') {
Env('dev3'),
Env('dev4')
}
}
- from include: EnvGroup('g_dev12_3') {
EnvGroup('g_dev12') {
Env('dev1'),
Env('dev2')
},
Env('dev3')
}
Error in config for Env('dev2') above.
""".lstrip()
_exclude_include_overlapping_groups_excluded_unresolved_expected_ex2 = """
ConfigException: Env('dev3') is specified in both include and exclude, with no single most specific group or direct env:
- from exclude: EnvGroup('g_dev23') {
Env('dev3'),
Env('dev4')
}
- from include: EnvGroup('g_dev12_3') {
EnvGroup('g_dev12') {
Env('dev1'),
Env('dev2')
},
Env('dev3')
}
Error in config for Env('dev3') above.
""".lstrip()
def test_exclude_include_overlapping_groups_excluded_unresolved_init(capsys):
errorline = [None]
@mc_config(ef)
def config(_):
errorline[0] = next_line_num()
item(anattr=1, mc_include=[g_dev12_3, pp], mc_exclude=[g_dev34, g_dev2_34])
with raises(ConfigException):
config.load(error_next_env=True)
_sout, serr = capsys.readouterr()
assert _exclude_include_overlapping_groups_excluded_unresolved_expected_ex1 in serr
assert serr.endswith(_exclude_include_overlapping_groups_excluded_unresolved_expected_ex2)
_exclude_include_overlapping_groups_excluded_unresolved_init_reversed_ex = """
Env('dev2') is specified in both include and exclude, with no single most specific group or direct env:
- from exclude: EnvGroup('g_dev12_3') {
EnvGroup('g_dev12') {
Env('dev1'),
Env('dev2')
},
Env('dev3')
}
- from include: EnvGroup('g_dev2_34') {
Env('dev2'),
EnvGroup('g_dev23') {
Env('dev3'),
Env('dev4')
}
}""".strip()
def test_exclude_include_overlapping_groups_excluded_unresolved_init_reversed():
errorline = [None]
with raises(ConfigException) as exinfo:
@mc_config(ef, load_now=True)
def config(_):
errorline[0] = next_line_num()
item(anattr=1, mc_include=[g_dev34, g_dev2_34], mc_exclude=[g_dev12_3, pp])
assert _exclude_include_overlapping_groups_excluded_unresolved_init_reversed_ex in str(exinfo.value)
_exclude_include_overlapping_groups_excluded_unresolved_mc_select_envs_expected = """
ConfigError: Env('dev2') is specified in both include and exclude, with no single most specific group or direct env:
- from exclude: EnvGroup('g_dev2_34') {
Env('dev2'),
EnvGroup('g_dev23') {
Env('dev3'),
Env('dev4')
}
}
- from include: EnvGroup('g_dev12_3') {
EnvGroup('g_dev12') {
Env('dev1'),
Env('dev2')
},
Env('dev3')
}
""".strip()
def test_exclude_include_overlapping_groups_excluded_unresolved_mc_select_envs(capsys):
errorline = [None]
with raises(ConfigException) as exinfo:
@mc_config(ef, load_now=True)
def config(_):
with item(anattr=1) as it:
errorline[0] = next_line_num()
it.mc_select_envs(include=[g_dev12_3, pp], exclude=[g_dev34, g_dev2_34])
_sout, serr = capsys.readouterr()
assert serr.startswith(file_line(__file__, errorline[0]))
assert _exclude_include_overlapping_groups_excluded_unresolved_mc_select_envs_expected in serr
assert "There wa |
xu2243051/idc_assets_management | idc_assets_management/idc_assets_management/settings/base.py | Python | gpl-2.0 | 7,281 | 0.004944 | """Common settings and globals."""
from os.path import abspath, basename, dirname, join, normpath
from sys import path
########## PATH CONFIGURATION
# Absolute filesystem path to the Django project directory:
DJANGO_ROOT = dirname(dirname(abspath(__file__)))
# Absolute filesystem path to the top-level project folder:
SITE_ROOT = dirname(DJANGO_ROOT)
# Site name:
SITE_NAME = basename(DJANGO_ROOT)
# Add our project to our pythonpath, this way we don't need to type our project
# name in our dotted import paths:
path.append(DJANGO_ROOT)
########## END PATH CONFIGURATION
########## DEBUG CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = False
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
TEMPLATE_DEBUG = DEBUG
########## END DEBUG CONFIGURATION
########## MANAGER CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = (
('Your Name', 'your_email@example.com'),
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
########## END MANAGER CONFIGURATION
########## DATABASE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.',
'NAME': '',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
########## END DATABASE CONFIGURATION
########## GENERAL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#time-zone
TIME_ZONE = 'America/Los_Angeles'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'en-us'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
########## END GENERAL CONFIGURATION
########## MEDIA CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = normpath(join(SITE_ROOT, 'media'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
########## END MEDIA CONFIGURATION
########## STATIC FILE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/ | #static-root
STATIC_ROOT = normpath(join(SITE_ROOT, 'assets'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATI | C_URL = '/static/'
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = (
normpath(join(SITE_ROOT, 'static')),
)
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
########## END STATIC FILE CONFIGURATION
########## SECRET CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Note: This key should only be used for development and testing.
SECRET_KEY = r"n_e2u6ovn212spq@%z_zkbx19!o&0^nji=*x%hl=y#-xzt+4jr"
########## END SECRET CONFIGURATION
########## SITE CONFIGURATION
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
########## END SITE CONFIGURATION
########## FIXTURE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS
FIXTURE_DIRS = (
normpath(join(SITE_ROOT, 'fixtures')),
)
########## END FIXTURE CONFIGURATION
########## TEMPLATE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.core.context_processors.tz',
'django.contrib.messages.context_processors.messages',
'django.core.context_processors.request',
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
TEMPLATE_DIRS = (
normpath(join(SITE_ROOT, 'templates')),
)
########## END TEMPLATE CONFIGURATION
########## MIDDLEWARE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#middleware-classes
MIDDLEWARE_CLASSES = (
# Default Django middleware.
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
########## END MIDDLEWARE CONFIGURATION
########## URL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#root-urlconf
ROOT_URLCONF = '%s.urls' % SITE_NAME
########## END URL CONFIGURATION
########## APP CONFIGURATION
DJANGO_APPS = (
# Default Django apps:
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Useful template tags:
# 'django.contrib.humanize',
# Admin panel and documentation:
'django.contrib.admin',
# 'django.contrib.admindocs',
)
THIRD_PARTY_APPS = (
# Database migration helpers:
'south',
)
# Apps specific for this project go here.
LOCAL_APPS = (
'manager',
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
########## END APP CONFIGURATION
########## LOGGING CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#logging
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
########## END LOGGING CONFIGURATION
########## WSGI CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = '%s.wsgi.application' % SITE_NAME
########## END WSGI CONFIGURATION
|
andyvandermeyde/match_subtitles | test/test_suite.py | Python | mit | 1,796 | 0.012249 | import unittest
import os
import inspect
import shutil
import config
import rename_subtitles
import upload_subtitles
class TestSuit(unittest.TestCase):
# TESTS
def test_basic_match(self):
rename_subtitles.main()
upload_subtitles.main()
#Match simple Test_Series_S01E01.srt with Test_Series_S01E01.mp4
new_subtitle_path = os.path.join(root_folder, "Test_Series_S01E01.srt")
#check that the subtitle was moved to the root folder
self.assertTrue(os.path.exists(new_subtitle_path))
#check that the srt doesn't exist in the upload folder anymore
old_subtitle_path = os.path.join(upload_folder, "Test_Series_S01E01.srt")
self.assertFalse(os.path.exists(old_subtitle_path))
#EXECUTION
#Run the tests, setup environment first
if __name__ == '__main__':
#Need to create an upload folder
#and a root folder
directory = os.path.dirname(os.path | .abspath(inspect.getfile(inspect.currentframe())))
print directory
resources = os.path.join(directory, "resources/")
#If the path already exists
if os.path.exists(resources):
#Remove everything to refresh the test environment
shutil.rmtree(resources)
#Create the resources folder
os.makedirs(resources)
root_folder = os.path.join(resources | , "root/")
upload_folder = os.path.join(resources, "upload/")
#Create root folder
os.makedirs(root_folder)
#Create upload folder
os.makedirs(upload_folder)
#and some empty files for the tests
open(os.path.join(root_folder, "Test_Series_S01E01.mp4"), 'a').close()
open(os.path.join(upload_folder, "Test_Series_S01E01.srt"), 'a').close()
config.set("upload_dir", upload_folder)
config.set("root_dir", root_folder)
unittest.main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.