repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
TeePaps/estreamer | estreamer/eventrequest.py | Python | apache-2.0 | 5,410 | 0.0122 | #local
from message import MessageHeader
from base import Struct, StructArray
#standard
from datetime import datetime
from ctypes import LittleEndianStructure, Union, c_uint32
from six import iteritems, raise_from
import inspect
import config
import sys
class Error(Exception): pass
class InvalidTimestampError(Error): pass
class InvalidFlagError(Error): pass
class INTRUSION_EVENTS(object):
version = 8 # 5.4+
code = 12
class METADATA(object):
version = 4 #4.7+
code = 21
class CORRELATION(object):
version = 9 #5.4+
code = 31
class DISCOVERY(object):
version = 11 #5.3.1+
code = 61
class CONNECTION(object):
version = 12 #5.4+
code = 71
class USER(object):
version = 4 #5.2+
code = 91
class MALWARE(object):
version = 6 #5.4+
code = 101
class FILE(object):
version = 5 #5.4+
code = 111
class IMPACT(object):
version = 2 #5.3+
code = 131
class TERMINATE(object):
version = 0
code = 0
class StreamingEventType(Struct):
_fields_ = [
('version', 'uint16', 0),
('code', 'uint16', 0),
]
class StreamingEventRequest(Struct):
_fields_ = [
('type', 'uint32', 6667),
('length', 'uint32', 0),
('flags', 'uint32', 0),
('timestamp', 'uint32', 0),
('service_array', StructArray(StreamingEventType), None)
]
def __init__(self, *args, **kwargs):
super(StreamingEventRequest, self).__init__(*args, **kwargs)
if 'length' not in kwargs:
self.length = sum([len(i) for i in self.service_array] + [16])
class FlagBits(LittleEndianStructure):
_fields_ = [
('packets', c_uint32, 1),
('metadata', c_uint32, 1),
('ids', c_uint32, 1),
('discovery', c_uint32, 1),
('correlation', c_uint32, 1),
('impact', c_uint32, 1),
('ids_1', c_uint32, 1),
('discovery_v2', c_uint32, 1),
('connection', c_uint32, 1),
('correlation_v2', c_uint32, 1),
('discovery_v3', c_uint32, 1),
('disable_events', c_uint32, 1),
('connection_v3', c_uint32, 1),
('correlation_v3', c_uint32, 1),
('metadata_v2', c_uint32, 1),
('metadata_v3', c_uint32, 1),
('reserved', c_uint32, | 1),
('discovery_v4', c_uint32, 1),
( | 'connection_v4', c_uint32, 1),
('correlation_v4', c_uint32, 1),
('metadata_v4', c_uint32, 1),
('user', c_uint32, 1),
('correlation_v5', c_uint32, 1),
('timestamp', c_uint32, 1),
('discovery_v5', c_uint32, 1),
('discovery_v6', c_uint32, 1),
('connection_v5', c_uint32, 1),
('extra_data', c_uint32, 1),
('discovery_v7', c_uint32, 1),
('correlation_v6', c_uint32, 1),
('extended_request', c_uint32, 1),
]
class Flags(Union):
_fields_ = [
('flag', FlagBits),
('from_bytes', c_uint32)
]
class EventRequest(Struct):
_fields_ = [
('timestamp', 'uint32', 0xFFFFFFFF),
('flags', 'uint32', 0x40800001)
]
'''
Extended Request Event code and versioning
'''
class RequestEvent(object):
def __init__(self, start_from, **kwargs):
self.flags = Flags()
if start_from == 0 or start_from == 0xFFFFFFFF:
self.timestamp = start_from
else:
try:
datetime.fromtimestamp(start_from)
except TypeError as exc:
raise_from(InvalidTimestampError('Timestamp invalid (0, 0xFFFFFFFF, or Unix Timestamp'), exc)
else:
self.timestamp = start_from
for k,v in iteritems(kwargs):
try:
getattr(self.flags.flag, k)
setattr(self.flags.flag, k, int(v))
except AttributeError as exc:
raise_from(InvalidFlagError('Invalid flag: {}'.format(k)), exc)
# save the timestamp and flags for reuse (if needed)
Struct.set_ts(self.timestamp)
Struct.set_flags(self.flags.from_bytes)
# build the request
self.event_request = EventRequest(timestamp=self.timestamp,flags=self.flags.from_bytes)
self.message_header = MessageHeader(type=2, data=self.event_request.pack())
self.record = self.message_header.pack()
class StreamEventRequest(object):
def __init__(self, type_list):
mod_name = sys.modules[__name__]
cls_list = [
cls[0]
for cls in inspect.getmembers(mod_name)
if cls[0].isupper() and inspect.isclass(cls[1]) and cls[1].__module__ == __name__
]
try:
type_list.remove('TERMINATE') # can't hold order, so remove it and add it back when done
except ValueError as exc:
pass
type_list = list(set(type_list).intersection(set(cls_list))) # remove bad and duplicate values
array_args = [
{'version': getattr(getattr(mod_name, rtype), 'version'),
'code': getattr(getattr(mod_name, rtype), 'code')}
for rtype in type_list
]
array_args.append({'code': 0, 'version': 0}) # add TERMINATE as last req
self.streaming_event_request = StreamingEventRequest(service_array=array_args, timestamp=Struct.get_ts(), flags=Struct.get_flags())
self.message_header = MessageHeader(type=2049, data=self.streaming_event_request)
self.record = self.message_header.pack()
|
tapo-it/odoo-addons-worktrail | addons_worktrail/tapoit_worktrail/model/tapoit_worktrail.py | Python | agpl-3.0 | 13,254 | 0.003546 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2012 TaPo-IT (http://tapo-it.at) All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import netsvc, pooler
from openerp.osv import orm, osv, fields
from openerp.tools.translate import _
import json
import logging
import time
_logger = logging.getLogger(__name__)
class tapoit_worktrail_config(orm.Model):
""" TaPo-IT Worktrail Config """
_name = "tapoit_worktrail.server.conf"
_description = "Worktrail Server Configuration"
def create(self, cr, uid, vals, context=None):
context = dict(context or {})
response = self.get_request_key(cr, uid, vals)
if 'authtoken' in response:
vals['request_key'] = response['requestkey']
vals['auth_token'] = response['authtoken']
vals['access_granted'] = 'pending'
vals['redirect_url'] = response['redirecturl'].replace('tapolan', 'net')
new_id = super(tapoit_worktrail_config, self).create(cr, uid, vals, context=context)
return new_id
def write(self, cr, uid, ids, vals, context=None):
context = dict(context or {})
_logger.info("DEBUG: %s", vals)
for config in self.browse(cr, uid, ids):
sync_type = vals.get('type', '')
if config.type:
sync_type = config.type
values = {
'host': vals.get('host', config.host),
'secure': vals.get('secure', config.secure),
'port': vals.get('port', config.port),
'type': sync_type,
'app_key': vals.get('app_key', config.app_key),
'secret_api_key': vals.get('secret_api_key', config.secret_api_key),
}
if not vals.get('auth_token', config.auth_token):
response = self.get_request_key(cr, uid, values)
if 'authtoken' in response:
vals['request_key'] = response['requestkey']
vals['auth_token'] = response['authtoken']
vals['redirect_url'] = response['redirecturl'].replace('tapolan', 'net')
values['request_key'] = vals.get('request_key', config.request_key)
if values['request_key']:
response = self.test_auth_token(cr, uid, values)
if 'status' in response:
vals['access_granted'] = response['status']
# header['X-AUTHTOKEN']
# url = "%s://%s:%s/rest/token/auth/?requestkey=%s" % (config.protocol, config.host, config.port, response['requestkey'])
# response = self.pool.get('tapoit_worktrail.sync.execution').json_request(cr, uid, url, data, header=header)
return super(tapoit_worktrail_config, self).write(cr, uid, ids, vals, context=context)
def get_request_key(self, cr, uid, vals):
protocol = 'http'
if 'secure' in vals:
protocol = 'https'
header = {
'Content-Type': 'application/x-www-form-urlencoded;charset=utf-8',
'X-APPKEY': vals['app_key'],
'X-SECRETAPIKEY': vals['secret_api_key'],
}
if 'type' in vals:
if vals['type'] == 'workentries':
data = {
'accesstype': 'company',
'scopes': 'read-tasks,write-tasks,read-employees,read-workentries,write-workentries',
}
elif vals['type'] == 'hubstream-personal':
data = {
'accesstype': 'employee',
'scopes': 'sync-hub-data,read-employees',
}
else:
raise orm.except_orm(_('Error !'), _('Type(%s) not possible!') % vals['type'])
url = "%s://%s:%s | /rest/token/request/" % (protocol, vals[ | 'host'], vals['port'])
return self.pool.get('tapoit_worktrail.sync.execution').json_request(cr, uid, url, data, header=header)
def test_auth_token(self, cr, uid, vals):
protocol = 'http'
if 'secure' in vals:
protocol = 'https'
header = {
'Content-Type': 'application/x-www-form-urlencoded;charset=utf-8',
'X-APPKEY': vals['app_key'],
'X-SECRETAPIKEY': vals['secret_api_key'],
}
data = {
'requestkey': vals['request_key'],
}
url = "%s://%s:%s/rest/token/confirm/" % (protocol, vals['host'], vals['port'])
return self.pool.get('tapoit_worktrail.sync.execution').json_request(cr, uid, url, data, header=header)
def sync_messages_hubstream(self, cr, uid, ids, context=None):
for config in self.browse(cr, uid, ids):
protocol = 'http'
if config.secure:
protocol = 'https'
header = {
'Content-Type': 'application/x-www-form-urlencoded;charset=utf-8',
'X-APPKEY': config.app_key,
'X-SECRETAPIKEY': config.secret_api_key,
'X-AUTHTOKEN': config.auth_token,
}
url = "%s://%s:%s/rest/hub/entries/clean/" % (protocol, config.host, config.port)
self.pool.get('tapoit_worktrail.sync.execution').json_request(cr, uid, url, {}, header=header)
create = []
user = self.pool.get('res.users').browse(cr, uid, uid)
notifications = self.pool.get('mail.message')
message_ids = notifications.search(cr, uid, [('author_id', '=', user.partner_id.id)])
for message in notifications.browse(cr, uid, message_ids):
create.append(
{
'time': self.pool.get('tapoit_worktrail.sync.execution').convertDatetime2Timestamp(cr, uid, message.date),
# 'endtime': OPTIONAL,
'srctype': 'other',
'summary': message.body
# 'link': OPTIONAL,
}
)
# _logger.info("Hubentries: %s", create)
hubentries = {
'create': create
}
data = {'data': json.dumps(hubentries)}
url = "%s://%s:%s/rest/hub/entries/create/" % (protocol, config.host, config.port)
return self.pool.get('tapoit_worktrail.sync.execution').json_request(cr, uid, url, data, header=header)
def get_status(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {})
def reset_app(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'auth_token': False, 'request_key': False})
STATE_ACCESS = [
('pending', 'Pending'),
('active', 'Active'),
('rejected', 'Rejected'),
]
_columns = {
'active': fields.boolean('Active'),
'dbname': fields.char('Local Database Name', size=80, required=True, help="This will constraint the sync to a certain database which does protect data integrity"),
'name': fields.char('Name', size=50, select=1),
'host': fields.char('Remote Host', size=200, required=True, select=1),
'port': fields.char('Remote Port', size=5, required=True, select=1),
'type': fields.selection(
(
('workentries', 'Project/Task/Work Sync'),
('hubstream', 'Company Hub |
rednaxelafx/apache-spark | python/pyspark/tests/test_appsubmit.py | Python | apache-2.0 | 10,321 | 0.00126 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the Lice | nse.
#
import os
import re
import shutil
import subprocess
import tempfile
import unittest
import zipfile
class SparkSubmitTests(unittest.TestCase):
def setUp(self):
self.programDir = tempfile.mkdtemp()
tmp_dir = tempfile.gettempdir()
self.sparkSubmit = [
os.path.join(os.environ.get("SPARK_HOME"), "bin", "spark-submit"),
"--conf", "spark.dr | iver.extraJavaOptions=-Djava.io.tmpdir={0}".format(tmp_dir),
"--conf", "spark.executor.extraJavaOptions=-Djava.io.tmpdir={0}".format(tmp_dir),
]
def tearDown(self):
shutil.rmtree(self.programDir)
def createTempFile(self, name, content, dir=None):
"""
Create a temp file with the given name and content and return its path.
Strips leading spaces from content up to the first '|' in each line.
"""
pattern = re.compile(r'^ *\|', re.MULTILINE)
content = re.sub(pattern, '', content.strip())
if dir is None:
path = os.path.join(self.programDir, name)
else:
os.makedirs(os.path.join(self.programDir, dir))
path = os.path.join(self.programDir, dir, name)
with open(path, "w") as f:
f.write(content)
return path
def createFileInZip(self, name, content, ext=".zip", dir=None, zip_name=None):
"""
Create a zip archive containing a file with the given content and return its path.
Strips leading spaces from content up to the first '|' in each line.
"""
pattern = re.compile(r'^ *\|', re.MULTILINE)
content = re.sub(pattern, '', content.strip())
if dir is None:
path = os.path.join(self.programDir, name + ext)
else:
path = os.path.join(self.programDir, dir, zip_name + ext)
zip = zipfile.ZipFile(path, 'w')
zip.writestr(name, content)
zip.close()
return path
def create_spark_package(self, artifact_name):
group_id, artifact_id, version = artifact_name.split(":")
self.createTempFile("%s-%s.pom" % (artifact_id, version), ("""
|<?xml version="1.0" encoding="UTF-8"?>
|<project xmlns="http://maven.apache.org/POM/4.0.0"
| xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
| xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
| http://maven.apache.org/xsd/maven-4.0.0.xsd">
| <modelVersion>4.0.0</modelVersion>
| <groupId>%s</groupId>
| <artifactId>%s</artifactId>
| <version>%s</version>
|</project>
""" % (group_id, artifact_id, version)).lstrip(),
os.path.join(group_id, artifact_id, version))
self.createFileInZip("%s.py" % artifact_id, """
|def myfunc(x):
| return x + 1
""", ".jar", os.path.join(group_id, artifact_id, version),
"%s-%s" % (artifact_id, version))
def test_single_script(self):
"""Submit and test a single script file"""
script = self.createTempFile("test.py", """
|from pyspark import SparkContext
|
|sc = SparkContext()
|print(sc.parallelize([1, 2, 3]).map(lambda x: x * 2).collect())
""")
proc = subprocess.Popen(self.sparkSubmit + [script], stdout=subprocess.PIPE)
out, err = proc.communicate()
self.assertEqual(0, proc.returncode)
self.assertIn("[2, 4, 6]", out.decode('utf-8'))
def test_script_with_local_functions(self):
"""Submit and test a single script file calling a global function"""
script = self.createTempFile("test.py", """
|from pyspark import SparkContext
|
|def foo(x):
| return x * 3
|
|sc = SparkContext()
|print(sc.parallelize([1, 2, 3]).map(foo).collect())
""")
proc = subprocess.Popen(self.sparkSubmit + [script], stdout=subprocess.PIPE)
out, err = proc.communicate()
self.assertEqual(0, proc.returncode)
self.assertIn("[3, 6, 9]", out.decode('utf-8'))
def test_module_dependency(self):
"""Submit and test a script with a dependency on another module"""
script = self.createTempFile("test.py", """
|from pyspark import SparkContext
|from mylib import myfunc
|
|sc = SparkContext()
|print(sc.parallelize([1, 2, 3]).map(myfunc).collect())
""")
zip = self.createFileInZip("mylib.py", """
|def myfunc(x):
| return x + 1
""")
proc = subprocess.Popen(self.sparkSubmit + ["--py-files", zip, script],
stdout=subprocess.PIPE)
out, err = proc.communicate()
self.assertEqual(0, proc.returncode)
self.assertIn("[2, 3, 4]", out.decode('utf-8'))
def test_module_dependency_on_cluster(self):
"""Submit and test a script with a dependency on another module on a cluster"""
script = self.createTempFile("test.py", """
|from pyspark import SparkContext
|from mylib import myfunc
|
|sc = SparkContext()
|print(sc.parallelize([1, 2, 3]).map(myfunc).collect())
""")
zip = self.createFileInZip("mylib.py", """
|def myfunc(x):
| return x + 1
""")
proc = subprocess.Popen(self.sparkSubmit + ["--py-files", zip, "--master",
"local-cluster[1,1,1024]", script],
stdout=subprocess.PIPE)
out, err = proc.communicate()
self.assertEqual(0, proc.returncode)
self.assertIn("[2, 3, 4]", out.decode('utf-8'))
def test_package_dependency(self):
"""Submit and test a script with a dependency on a Spark Package"""
script = self.createTempFile("test.py", """
|from pyspark import SparkContext
|from mylib import myfunc
|
|sc = SparkContext()
|print(sc.parallelize([1, 2, 3]).map(myfunc).collect())
""")
self.create_spark_package("a:mylib:0.1")
proc = subprocess.Popen(
self.sparkSubmit + ["--packages", "a:mylib:0.1", "--repositories",
"file:" + self.programDir, script],
stdout=subprocess.PIPE)
out, err = proc.communicate()
self.assertEqual(0, proc.returncode)
self.assertIn("[2, 3, 4]", out.decode('utf-8'))
def test_package_dependency_on_cluster(self):
"""Submit and test a script with a dependency on a Spark Package on a cluster"""
script = self.createTempFile("test.py", """
|from pyspark import SparkContext
|from mylib import myfunc
|
|sc = SparkContext()
|print(sc.parallelize([1, 2, 3]).map(myfunc).collect())
""")
self.create_spark_package("a:mylib:0.1")
proc = subprocess.Popen(
self.sparkSubmit + ["--packages", "a:mylib:0.1", "--repositories",
"file:" + self.programDir, "--master", "local-cluster[1,1,1024]",
script],
stdout=subprocess. |
alexises/python-cah | pythonCah/log.py | Python | lgpl-3.0 | 431 | 0 | import logging
import sys
def prepareLogging(loggingLevel):
logger = logging.get | Logger()
logger.setLevel(loggingLevel)
formatStr = \
'%(asctime)s [%(levelname)-8s] %(filename)s:%(lineno)s %(message)s'
formater = logging.Formatter(formatStr, '%H:%M:%S')
handler = logging.StreamHandler(sys.stdout)
| handler.setFormatter(formater)
handler.setLevel(loggingLevel)
logger.addHandler(handler)
|
laslabs/vertical-medical | sale_medical_prescription/tests/test_sale_order.py | Python | agpl-3.0 | 1,725 | 0 | # -*- coding: utf-8 -*-
# Copyright 2016 LasLabs Inc.
# License GPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html).
from odoo.tests.common import TransactionCase
class TestSaleOrder(TransactionCase):
def setUp(self):
super(TestSaleOrder, self).setUp()
self.sale_7 = self.env.ref(
'sale_medical_prescription.sale_order_medical_order_7'
)
def test_compute_patient_ids(self):
""" Test patient_ids properly computed """
exp = self.sale_7.order_line.mapped('patient_id').ids
exp = sorted(set(exp))
res = sorted(self.sale_7.patient_ids.ids)
self.assertEqual(
res, exp,
)
def test_compute_prescription_order_ids(self):
""" Test rx orders properly computed """
exp = self.sale_7.order_line.mapped(
'prescription_order_line_id.prescription_order_id'
).ids
exp = sorted(set(exp))
res = sorted(self.sale_7.prescription_order_ids.ids)
self.assertEqual(
res, exp,
)
def test_compute_prescription_order_line_ids(self):
""" Test rx_lines properly computed """
exp = self.sale_7.order_line.mapped('prescription_order_line_id').ids
exp = sorted(set(exp))
res = so | rted(self.sale_7.prescription_order_line_ids | .ids)
self.assertEqual(
res, exp,
)
def test_compute_prescription_order_line_count(self):
""" Test rx line count properly computed """
exp = self.sale_7.order_line.mapped('prescription_order_line_id').ids
exp = len(exp)
res = len(self.sale_7.prescription_order_line_ids.ids)
self.assertEqual(
res, exp,
)
|
sivaprakashniet/push_pull | p2p/lib/python2.7/site-packages/celery/tests/concurrency/test_concurrency.py | Python | bsd-3-clause | 3,179 | 0 | from __future__ import absolute_import
import os
from itertools import count
from mock import Mock
from celery.concurrency.base import apply_target, BasePool
from celery.tests.case import AppCase
class test_BasePool(AppCase):
def test_apply_target(self):
scratch = {}
counter = count(0)
def gen_callback(name, retval=None):
def callback(*args):
scratch[name] = (next(counter), args)
return retval
return callback
apply_target(gen_callback('target', 42),
args=(8, 16),
callback=gen_callback('callback'),
accept_callback=gen_callback('accept_callback'))
self.assertDictContainsSubset(
{'target': (1, (8, 16)), 'callback': (2, (42, ))},
scratch,
)
pa1 = scratch['accept_callback']
self.assertEqual(0, pa1[0])
self.assertEqual(pa1[1][0], os.getpid())
self.assertTrue(pa1[1][1])
# No accept callback
scratch.clear()
apply_target(gen_callback('target', 42),
args=(8, 16),
callback=gen_callback('callback'),
accept_callback=None)
self.assertDictEqual(scratch,
{'target': (3, (8, 16)),
'callback': (4, (42, ))})
def test_does_not_debug(self):
x = BasePool(10)
x._does_debug = False
x.apply_async(object)
def test_num_processes(self):
self.assertEqual(BasePool(7).num_processes, 7)
def test_interface_on_start(self):
BasePool(10).on_start()
def test_interface_on_stop(self):
BasePool(10).on_stop()
def test_interface_on_apply(self):
BasePool(10).on_apply()
def test_interface_info(self):
self.assertDictEqual(BasePool(10).info, {})
def test_active(self):
p = BasePool( | 10)
self.assertFalse(p.active)
p._state = p.RUN
self.assertTrue(p.active)
def test_restart(self):
| p = BasePool(10)
with self.assertRaises(NotImplementedError):
p.restart()
def test_interface_on_terminate(self):
p = BasePool(10)
p.on_terminate()
def test_interface_terminate_job(self):
with self.assertRaises(NotImplementedError):
BasePool(10).terminate_job(101)
def test_interface_did_start_ok(self):
self.assertTrue(BasePool(10).did_start_ok())
def test_interface_register_with_event_loop(self):
self.assertIsNone(
BasePool(10).register_with_event_loop(Mock()),
)
def test_interface_on_soft_timeout(self):
self.assertIsNone(BasePool(10).on_soft_timeout(Mock()))
def test_interface_on_hard_timeout(self):
self.assertIsNone(BasePool(10).on_hard_timeout(Mock()))
def test_interface_close(self):
p = BasePool(10)
p.on_close = Mock()
p.close()
self.assertEqual(p._state, p.CLOSE)
p.on_close.assert_called_with()
def test_interface_no_close(self):
self.assertIsNone(BasePool(10).on_close())
|
yelizariev/addons-yelizariev | web_multi_attachment_base/__init__.py | Python | lgpl-3.0 | 70 | 0 | # | License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.ht | ml).
|
detrout/debian-statsmodels | statsmodels/tsa/api.py | Python | bsd-3-clause | 661 | 0 | from .ar_model import AR
from .arima_model import ARMA, ARIMA
from . import vector_ar as var
from .arima_process import arma_generate_sample, ArmaProcess
from .vector_ar.var_model import VAR
from .vector_ar.svar_model import SVAR
from .vector_ar.dynamic import DynamicVAR
from .filters import api as filters
from . import tsatool | s
from .tsatools import (ad | d_trend, detrend, lagmat, lagmat2ds, add_lag)
from . import interp
from . import stattools
from .stattools import *
from .base import datetools
from .seasonal import seasonal_decompose
from ..graphics import tsaplots as graphics
from .x13 import x13_arima_select_order
from .x13 import x13_arima_analysis
|
mattbernst/polyhartree | support/ansible/modules/extras/packaging/os/svr4pkg.py | Python | gpl-3.0 | 7,426 | 0.008888 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Boyd Adamson <boyd () boydadamson.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: svr4pkg
short_description: Manage Solaris SVR4 packages
description:
- Manages SVR4 packages on Solaris 10 and 11.
- These were the native packages on Solaris <= 10 and are available
as a legacy feature in Solaris 11.
- Note that this is a very basic packaging system. It will not enforce
dependencies on install or remove.
version_added: "0.9"
author: Boyd Adamson
options:
name:
description:
- Package name, e.g. C(SUNWcsr)
required: true
state:
description:
- Whether to install (C(present)), or remove (C(absent)) a package.
- If the package is to be installed, then I(src) is required.
- The SVR4 package system doesn't provide an upgrade operation. You need to uninstall the old, then install the new package.
required: true
choices: ["present", "absent"]
src:
description:
- Specifies the location to install the package from. Required when C(state=present).
- "Can be any path acceptable to the C(pkgadd) command's C(-d) option. e.g.: C(somefile.pkg), C(/dir/with/pkgs), C(http:/server/mypkgs.pkg)."
- If using a file or directory, they must already be accessible by the host. See the M(copy) module for a way to get them there.
proxy:
descrip | tion:
- HTTP[s] proxy to be used if C(src) is a URL.
response_file:
description:
- Specifies the location of a response file to be used if package expects input on install. (added in Ansible 1.4)
required: false
zone:
| description:
- Whether to install the package only in the current zone, or install it into all zones.
- The installation into all zones works only if you are working with the global zone.
required: false
default: "all"
choices: ["current", "all"]
version_added: "1.6"
category:
description:
- Install/Remove category instead of a single package.
required: false
choices: ["true", "false"]
version_added: "1.6"
'''
EXAMPLES = '''
# Install a package from an already copied file
- svr4pkg: name=CSWcommon src=/tmp/cswpkgs.pkg state=present
# Install a package directly from an http site
- svr4pkg: name=CSWpkgutil src=http://get.opencsw.org/now state=present zone=current
# Install a package with a response file
- svr4pkg: name=CSWggrep src=/tmp/third-party.pkg response_file=/tmp/ggrep.response state=present
# Ensure that a package is not installed.
- svr4pkg: name=SUNWgnome-sound-recorder state=absent
# Ensure that a category is not installed.
- svr4pkg: name=FIREFOX state=absent category=true
'''
import os
import tempfile
def package_installed(module, name, category):
cmd = [module.get_bin_path('pkginfo', True)]
cmd.append('-q')
if category:
cmd.append('-c')
cmd.append(name)
rc, out, err = module.run_command(' '.join(cmd))
if rc == 0:
return True
else:
return False
def create_admin_file():
(desc, filename) = tempfile.mkstemp(prefix='ansible_svr4pkg', text=True)
fullauto = '''
mail=
instance=unique
partial=nocheck
runlevel=quit
idepend=nocheck
rdepend=nocheck
space=quit
setuid=nocheck
conflict=nocheck
action=nocheck
networktimeout=60
networkretries=3
authentication=quit
keystore=/var/sadm/security
proxy=
basedir=default
'''
os.write(desc, fullauto)
os.close(desc)
return filename
def run_command(module, cmd):
progname = cmd[0]
cmd[0] = module.get_bin_path(progname, True)
return module.run_command(cmd)
def package_install(module, name, src, proxy, response_file, zone, category):
adminfile = create_admin_file()
cmd = [ 'pkgadd', '-n']
if zone == 'current':
cmd += [ '-G' ]
cmd += [ '-a', adminfile, '-d', src ]
if proxy is not None:
cmd += [ '-x', proxy ]
if response_file is not None:
cmd += [ '-r', response_file ]
if category:
cmd += [ '-Y' ]
cmd.append(name)
(rc, out, err) = run_command(module, cmd)
os.unlink(adminfile)
return (rc, out, err)
def package_uninstall(module, name, src, category):
adminfile = create_admin_file()
if category:
cmd = [ 'pkgrm', '-na', adminfile, '-Y', name ]
else:
cmd = [ 'pkgrm', '-na', adminfile, name]
(rc, out, err) = run_command(module, cmd)
os.unlink(adminfile)
return (rc, out, err)
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(required = True),
state = dict(required = True, choices=['present', 'absent']),
src = dict(default = None),
proxy = dict(default = None),
response_file = dict(default = None),
zone = dict(required=False, default = 'all', choices=['current','all']),
category = dict(default=False, type='bool')
),
supports_check_mode=True
)
state = module.params['state']
name = module.params['name']
src = module.params['src']
proxy = module.params['proxy']
response_file = module.params['response_file']
zone = module.params['zone']
category = module.params['category']
rc = None
out = ''
err = ''
result = {}
result['name'] = name
result['state'] = state
if state == 'present':
if src is None:
module.fail_json(name=name,
msg="src is required when state=present")
if not package_installed(module, name, category):
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err) = package_install(module, name, src, proxy, response_file, zone, category)
# Stdout is normally empty but for some packages can be
# very long and is not often useful
if len(out) > 75:
out = out[:75] + '...'
elif state == 'absent':
if package_installed(module, name, category):
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err) = package_uninstall(module, name, src, category)
out = out[:75]
# Success, Warning, Interruption, Reboot all, Reboot this return codes
if rc in (0, 2, 3, 10, 20):
result['changed'] = True
# no install nor uninstall, or failed
else:
result['changed'] = False
# Fatal error, Administration, Administration Interaction return codes
if rc in (1, 4 , 5):
result['failed'] = True
else:
result['failed'] = False
if out:
result['stdout'] = out
if err:
result['stderr'] = err
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
main()
|
robinson96/GRAPE | test/testClone.py | Python | bsd-3-clause | 4,326 | 0.005779 | import os
import shutil
import sys
import tempfile
import unittest
import testGrape
if not ".." in sys.path:
sys.path.insert(0, "..")
from vine import grapeMenu, clone, grapeGit as git
class TestClone(testGrape.TestGrape):
    def testClone(self):
        """Recursive clone of the primary test repo succeeds and keeps it as a remote."""
        self.setUpConfig()
        self.queueUserInput(['\n', '\n', '\n', '\n'])
        args = [self.repo, self.repos[1], "--recursive"]
        ret = grapeMenu.menu().applyMenuChoice("clone", args)
        self.assertTrue(ret)
        contents = self.output.getvalue()
        # check to make sure we didn't get a usage string dump
        # (member first, container second -- the original call had the
        # arguments swapped, which made this assertion vacuously true)
        self.assertNotIn("Usage: grape-clone", contents)
        # check to make sure we didn't see a GRAPE WARNING
        self.assertNotIn("WARNING", contents, "GRAPE ISSUED A WARNING DURING A CLONE\n%s" % contents)
        # check to make sure the new repo has the old repo as a remote
        os.chdir(self.repos[1])
        remote = git.showRemote()
        self.assertIn(self.repo, remote)

    def testHelpMessage(self):
        """--help exits via SystemExit and prints the Clone docstring."""
        args = ["--help"]
        with self.assertRaises(SystemExit):
            grapeMenu.menu().applyMenuChoice("clone", args)
        self.assertIn(clone.Clone.__doc__, self.output.getvalue())

    def testClone02(self):
        """Plain (non-recursive) clone into a temporary directory succeeds."""
        tempDir = tempfile.mkdtemp()
        args = [self.repo, tempDir]
        try:
            self.queueUserInput(["\n", "\n", "\n", "\n"])
            ret = grapeMenu.menu().applyMenuChoice("clone", args)
            self.assertTrue(ret, "vine.clone returned failure")
            # ToDo: Finish checking contents
        finally:
            shutil.rmtree(tempDir)

    def testRecursiveCloneWithSubmodule(self):
        """A recursive clone pulls in git submodules."""
        # make a repo to turn into a submodule
        git.clone("--mirror %s %s " % (self.repo, self.repos[1]))
        # add repo2 as a submodule to repo1
        os.chdir(self.repo)
        git.submodule("add %s %s" % (os.path.join(self.repos[1]), "submodule1"))
        git.commit("-m \"added submodule1\"")
        # Now clone the repo into a temp dir and make sure the submodule is in the clone.
        # Create the temp dir *before* the try block so the finally clause cannot
        # hit a NameError if mkdtemp() itself fails.
        tempDir = tempfile.mkdtemp()
        try:
            args = [self.repo, tempDir, "--recursive"]
            self.queueUserInput(["\n", "\n", "\n", "\n", "\n", "\n"])
            ret = grapeMenu.menu().applyMenuChoice("clone", args)
            self.assertTrue(ret, "vine.clone returned failure")
            submodulepath = os.path.join(tempDir, "submodule1")
            self.assertTrue(os.path.exists(submodulepath), "submodule1 does not exist in clone")
        finally:
            shutil.rmtree(tempDir)

    def testRecursiveCloneNestedSubproject(self):
        """A recursive clone with --allNested pulls in nested subprojects."""
        # make a repo to turn into a nested subproject
        git.clone("--mirror %s %s " % (self.repo, self.repos[1]))
        os.chdir(self.repo)
        grapeMenu.menu().applyMenuChoice("addSubproject", ["--name=subproject1", "--prefix=subs/subproject1",
                                                          "--branch=master", "--url=%s" % self.repos[1],
                                                          "--nested", "--noverify"])
        grapeMenu.menu().applyMenuChoice("commit", ["-m", "\"added subproject1\""])
        # print() form works identically under Python 2 and 3 for a single argument
        print(git.log("--decorate"))
        # Now clone the repo into a temp dir and make sure the subproject is in the clone
        tempDir = tempfile.mkdtemp()
        try:
            self.queueUserInput(["\n", "\n", "\n", "\n"])
            args = [self.repo, tempDir, "--recursive", "--allNested"]
            ret = grapeMenu.menu().applyMenuChoice("clone", args)
            self.assertTrue(ret, "vine.clone returned failure")
            # ensure we are on master with all nested subprojects
            os.chdir(tempDir)
            self.queueUserInput(["all\n"])
            args = ["master", "--updateView"]
            ret = grapeMenu.menu().applyMenuChoice("checkout", args)
            self.assertTrue(ret, "vine.checkout master returned failure")
            print(git.log("--decorate"))
            subprojectpath = os.path.join(tempDir, "subs/subproject1")
            self.assertTrue(os.path.exists(subprojectpath), "subproject1 does not exist in clone")
        finally:
            shutil.rmtree(tempDir)
|
modoboa/modoboa | modoboa/core/migrations/0021_localconfig_need_dovecot_update.py | Python | isc | 403 | 0 | # Generated by Django 2.2.10 on 2020-04-29 12:31
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add LocalConfig.need_dovecot_update (BooleanField, default False)."""

    dependencies = [
        # reconstructed: the migration name was garbled by an extraction artifact
        ('core', '0020_auto_20200421_0851'),
    ]

    operations = [
        migrations.AddField(
            model_name='localconfig',
            name='need_dovecot_update',
            field=models.BooleanField(default=False),
        ),
    ]
|
jonathanverner/brython | www/tests/test_indexedDB.py | Python | bsd-3-clause | 1,790 | 0.018436 | from browser import window
_kids=['Marsha', 'Jan', 'Cindy']
def continue1(event):
_objectStore.get('Jan', onsuccess=exists, onerror=continue2)
def continue2(event):
    """'Jan' was not found in the store: populate all kids, then re-query 'Jan'."""
    # NOTE(review): _objectStore is not defined anywhere in this module --
    # presumably it should be an object store obtained from `db`; confirm.
    for _kid in _kids:
        _rec = {'name': _kid}
        _objectStore.put(_rec, _kid, onsuccess=printmsg, onerror=printerr)
    _objectStore.get('Jan', onsuccess=continue3, onerror=printerr)
def continue3(event):
print ("Async operations complete..")
def exists(event):
if event.target.pyresult() is None:
#handle cause of when get returns undefined if the key doesn't exist
#in the db..
continue2(event)
else:
print(event.result)
#this shouldn't get called, output message if called
print("this shouldn't get called")
def printrec(event):
_obj=event.target.pyresult()
assert isinstance(_obj, dict)
assert _obj['name']=='Jan'
def printmsg(event):
_obj=event.target.pyresult()
assert _obj in _kids
def printerr(event):
print("Error: %s" % (event.result))
def onsuccess(event):
global db
db = request.result
def onupgradeneeded(e):
print("event: ", e, "target", e.target)
print("event type: ", e.type)
print("e.oldVersion: ", e.oldVersion)
print("e.newVersion: ", e.newVersion)
# todo.. override createObjectStore to take options (ie, like OS.put)
#e.target.result.createObjectStore("BradyKids")
db = request.result
for _kid in _kids:
print(_kid, db)
_rec={'name': _kid}
req = db.put(_rec, _kid)
req.onsuccess=printmsg
req.onerror=printerr
db = None
request = window.indexedDB.open("BradyKids", 3)
request.onsuccess = onsuccess
request.onupgradeneeded=onupgradeneeded
print(db)
print("allowing async operations to complete")
|
WojciechMula/toys | fpc-compression/compress.py | Python | bsd-2-clause | 5,796 | 0.002415 | import sys
import heapq
import uvint
from pathlib import Path
from status import Status
def main():
path = Path(sys.argv[1])
out = Path(sys.argv[2])
bytes = path.read_bytes()
min_length = 4
max_length = 10
compressor = Compressor(bytes, min_length, max_length)
with open(out, 'wb') as f:
data = compressor.compress_to_file(f)
class Compressor:
    """Greedy dictionary compressor.

    Repeatedly picks the repeated substring with the highest weight
    (occurrences * length), replaces its occurrences with a symbol id in the
    image, and finally serializes the symbol table plus a stream of
    raw-bytes / symbol-reference commands to a file.
    """

    def __init__(self, input, min_length, max_length):
        self.input = input              # raw bytes to compress
        self.image = Image(len(input))  # tracks which bytes are already replaced
        self.min_length = min_length    # shortest substring considered
        self.max_length = max_length    # longest substring considered

    def compress_to_file(self, file):
        """Compress the input and write the archive to the open binary *file*."""
        # reconstructed: this assignment was garbled in the original source
        self.histogram = self.__build_histogram()
        symbols = []
        status = Status()
        total = len(self.histogram)
        while True:
            # guard against division by zero when no substring repeats at all
            proc = 100 * (total - len(self.histogram)) / total if total else 100.0
            status(f"Compressing {proc:.2f}%")
            best = self.get_best()
            if best is None:
                break
            n = len(best.word)
            symbol = len(symbols)
            image = self.image
            for idx in best.indices:
                if image.is_free(idx, n):
                    self.image.put_symbol(symbol, idx, n)
            # bump the version so stale Word entries are revalidated lazily
            image.version += 1
            symbols.append(best.word)
        status.clear()

        commands = self.__input_for_encoding()
        #self.__check(symbols, commands)
        self.__compress(file, symbols, commands)

    def __input_for_encoding(self):
        """Walk the image and build the command list: raw byte chunks and symbol ids."""
        result = []
        raw = b''
        for c, status in zip(self.input, self.image.img):
            if status is used:
                # interior byte of an already-replaced word -- skip
                continue
            if status is free:
                raw += c.to_bytes(1, "little")
            else:
                # a symbol id: flush any pending raw bytes first
                if raw:
                    result.append(raw)
                    raw = b''
                result.append(status)  # a symbol
        if raw:
            result.append(raw)
        return result

    def __compress(self, file, symbols, commands):
        """Serialize the symbol table, then the command stream."""
        # 1a. compress symbols
        for symbol in symbols:
            file.write(uvint.encode(len(symbol)))
            file.write(symbol)
        # 1b. mark the last entry with length 0
        file.write(uvint.encode(0))
        # 2. compress either 1) raw substrings or 2) symbol references
        for item in commands:
            if type(item) is bytes:
                # raw string is encoded as k=[0..127] + k*byte
                while item:
                    k = min(len(item), 127)
                    file.write(uvint.encode(k))
                    file.write(item[:k])
                    item = item[k:]
            else:
                # symbol is encoded as 128 + symbol
                file.write(uvint.encode(item + 128))

    def __check(self, symbols, commands):
        """Debug helper: reassemble the commands and assert they equal the input."""
        tmp = b''
        for item in commands:
            if type(item) is bytes:
                tmp += item
            else:
                tmp += symbols[item]
        assert tmp == self.input

    def get_best(self):
        """Pop the most profitable still-valid Word, or None when exhausted."""
        histogram = self.histogram
        while len(histogram) > 0:
            best = heapq.heappop(histogram)
            if not self.update_needed(best):
                return best
            self.update_word(best)
            # only worth re-queuing if it still occurs more than once
            if len(best.indices) > 1:
                heapq.heappush(histogram, best)
        return None

    def update_needed(self, word):
        """Return True when some of *word*'s occurrences were invalidated since last check."""
        if word.version == self.image.version:
            return False
        length = len(word.word)
        for idx in word.indices:
            if not self.image.is_free(idx, length):
                return True
        return False

    def update_word(self, word):
        """Drop occurrences overlapping already-replaced regions; stamp current version."""
        length = len(word.word)
        word.indices = [idx for idx in word.indices if self.image.is_free(idx, length)]
        word.version = self.image.version

    def __build_histogram(self):
        """Collect every substring of length [min_length, max_length] occurring >= 2 times,
        sorted by descending weight (a valid heap, given Word's inverted __lt__)."""
        freq = {}
        input = self.input
        n = len(input)
        status = Status()
        for i in range(0, n):
            status("Building histogram %d/%d (%0.2f%%)" % (i, n, 100 * i / n))
            for length in range(self.min_length, self.max_length + 1):
                substr = input[i:i+length]
                if len(substr) != length:
                    break
                if substr not in freq:
                    freq[substr] = []
                freq[substr].append(i)

        histogram = [Word(word, indices) for word, indices in freq.items() if len(indices) > 1]
        histogram.sort()
        status.clear()
        return histogram
# Sentinels stored per input byte: `free` marks an untouched byte,
# `used` marks the interior of a replaced word.
free = None
used = object()


class Image:
    """Tracks which bytes of the input have already been replaced by a symbol."""

    def __init__(self, length):
        self.img = [free for _ in range(length)]
        self.version = 0

    def is_free(self, idx, length):
        """Return True when every byte in [idx, idx + length) is still unclaimed."""
        for offset in range(length):
            if self.img[idx + offset] is not free:
                return False
        return True

    def put_symbol(self, symbol, idx, length):
        """Reserve [idx, idx + length); the first cell records the symbol id."""
        for offset in range(length):
            self.img[idx + offset] = used
        self.img[idx] = symbol
class Word:
    """A candidate substring together with every offset where it occurs.

    Comparison is inverted (heavier words compare as "less") so that both a
    sorted list and a min-heap yield the most profitable word first.
    """

    def __init__(self, word, indices):
        self.word = word        # the substring itself
        self.indices = indices  # start offsets of every occurrence
        self.version = 0        # image version this entry was last validated against

    @property
    def weight(self):
        """How profitable replacing this word is: occurrences * length."""
        return len(self.word) * len(self.indices)

    def __repr__(self):
        return f'<{self.word}: {len(self.indices)} [weight={self.weight}]>'

    def __lt__(self, other):
        # inverted: larger weight sorts first
        return other.weight < self.weight
if __name__ == '__main__':
main()
|
AnCh7/sweetshot | python3-src/steem/transactionbuilder.py | Python | unlicense | 446 | 0.002242 | from piston.transactionbuilder import TransactionBuilder as PistonTransactionBuilder
import warnings

# Always surface DeprecationWarning (Python suppresses it by default).
# Reconstructed: the category name was garbled in the original source.
warnings.simplefilter('always', DeprecationWarning)
class TransactionBuilder(PistonTransactionBuilder):
    """Deprecated shim around piston's TransactionBuilder.

    Emits a DeprecationWarning on construction and otherwise behaves exactly
    like the piston-lib implementation.
    """

    def __init__(self, *args, **kwargs):
        warnings.warn(
            "Please use the API compatible 'piston-lib' in future",
            DeprecationWarning
        )
        super(TransactionBuilder, self).__init__(*args, **kwargs)
|
seales/spellchecker | spelling/spelling_error.py | Python | mit | 1,021 | 0.001959 |
class SpellingError:
    """A single misspelling: the word, the file and line it occurs on,
    and the (1-based) line number of that line."""

    def __init__(self, file, word, line, line_number):
        """
        Takes file path, misspelled word, line where misspelling occurs, and line number of misspelling.
        :param file: str
        :param word: str
        :param line: str
        :param line_number: int
        """
        self.__file = file
        self.__word = word
        self.__line = line
        self.__line_number = line_number

    @property
    def file(self):
        return self.__file

    @file.setter
    def file(self, file):
        self.__file = file

    @property
    def word(self):
        return self.__word

    @word.setter
    def word(self, word):
        self.__word = word

    @property
    def line(self):
        return self.__line

    @line.setter
    def line(self, line):
        self.__line = line

    @property
    def line_number(self):
        return self.__line_number

    # reconstructed: this decorator and the assignment below were garbled
    # in the original source
    @line_number.setter
    def line_number(self, line_number):
        self.__line_number = line_number
|
iulian787/spack | var/spack/repos/builtin/packages/py-terminado/package.py | Python | lgpl-2.1 | 1,010 | 0.005941 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
class PyTerminado(PythonPackage):
    """Terminals served to term.js using Tornado websockets"""

    homepage = "https://pypi.python.org/pypi/terminado"
    url = "https://pypi.io/packages/source/t/terminado/terminado-0.8.3.tar.gz"

    version('0.8.3', sha256='4804a774f802306a7d9af7322193c5390f1da0abb429e082a10ef1d46e6fb2c2')
    version('0.8.2', sha256='de08e141f83c3a0798b050ecb097ab6259c3f0331b2f7b7750c9075ced2c20c2')
    version('0.8.1', sha256='55abf9ade563b8f9be1f34e4233c7b7bde726059947a593322e8a553cc4c067a')
    version('0.6', sha256='2c0ba1f624067dccaaead7d2247cfe029806355cef124dc2ccb53c83229f0126')

    depends_on('py-tornado@4:', type=('build', 'run'))
    # reconstructed: the next two lines were garbled in the original source
    depends_on('py-ptyprocess', type=('build', 'run'))
    depends_on('python@2.7:2.8,3.4:', when='@0.8.2:', type=('build', 'run'))
|
krafczyk/spack | var/spack/repos/builtin/packages/libfs/package.py | Python | lgpl-2.1 | 1,873 | 0.000534 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Libfs(AutotoolsPackage):
    """libFS - X Font Service client library.

    This library is used by clients of X Font Servers (xfs), such as
    xfsinfo, fslsfonts, and the X servers themselves."""

    homepage = "http://cgit.freedesktop.org/xorg/lib/libFS"
    # reconstructed: this URL was garbled in the original source
    url = "https://www.x.org/archive/individual/lib/libFS-1.0.7.tar.gz"

    version('1.0.7', 'd8c1246f5b3d0e7ccf2190d3bf2ecb73')

    depends_on('xproto@7.0.17:', type='build')
    depends_on('fontsproto', type='build')
    depends_on('xtrans', type='build')
    depends_on('pkgconfig', type='build')
    depends_on('util-macros', type='build')
|
oscaro/django | tests/field_deconstruction/tests.py | Python | bsd-3-clause | 17,552 | 0.000855 | from __future__ import unicode_literals
import warnings
from django.db import models
from django.test import TestCase, override_settings
from django.utils import six
class FieldDeconstructionTests(TestCase):
"""
Tests the deconstruct() method on all core fields.
"""
def test_name(self):
"""
Tests the outputting of the correct name if assigned one.
"""
# First try using a "normal" field
field = models.CharField(max_length=65)
name, path, args, kwargs = field.deconstruct()
self.assertIsNone(name)
field.set_attributes_from_name("is_awesome_test")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(name, "is_awesome_test")
self.assertIsInstance(name, six.text_type)
# Now try with a ForeignKey
field = models.ForeignKey("some_fake.ModelName")
name, path, args, kwargs = field.deconstruct()
self.assertIsNone(name)
field.set_attributes_from_name("author")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(name, "author")
def test_auto_field(self):
field = models.AutoField(primary_key=True)
field.set_attributes_from_name("id")
| name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.AutoField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"primary_key": True})
def test_big_integer_field(self):
field = models.BigIntegerField()
name, path, args, kwargs = field.deconstruct()
self.assert | Equal(path, "django.db.models.BigIntegerField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_boolean_field(self):
field = models.BooleanField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.BooleanField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
field = models.BooleanField(default=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.BooleanField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"default": True})
def test_char_field(self):
field = models.CharField(max_length=65)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.CharField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_length": 65})
field = models.CharField(max_length=65, null=True, blank=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.CharField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_length": 65, "null": True, "blank": True})
def test_csi_field(self):
field = models.CommaSeparatedIntegerField(max_length=100)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.CommaSeparatedIntegerField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_length": 100})
def test_date_field(self):
field = models.DateField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DateField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
field = models.DateField(auto_now=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DateField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"auto_now": True})
def test_datetime_field(self):
field = models.DateTimeField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DateTimeField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
field = models.DateTimeField(auto_now_add=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DateTimeField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"auto_now_add": True})
# Bug #21785
field = models.DateTimeField(auto_now=True, auto_now_add=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DateTimeField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"auto_now_add": True, "auto_now": True})
def test_decimal_field(self):
field = models.DecimalField(max_digits=5, decimal_places=2)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DecimalField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_digits": 5, "decimal_places": 2})
def test_decimal_field_0_decimal_places(self):
"""
A DecimalField with decimal_places=0 should work (#22272).
"""
field = models.DecimalField(max_digits=5, decimal_places=0)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DecimalField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_digits": 5, "decimal_places": 0})
def test_email_field(self):
field = models.EmailField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.EmailField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_length": 75})
field = models.EmailField(max_length=255)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.EmailField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_length": 255})
def test_file_field(self):
field = models.FileField(upload_to="foo/bar")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.FileField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"upload_to": "foo/bar"})
# Test max_length
field = models.FileField(upload_to="foo/bar", max_length=200)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.FileField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"upload_to": "foo/bar", "max_length": 200})
def test_file_path_field(self):
field = models.FilePathField(match=".*\.txt$")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.FilePathField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"match": ".*\.txt$"})
field = models.FilePathField(recursive=True, allow_folders=True, max_length=123)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.FilePathField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"recursive": True, "allow_folders": True, "max_length": 123})
def test_float_field(self):
field = models.FloatField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.FloatField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_foreign_key(self):
# Test basic pointing
from django.contrib.auth.models import Permission
field = models.ForeignKey("auth.Permission")
field.rel.to = Permission
field.rel.field_name = "id"
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission"})
self.assertFalse(hasattr(kwargs['to'], "setting_name"))
# Test swap detection for swappable model
field = models.ForeignKey("auth.User")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.User"})
self.assertEq |
UU-Hydro/PCR-GLOBWB_model | model/deterministic_runner.py | Python | gpl-3.0 | 6,069 | 0.012034 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# PCR-GLOBWB (PCRaster Global Water Balance) Global Hydrological Model
#
# Copyright (C) 2016, Edwin H. Sutanudjaja, Rens van Beek, Niko Wanders, Yoshihide Wada,
# Joyce H. C. Bosmans, Niels Drost, Ruud J. van der Ent, Inge E. M. de Graaf, Jannis M. Hoch,
# Kor de Jong, Derek Karssenberg, Patricia López López, Stefanie Peßenteiner, Oliver Schmitz,
# Menno W. Straatsma, Ekkamol Vannametee, Dominik Wisser, and Marc F. P. Bierkens
# Faculty of Geosciences, Utrecht University, Utrecht, The Netherlands
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
from pcraster.framework import DynamicModel
from pcraster.framework import DynamicFramework
from configuration import Configuration
from currTimeStep import ModelTime
from reporting import Reporting
from spinUp import SpinUp
from pcrglobwb import PCRGlobWB
import logging
logger = logging.getLogger(__name__)
import oldcalc_framework
import disclaimer
class DeterministicRunner(DynamicModel):
    """PCRaster DynamicModel that advances PCR-GlobWB one time step per call
    to dynamic() and writes the configured report output each step."""

    def __init__(self, configuration, modelTime, initialState = None):
        DynamicModel.__init__(self)
        # shared ModelTime object; updated at the start of every dynamic() call
        self.modelTime = modelTime
        self.model = PCRGlobWB(configuration, modelTime, initialState)
        self.reporting = Reporting(configuration, self.model, modelTime)

    def initial(self):
        # all initialization is done in __init__ / inside PCRGlobWB itself
        pass

    def dynamic(self):
        # re-calculate current model time using current pcraster timestep value
        self.modelTime.update(self.currentTimeStep())
        # update model (will pick up current model time from model time object)
        self.model.read_forcings()
        self.model.update(report_water_balance=True)
        # do any needed reporting for this time step
        self.reporting.report()
def main():
    """Entry point: run PCR-GlobWB deterministically (spin-up then transient run).

    Expects the ini/configuration file path as the first command line argument;
    an optional second argument "debug" enables debug mode.
    """
    # print disclaimer
    disclaimer.print_disclaimer()

    # get the full path of configuration/ini file given in the system argument
    iniFileName = os.path.abspath(sys.argv[1])

    # debug option
    debug_mode = False
    if len(sys.argv) > 2:
        if sys.argv[2] == "debug": debug_mode = True

    # object to handle configuration/ini file
    configuration = Configuration(iniFileName = iniFileName, \
                                  debug_mode = debug_mode)

    # timeStep info: year, month, day, doy, hour, etc
    currTimeStep = ModelTime()

    # object for spin_up
    spin_up = SpinUp(configuration)

    # spinning up: repeat the first year until the state converges or the
    # configured maximum number of spin-up years is reached
    noSpinUps = int(configuration.globalOptions['maxSpinUpsInYears'])
    initial_state = None
    if noSpinUps > 0:
        logger.info('Spin-Up #Total Years: '+str(noSpinUps))
        spinUpRun = 0 ; has_converged = False
        while spinUpRun < noSpinUps and has_converged == False:
            spinUpRun += 1
            currTimeStep.getStartEndTimeStepsForSpinUp(
                    configuration.globalOptions['startTime'],
                    spinUpRun, noSpinUps)
            logger.info('Spin-Up Run No. '+str(spinUpRun))
            deterministic_runner = DeterministicRunner(configuration, currTimeStep, initial_state)
            all_state_begin = deterministic_runner.model.getAllState()
            dynamic_framework = DynamicFramework(deterministic_runner, currTimeStep.nrOfTimeSteps)
            dynamic_framework.setQuiet(True)
            dynamic_framework.run()
            all_state_end = deterministic_runner.model.getAllState()
            has_converged = spin_up.checkConvergence(all_state_begin, all_state_end, spinUpRun, deterministic_runner.model.routing.cellArea)
            # carry the end-of-year state into the next spin-up / transient run
            initial_state = deterministic_runner.model.getState()

    # Running the deterministic_runner (excluding DA scheme)
    currTimeStep.getStartEndTimeSteps(configuration.globalOptions['startTime'],
                                      configuration.globalOptions['endTime'])
    logger.info('Transient simulation run started.')
    deterministic_runner = DeterministicRunner(configuration, currTimeStep, initial_state)
    dynamic_framework = DynamicFramework(deterministic_runner, currTimeStep.nrOfTimeSteps)
    dynamic_framework.setQuiet(True)
    dynamic_framework.run()

    # for debugging to PCR-GLOBWB version one
    if configuration.debug_to_version_one:
        logger.info('\n\n\n\n\n'+'Executing PCR-GLOBWB version 1.'+'\n\n\n\n\n')

        # reset modelTime object
        currTimeStep = None; currTimeStep = ModelTime()
        currTimeStep.getStartEndTimeSteps(configuration.globalOptions['startTime'],
                                          configuration.globalOptions['endTime'])

        # execute PCR-GLOBWB version 1
        # - including comparing model outputs (from versions one and two)
        pcrglobwb_one = oldcalc_framework.PCRGlobWBVersionOne(configuration, \
                                                              currTimeStep, \
                                                              deterministic_runner.model.routing.landmask, \
                                                              deterministic_runner.model.routing.cellArea)
        dynamic_framework = DynamicFramework(pcrglobwb_one, currTimeStep.nrOfTimeSteps)
        dynamic_framework.setQuiet(True)
        # reconstructed: this run() call was garbled in the original source
        dynamic_framework.run()
# reconstructed: the '__main__' literal was garbled in the original source
if __name__ == '__main__':
    # print disclaimer
    disclaimer.print_disclaimer(with_logger = True)
    sys.exit(main())
|
degoldcode/PyNaviSim | objects.py | Python | gpl-3.0 | 424 | 0.03066 | from vec2d import vec2d
from math import e, exp, pi, c | os, sin, sqrt, atan2
class Goal:
    """A goal location; caches its polar angle (from the origin) at construction."""

    def __init__(self, pos):
        self.pos = pos
        self.size = 8  # drawing radius
        # heading angle of the goal as seen from the origin
        self.theta = atan2(self.pos.y, self.pos.x)
class Landmark:
    """A point landmark, initialized at the origin."""

    def __init__(self):
        self.pos = vec2d(0, 0)
        # reconstructed: this assignment was garbled in the original source
        self.size = 4
class Pipe:
    """A straight pipe segment between two endpoints, initialized at the origin."""

    def __init__(self):
        self.pos0 = vec2d(0, 0)
        self.pos1 = vec2d(0, 0)
        self.width = 3
zzcclp/carbondata | python/pycarbon/tests/mnist/dataset_with_unischema/tests/conftest.py | Python | apache-2.0 | 1,952 | 0.007684 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable | law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permis | sions and
# limitations under the License.
import numpy as np
import pytest
from pycarbon.tests import DEFAULT_CARBONSDK_PATH
MOCK_IMAGE_SIZE = (28, 28)
MOCK_IMAGE_3DIM_SIZE = (28, 28, 1)
SMALL_MOCK_IMAGE_COUNT = {
'train': 30,
'test': 5
}
LARGE_MOCK_IMAGE_COUNT = {
'train': 600,
'test': 100
}
class MockDataObj(object):
  """Wraps a mock image array, exposing the getdata() accessor expected on
  PIL-like image objects by the dataset-generation code."""

  def __init__(self, a):
    self.a = a

  def getdata(self):
    return self.a
def _mock_mnist_data(mock_spec):
  """
  Creates a mock data dictionary with 'train' and 'test' sets.

  Each set contains ``mock_spec[set_name]`` pairs of
  ``(MockDataObj wrapping a random 28x28 uint8 image, random digit label)``.

  :param mock_spec: dict mapping 'train'/'test' to the number of pairs to generate
  """
  bogus_data = {
    'train': [],
    'test': []
  }
  for dset, data in bogus_data.items():
    for _ in range(mock_spec[dset]):
      # random image plus a random label in [0, 9) -- note randint's upper
      # bound is exclusive here, so the label 9 is never generated
      pair = (MockDataObj(np.random.randint(0, 255, size=MOCK_IMAGE_SIZE, dtype=np.uint8)),
              np.random.randint(0, 9))
      data.append(pair)
  return bogus_data
@pytest.fixture(scope="session")
def small_mock_mnist_data():
return _mock_mnist_data(SMALL_MOCK_IMAGE_COUNT)
@pytest.fixture(scope="session")
def large_mock_mnist_data():
return _mock_mnist_data(LARGE_MOCK_IMAGE_COUNT)
|
lsst/sims_catalogs_measures | tests/testMethodRegistry.py | Python | gpl-3.0 | 2,280 | 0.00614 | from __future__ import with_statement
import unittest
import lsst.utils.tests as utilsTests
from lsst.sims.catalogs.measures.instance im | port register_class, register_method
@register_class
class ClassA(object):
def call(self, key):
return self._methodRegistry[key](self)
@register_method('a')
def _a_method(self):
return 'a'
@register_class
class ClassB(ClassA):
@register_method('b')
def _b_method(self):
return 'b'
@register_class
class ClassC(ClassB):
| @register_method('c')
def _c_method(self):
return 'c'
@register_class
class ClassD(ClassA):
@register_method('d')
def _d_method(self):
return 'd'
class MethodRegistryTestCase(unittest.TestCase):
    """Exercises the register_class/register_method decorator pair defined above."""

    def testMethodInheritance(self):
        """
        Test that the register_class and register_method decorators
        behave appropriately and preserve inheritance.
        """
        aa = ClassA()
        self.assertTrue(aa.call('a')=='a')
        # below, we test to make sure that methods which
        # should not be in ClassA's _methodRegistry are not
        # spuriously added to the registry
        self.assertRaises(KeyError, aa.call, 'b')
        self.assertRaises(KeyError, aa.call, 'c')
        self.assertRaises(KeyError, aa.call, 'd')
        # ClassB inherits ClassA's registry entries and adds its own
        bb = ClassB()
        self.assertTrue(bb.call('a')=='a')
        self.assertTrue(bb.call('b')=='b')
        self.assertRaises(KeyError, bb.call, 'c')
        self.assertRaises(KeyError, bb.call, 'd')
        # ClassC extends ClassB's registry
        cc = ClassC()
        self.assertTrue(cc.call('a')=='a')
        self.assertTrue(cc.call('b')=='b')
        self.assertTrue(cc.call('c')=='c')
        self.assertRaises(KeyError, cc.call, 'd')
        # ClassD branches off ClassA, so it must not see B's or C's methods
        dd = ClassD()
        self.assertTrue(dd.call('a')=='a')
        self.assertTrue(dd.call('d')=='d')
        self.assertRaises(KeyError, dd.call, 'b')
        self.assertRaises(KeyError, dd.call, 'c')
def suite():
"""Returns a suite containing all the test cases in this module."""
utilsTests.init()
suites = []
suites += unittest.makeSuite(MethodRegistryTestCase)
return unittest.TestSuite(suites)
def run(shouldExit=False):
"""Run the tests"""
utilsTests.run(suite(), shouldExit)
if __name__ == "__main__":
run(True)
|
deepmind/deep-verify | deep_verify/src/formulations/semidefinite/__init__.py | Python | apache-2.0 | 1,083 | 0 | # coding=utf-8
# Copyright 2019 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Semidefinite verification formulation.
Going beyond the 'standard' formulation (`src.formulations.standard`), this
module implements a tighter Lagrangian relaxation based on semidefinite
programming.
For more details see paper: "Efficient Neural Network Verification with
Exactness Characterization.",
http://auai.org/uai2019/proceedings/papers/164.pdf.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
|
jriehl/numba | examples/laplace2d/laplace2d-pa.py | Python | bsd-2-clause | 1,199 | 0.006672 | #!/usr/bin/env python
from __future__ import print_function
import time
import numpy as np
from numba import jit, stencil
@stencil
def jacobi_kernel(A):
    # 4-point Jacobi stencil: average of the N/S/E/W neighbours
    return 0.25 * (A[0,1] + A[0,-1] + A[-1,0] + A[1,0])
@jit(parallel=True)
def jacobi_relax_core(A, Anew):
    """One Jacobi relaxation sweep; returns the maximum absolute change.

    NOTE(review): the *Anew* parameter is immediately rebound to the stencil
    result, so the caller's Anew buffer is never written -- the caller's
    A/Anew swap relies on this; confirm before changing.
    """
    Anew = jacobi_kernel(A)
    # reconstructed: this line was garbled in the original source
    error = np.max(np.abs(Anew - A))
    return error
def main():
    """Driver: run Jacobi relaxation on a 3000x3000 mesh, reporting progress
    every 100 iterations until convergence or iter_max is reached."""
    NN = 3000
    NM = 3000

    A = np.zeros((NN, NM), dtype=np.float64)
    Anew = np.zeros((NN, NM), dtype=np.float64)

    n = NN
    m = NM
    iter_max = 1000

    tol = 1.0e-6
    error = 1.0

    # Dirichlet boundary: left column fixed at 1.0
    for j in range(n):
        A[j, 0] = 1.0
        Anew[j, 0] = 1.0

    print("Jacobi relaxation Calculation: %d x %d mesh" % (n, m))

    timer = time.time()
    # reconstructed: this initialization was garbled in the original source
    iter = 0

    while error > tol and iter < iter_max:
        error = jacobi_relax_core(A, Anew)

        # swap A and Anew
        tmp = A
        A = Anew
        Anew = tmp

        if iter % 100 == 0:
            print("%5d, %0.6f (elapsed: %f s)" % (iter, error, time.time()-timer))

        iter += 1

    runtime = time.time() - timer
    print(" total: %f s" % runtime)
if __name__ == '__main__':
main()
|
a1ezzz/wasp-backup | wasp_backup/file_archiver.py | Python | lgpl-3.0 | 2,627 | 0.012562 | # -*- coding: utf-8 -*-
# wasp_backup/file_archiver.py
#
# Copyright (C) 2017 the wasp-backup authors and contributors
# <see AUTHORS file>
#
# This file is part of wasp-backup.
#
# wasp-backup is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wasp-backup is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with wasp-backup. If not, see <http://www.gnu.org/lice | nses/>.
# TODO: document the code
# TODO: write tests for the code
# noinspection PyUnresolvedReferences
from wasp_backup.version import __author__, __version__, __credits__, __license__, __copyright__, __email__
# noinspection PyUnresolvedReferences
from wasp_backup.version import __status__
import io
from wasp_general.verify import verify_type, verify_value
from wasp_backup.cipher import WBackupCipher
from wasp_b | ackup.core import WBackupMeta
from wasp_backup.archiver import WBasicArchiveCreator
class WFileArchiveCreator(WBasicArchiveCreator):
    """Archive creator that streams a single readable backup source into an
    archive in fixed-size chunks."""

    @verify_type('paranoid', archive_path=str, io_write_rate=(float, int, None))
    @verify_value('paranoid', archive_path=lambda x: len(x) > 0, io_write_rate=lambda x: x is None or x > 0)
    @verify_type(cipher=(WBackupCipher, None), compression_mode=(WBackupMeta.Archive.CompressionMode, None))
    def __init__(
        self, backup_source, archive_path, logger, stop_event=None, io_write_rate=None, compression_mode=None,
        cipher=None, buffer_size=io.DEFAULT_BUFFER_SIZE
    ):
        """Keep the source object and chunk size; delegate everything else
        to :class:`WBasicArchiveCreator`."""
        WBasicArchiveCreator.__init__(
            self, archive_path, logger, stop_event=stop_event, io_write_rate=io_write_rate,
            compression_mode=compression_mode, cipher=cipher
        )
        self.__backup_source = backup_source
        self.__buffer_size = buffer_size

    def backup_source(self):
        """Return the object whose ``read`` method supplies the data."""
        return self.__backup_source

    def buffer_size(self):
        """Return the chunk size used while copying the source."""
        return self.__buffer_size

    def write_archive(self, fo, archive):
        """Copy the backup source into *fo* chunk by chunk."""
        source = self.backup_source()
        chunk_size = self.buffer_size()
        chunk = source.read(chunk_size)
        while chunk:
            fo.write(chunk)
            chunk = source.read(chunk_size)

    def meta(self):
        """Extend the base metadata with the archived source description."""
        meta_data = WBasicArchiveCreator.meta(self)
        meta_data[WBackupMeta.Archive.MetaOptions.archived_files] = self.backup_source()
        return meta_data
|
dakcarto/QGIS | python/plugins/processing/algs/saga/versioncheck.py | Python | gpl-2.0 | 2,449 | 0 | import os
import subprocess
def getAlgParams(f):
    """Parse a SAGA algorithm description file.

    Returns ``(cmdname, group, params, booleanparams, numparams)`` where the
    three lists hold the command-line switches (``-NAME``) for generic,
    boolean and numeric parameters respectively.
    """
    params = []
    booleanparams = []
    numparams = []
    # ``with`` guarantees the description file is closed even if a line is
    # malformed (the original code leaked the handle on exceptions).
    with open(f) as lines:
        line = lines.readline().strip('\n').strip()
        name = line
        # The first line may be "display name|command name".
        if '|' in name:
            tokens = name.split('|')
            cmdname = tokens[1]
        else:
            cmdname = name
        line = lines.readline().strip('\n').strip()
        group = line
        line = lines.readline().strip('\n').strip()
        # Parameter lines follow until the first empty line.
        while line != '':
            if line.startswith('Hardcoded'):
                pass
            elif line.startswith('AllowUnmatching'):
                pass
            elif line.startswith('Extent'):
                # "Extent XMIN XMAX ..." expands to one switch per name.
                extentParamNames = line[6:].strip().split(' ')
                params.extend(["-" + p for p in extentParamNames])
            else:
                tokens = line.split("|")
                if tokens[0] == "ParameterBoolean":
                    booleanparams.append("-" + tokens[1].strip())
                elif tokens[0] == "ParameterNumber":
                    numparams.append("-" + tokens[1].strip())
                else:
                    params.append("-" + tokens[1])
            line = lines.readline().strip('\n').strip()
    return cmdname, group, params, booleanparams, numparams
def testDescriptionFile(f):
usage = ""
cmdname, group, params, booleanparams, numparams = getAlgParams(f)
command = [r'd:\saga2.1.2\saga_cmd.exe', group, cmdname]
for p in params:
command.append(p)
command.append("dummy")
for p in numparams:
command.append(p)
command.append("0")
command.extend(booleanparams)
proc = subprocess.Popen(
command,
shell=True,
stdout=subprocess.PIPE,
stdin=open(os.devnull),
stderr=subprocess.STDOUT,
universal_newlines=True,
).stdout
lines = []
for line in iter(proc.readline, ''):
lines.append(line)
if "Usage" in line:
usage = line
if usage and not lines[0] | .startswith("_"):
print "-" * 50
print | f + " [ERROR]"
print lines
print usage
print "Name in description:" + cmdname
print "Parameters in description:" + unicode(params)
print "-" * 50
print
if __name__ == '__main__':
    # Check every SAGA description file shipped next to this script.
    folder = os.path.join(os.path.dirname(__file__), "description")
    for descriptionFile in os.listdir(folder):
        if descriptionFile.endswith('txt'):
            testDescriptionFile(os.path.join(folder, descriptionFile))
|
miniworld-project/miniworld_core | miniworld/model/network/linkqualitymodels/LinkQualityModelRange.py | Python | mit | 6,220 | 0.002412 | from miniworld import log
from miniworld.Scenario import scenario_config
from miniworld.model.network.linkqualitymodels import LinkQualityModel, LinkQualityConstants
__author__ = 'Nils Schmidt'
class LinkQualityModelRange(LinkQualityModel.LinkQualityModel):
    """Binary range model: nodes closer than 30 units get a loss-free link,
    anything farther away is disconnected."""

    def _distance_2_link_quality(self, distance):
        """Return ``(connected, link_quality_dict)`` for *distance*."""
        if 0 <= distance < 30:
            # In range: a single loss=0 entry, i.e. a perfect link.
            return True, {LinkQualityConstants.LINK_QUALITY_KEY_LOSS: 0}
        # Out of range: no connection and no link quality settings.
        return False, None
# TODO: extract
class LinkQualityModelNetEm(LinkQualityModel.LinkQualityModel):
    """Base class holding the option keys understood by the Linux ``tc
    netem`` queueing discipline."""
    # TODO:
    NETEM_KEY_LOSS = "loss"
    NETEM_KEY_LIMIT = "limit"
    NETEM_KEY_DELAY = "delay"
    NETEM_KEY_CORRUPT = "corrupt"
    NETEM_KEY_DUPLICATE = "duplicate"
    NETEM_KEY_REORDER = "reorder"
    NETEM_KEY_RATE = "rate"
    # order of options that netem needs
    NETEM_KEYS = (
        NETEM_KEY_LIMIT, NETEM_KEY_DELAY, NETEM_KEY_LOSS, NETEM_KEY_CORRUPT, NETEM_KEY_DUPLICATE, NETEM_KEY_REORDER,
        NETEM_KEY_RATE)
class LinkQualityModelWiFiLinear(LinkQualityModelNetEm):
    """Distance-based model: bandwidth falls off with distance and delay
    grows with it; produces netem-style settings."""

    MAX_BANDWIDTH = 54000
    # NOTE: executed at import time when the class body runs.
    log.info("max_bandwidth: %s" % MAX_BANDWIDTH)

    #####################################################
    # Implement these methods in a subclass
    #####################################################
    def _distance_2_link_quality(self, distance):
        """Return ``(connected, netem_settings)`` for *distance*."""
        distance = distance * 1.0
        default_link_quality = \
            {self.NETEM_KEY_LOSS: None,
             self.NETEM_KEY_LIMIT: None,
             self.NETEM_KEY_DELAY: None,
             self.NETEM_KEY_CORRUPT: None,
             self.NETEM_KEY_DUPLICATE: None,
             self.NETEM_KEY_REORDER: None,
             self.NETEM_KEY_RATE: None
             }
        # distribute bandwidth linear for dist in [0, 30)
        # TODO: other way than defining maximum bandwidth?
        max_bandwidth = scenario_config.get_link_bandwidth() or self.MAX_BANDWIDTH
        distance += 1
        if distance >= 0:
            distance = distance / 2
        if distance >= 0:
            bandwidth = 1.0 * max_bandwidth / distance if distance > 1 else max_bandwidth
            default_link_quality[LinkQualityConstants.LINK_QUALITY_KEY_BANDWIDTH] = bandwidth
            # Delay grows linearly past distance 1; variation is 10% of it.
            delay_const = (distance - 1) * 2 if distance > 1 else 0
            delay_const_str = '%.2f' % delay_const
            delay_variation = delay_const / 10.0
            delay_variation_str = '%.2f' % delay_variation
            delay_cmd = "{delay_const}ms {delay_var}ms 25%".format(delay_const=delay_const_str,
                                                                  delay_var=delay_variation_str)
            # delay_cmd = "{delay_const} {delay_var} distribution normal".format(delay_const=delay_const, delay_var=delay_variation)
            default_link_quality[self.NETEM_KEY_DELAY] = delay_cmd
            # return bandwidth, delay_const, delay_variation
            # Only links above 1000 count as connected -- TODO confirm unit.
            if bandwidth >= 1000:
                return True, default_link_quality
        return False, default_link_quality
class LinkQualityModelWiFiExponential(LinkQualityModelWiFiLinear):
    #####################################################
    # Implement these methods in a subclass
    #####################################################
    # TODO: Abstract!
    def _distance_2_link_quality(self, distance):
        """Return ``(connected, netem_settings)``: bandwidth halves every 4
        distance units; delay equals the divisor used for the bandwidth."""
        distance = distance * 1.0
        default_link_quality = \
            {self.NETEM_KEY_LOSS: None,
             self.NETEM_KEY_LIMIT: None,
             self.NETEM_KEY_DELAY: None,
             self.NETEM_KEY_CORRUPT: None,
             self.NETEM_KEY_DUPLICATE: None,
             self.NETEM_KEY_REORDER: None,
             self.NETEM_KEY_RATE: None
             }
        # distribute bandwidth linear for dist in [0, 30)
        # TODO: other way than defining maximum bandwidth?
        max_bandwidth = scenario_config.get_link_bandwidth() or self.MAX_BANDWIDTH
        if distance >= 0:
            # Exponential fall-off: divisor doubles every 4 units.
            bandwidth_divisor = 2 ** int(distance / 4.0)
            bandwidth = 1.0 * max_bandwidth / bandwidth_divisor if distance >= 1 else max_bandwidth
            default_link_quality[LinkQualityConstants.LINK_QUALITY_KEY_BANDWIDTH] = bandwidth
            delay_const = bandwidth_divisor
            delay_const_str = '%.2f' % delay_const
            delay_variation = delay_const / 10.0
            delay_variation_str = '%.2f' % delay_variation
            delay_cmd = "{delay_const}ms {delay_var}ms 25%".format(delay_const=delay_const_str,
                                                                  delay_var=delay_variation_str)
            # delay_cmd = "{delay_const} {delay_var} distribution normal".format(delay_const=delay_const, delay_var=delay_variation)
            default_link_quality[self.NETEM_KEY_DELAY] = delay_cmd
            # return bandwidth, delay_const, delay_variation
            if bandwidth >= 1000:
                return True, default_link_quality
        return False, default_link_quality
if __name__ == '__main__':
    # print LinkQualityModelWiFi().distance_2_link_quality(0)
    # print LinkQualityModelWiFi().distance_2_link_quality(0.5)
    # print LinkQualityModelWiFi().distance_2_link_quality(1)
    # print LinkQualityModelWiFi().distance_2_link_quality(1.1)
    # print LinkQualityModelWiFi().distance_2_link_quality(2.1)
    # print LinkQualityModelWiFi().distance_2_link_quality(3)
    # print LinkQualityModelWiFi().distance_2_link_quality(100)
    # Compare both models for distances 0..29 and print the result as rows
    # of a LaTeX table (alternating rows shaded via \trowgray).
    values = []
    for x in range(0, 30):
        vals1 = LinkQualityModelWiFiLinear()._distance_2_link_quality(x)
        vals2 = LinkQualityModelWiFiExponential()._distance_2_link_quality(x)
        values.append([x] + list(vals1) + list(vals2))
    print('\n'.join(
        [("\\trowgray\n" if val[0] % 2 == 0 else "") + "\\hline\n%s & %.00f & %s & %s & %.00f & %s & %s \\\\" % tuple(
            val) for val in values]))
|
ryonsherman/rcbot | utils/text.py | Python | mit | 190 | 0 | #!/usr/bin/e | nv python2
class style:
    """mIRC-style control-code helpers for formatting IRC messages."""
    @staticmethod
    def bold(text):
        # \x02 toggles bold in mIRC formatting codes.
        return "\x02%s\x02" % text
    @staticmethod
    def underline(text):
        # \x1F toggles underline.
        return "\x1F%s\x1F" % text
|
geraldoandradee/pytest | testing/test_assertion.py | Python | mit | 11,084 | 0.001804 | import sys
import py, pytest
import _pytest.assertion as plugin
from _pytest.assertion import reinterpret, util
needsnewassert = pytest.mark.skipif("sys.version_info < (2,6)")
@pytest.fixture
def mock_config():
    """Fixture: minimal stand-in for the pytest config object exposing only
    the ``verbose`` option."""
    class Config(object):
        verbose = False
        def getoption(self, name):
            if name != 'verbose':
                raise KeyError('Not mocked out: %s' % name)
            return self.verbose
    return Config()
def interpret(expr):
    # Re-evaluate *expr* in the caller's frame through pytest's assertion
    # reinterpretation machinery (sys._getframe(1) == the calling test).
    return reinterpret.reinterpret(expr, py.code.Frame(sys._getframe(1)))
class TestBinReprIntegration:
    """Check that a conftest-provided pytest_assertrepr_compare hook is
    actually invoked for a failing ``==`` assertion."""
    pytestmark = needsnewassert
    def test_pytest_assertrepr_compare_called(self, testdir):
        testdir.makeconftest("""
            l = []
            def pytest_assertrepr_compare(op, left, right):
                l.append((op, left, right))
            def pytest_funcarg__l(request):
                return l
        """)
        testdir.makepyfile("""
            def test_hello():
                assert 0 == 1
            def test_check(l):
                assert l == [("==", 0, 1)]
        """)
        result = testdir.runpytest("-v")
        result.stdout.fnmatch_lines([
            "*test_hello*FAIL*",
            "*test_check*PASS*",
        ])
def callequal(left, right, verbose=False):
    """Invoke the '==' comparison-explanation hook with a mocked config."""
    cfg = mock_config()
    cfg.verbose = verbose
    return plugin.pytest_assertrepr_compare(cfg, '==', left, right)
class TestAssert_reprcompare:
    """Unit tests for the rich '==' comparison explanations produced by
    pytest_assertrepr_compare (text diffs, containers, broken reprs)."""
    def test_different_types(self):
        # No explanation is produced when operand types differ.
        assert callequal([0, 1], 'foo') is None
    def test_summary(self):
        summary = callequal([0, 1], [0, 2])[0]
        assert len(summary) < 65
    def test_text_diff(self):
        diff = callequal('spam', 'eggs')[1:]
        assert '- spam' in diff
        assert '+ eggs' in diff
    def test_text_skipping(self):
        # Long common prefixes are elided in non-verbose mode.
        lines = callequal('a'*50 + 'spam', 'a'*50 + 'eggs')
        assert 'Skipping' in lines[1]
        for line in lines:
            assert 'a'*50 not in line
    def test_text_skipping_verbose(self):
        lines = callequal('a'*50 + 'spam', 'a'*50 + 'eggs', verbose=True)
        assert '- ' + 'a'*50 + 'spam' in lines
        assert '+ ' + 'a'*50 + 'eggs' in lines
    def test_multiline_text_diff(self):
        left = 'foo\nspam\nbar'
        right = 'foo\neggs\nbar'
        diff = callequal(left, right)
        assert '- spam' in diff
        assert '+ eggs' in diff
    def test_list(self):
        expl = callequal([0, 1], [0, 2])
        assert len(expl) > 1
    def test_list_different_lenghts(self):
        expl = callequal([0, 1], [0, 1, 2])
        assert len(expl) > 1
        expl = callequal([0, 1, 2], [0, 1])
        assert len(expl) > 1
    def test_dict(self):
        expl = callequal({'a': 0}, {'a': 1})
        assert len(expl) > 1
    def test_dict_omitting(self):
        lines = callequal({'a': 0, 'b': 1}, {'a': 1, 'b': 1})
        assert lines[1].startswith('Omitting 1 identical item')
        assert 'Common items' not in lines
        for line in lines[1:]:
            assert 'b' not in line
    def test_dict_omitting_verbose(self):
        lines = callequal({'a': 0, 'b': 1}, {'a': 1, 'b': 1}, verbose=True)
        assert lines[1].startswith('Common items:')
        assert 'Omitting' not in lines[1]
        assert lines[2] == "{'b': 1}"
    def test_set(self):
        expl = callequal(set([0, 1]), set([0, 2]))
        assert len(expl) > 1
    def test_frozenzet(self):
        expl = callequal(frozenset([0, 1]), set([0, 2]))
        assert len(expl) > 1
    def test_Sequence(self):
        col = py.builtin._tryimport(
            "collections.abc",
            "collections",
            "sys")
        if not hasattr(col, "MutableSequence"):
            pytest.skip("cannot import MutableSequence")
        MutableSequence = col.MutableSequence
        class TestSequence(MutableSequence): # works with a Sequence subclass
            def __init__(self, iterable):
                self.elements = list(iterable)
            def __getitem__(self, item):
                return self.elements[item]
            def __len__(self):
                return len(self.elements)
            def __setitem__(self, item, value):
                pass
            def __delitem__(self, item):
                pass
            def insert(self, item, index):
                pass
        expl = callequal(TestSequence([0, 1]), list([0, 2]))
        assert len(expl) > 1
    def test_list_tuples(self):
        expl = callequal([], [(1,2)])
        assert len(expl) > 1
        expl = callequal([(1,2)], [])
        assert len(expl) > 1
    def test_list_bad_repr(self):
        # A repr that raises must not crash the explanation machinery.
        class A:
            def __repr__(self):
                raise ValueError(42)
        expl = callequal([], [A()])
        assert 'ValueError' in "".join(expl)
        expl = callequal({}, {'1': A()})
        assert 'faulty' in "".join(expl)
    def test_one_repr_empty(self):
        """
        the faulty empty string repr did trigger
        an unbound local error in _diff_text
        """
        class A(str):
            def __repr__(self):
                return ''
        expl = callequal(A(), '')
        assert not expl
    def test_repr_no_exc(self):
        expl = ' '.join(callequal('foo', 'bar'))
        assert 'raised in repr()' not in expl
def test_python25_compile_issue257(testdir):
    # Regression test: assertion rewriting must cope with a trailing comment
    # inside the test module (issue #257).
    testdir.makepyfile("""
        def test_rewritten():
            assert 1 == 2
        # some comment
        """)
    result = testdir.runpytest()
    assert result.ret == 1
    result.stdout.fnmatch_lines("""
            *E*assert 1 == 2*
            *1 failed*
    """)
@needsnewassert
def test_rewritten(testdir):
    # The rewriter injects @py_builtins into a rewritten module's globals.
    testdir.makepyfile("""
        def test_rewritten():
            assert "@py_builtins" in globals()
    """)
    assert testdir.runpytest().ret == 0
def test_reprcompare_notin(mock_config):
    # 'not in' failures point at where the needle actually occurs.
    detail = plugin.pytest_assertrepr_compare(
        mock_config, 'not in', 'foo', 'aaafoobbb')[1:]
    assert detail == ["'foo' is contained here:", ' aaafoobbb', '? +++']
@needsnewassert
def test_pytest_assertrepr_compare_integration(testdir):
    # End-to-end: a failing set comparison reports the extra element.
    testdir.makepyfile("""
        def test_hello():
            x = set(range(100))
            y = x.copy()
            y.remove(50)
            assert x == y
    """)
    result = testdir.runpytest()
    result.stdout.fnmatch_lines([
        "*def test_hello():*",
        "*assert x == y*",
        "*E*Extra items*left*",
        "*E*50*",
    ])
@needsnewassert
def test_sequence_comparison_uses_repr(testdir):
    # Elements unique to each set must be shown with their repr
    # (quoted strings), not bare.
    testdir.makepyfile("""
        def test_hello():
            x = set("hello x")
            y = set("hello y")
            assert x == y
    """)
    result = testdir.runpytest()
    result.stdout.fnmatch_lines([
        "*def test_hello():*",
        "*assert x == y*",
        "*E*Extra items*left*",
        "*E*'x'*",
        "*E*Extra items*right*",
        "*E*'y'*",
    ])
@pytest.mark.xfail("sys.version_info < (2,6)")
def test_assert_compare_truncate_longmessage(testdir):
    # Long explanations are truncated by default and shown fully with -vv.
    testdir.makepyfile(r"""
        def test_long():
            a = list(range(200))
            b = a[::2]
            a = '\n'.join(map(str, a))
            b = '\n'.join(map(str, b))
            assert a == b
    """)
    result = testdir.runpytest()
    result.stdout.fnmatch_lines([
        "*truncated*use*-vv*",
    ])
    result = testdir.runpytest('-vv')
    result.stdout.fnmatch_lines([
        "*- 197",
    ])
@needsnewassert
def test_assertrepr_loaded_per_dir(testdir):
testdir.makepyfile(test_base=['def test_base(): assert 1 == 2'])
a = testdir.mkdir('a')
a_test = a.join('test_a.py')
a_test.write('def test_a(): assert 1 == 2')
a_conftest = a.join('conftest.py')
a_conftest.write('def pytest_assertrepr_compare(): return ["summary a"]')
b = testdir.mkdir('b')
b_test = b.join('test_b.py')
b_test.write('def test_b(): assert 1 == 2')
b_conftest = b.join('conftest.py')
b_conftest.write('def pytest_assertrepr_compare(): return ["summary b"]')
result = testdir.runpytest()
result.stdout.fnmatch_lines([
'*def test_base():*',
'*E*assert 1 == 2*',
' |
aneumeier/userprofile | userprofile/mail.py | Python | mit | 494 | 0 | from django.conf import settings
from django.core.mail import send_mail
from django.core.urlresol | vers import reverse
def send_validation(strategy, backend, code):
    """Email a partial-pipeline verification link for the address being
    validated (python-social-auth email validation step)."""
    url = '{0}?verification_code={1}'.format(
        reverse('social:complete', args=(backend.name,)),
        code.code
    )
    # Turn the relative URL into an absolute one for use inside the email.
    url = strategy.request.build_absolute_uri(url)
    send_mail('Validate your account', 'Validate your account {0}'.format(url),
              settings.EMAIL_FROM, [code.email], fail_silently=False)
|
kensonman/webframe | management/commands/pref.py | Python | apache-2.0 | 18,742 | 0.031267 | # -*- coding: utf-8 -*-
#
# File: src/webframe/management/commands/pref.py
# Date: 2020-04-22 21:35
# Author: Kenson Man <kenson@kenson.idv.hk>
# Desc: Import / Create / Update / Delete preference
#
from django.conf import settings
from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from django.db.models import Q
from pathlib import Path
from webframe.functions import TRUE_VALUES, LogMessage as lm, getTime
from webframe.models import Preference, AbstractPreference
from uuid import UUID
import logging, os, glob, sys, re
logger=logging.getLogger('webframe.commands.prefs')
class Command(BaseCommand):
help = '''Mainpulate the preference in database. Including insert/update/delete/view/import/gensecret/gendoc; Importing support csv|xlsx file.'''
   def __getIndent__(self, indent=0, ch=' '):
      # Build an indentation prefix of *indent* copies of *ch*.
      return ch*indent
   def create_parser(self, cmdName, subcommand, **kwargs):
      # Extend the default parser with a usage-example epilog.
      parser=super().create_parser(cmdName, subcommand, **kwargs)
      parser.epilog='''Example:\r\n
      \tpref import path_to_prefs    #Import a folder or a csv/xlsx file\r\n
      \tpref set ABC --value="def"   #Set the preference "ABC" to value "def"\r\n
      \tpref gensecret               #Generate the encryption secret; PLEASE backup in secure way.\r\n
      \tpref gendoc prefsDoc.html    #Generate the documentation and save as as output.html
      '''
      return parser
def add_arguments(self, parser):
#Default Value
pattern='Pref({pref.id}:{pref.name}): {pref.value}'
action='show'
max=256
wildcard='*'
tmpl='webframe/prefsDoc.html'
#Adding arguments
parser.add_argument('action', type=str, help='The action to be taken. One of import/export/show/set/delete/gensecret/gendoc; Default is {0}'.format(action), default=action)
parser.add_argument('name', type=str, nargs='?', help='[import/export/show/set/delete/gendoc]; The name of the preference or path of importing/exporting file (csv|xlsx);')
parser.add_argument('--file', dest='file', type=str, help='[import/export/gendoc]; The file path for import/export/output.')
parser.add_argument('--value', dest='value', type=str, help='[set/delete]; The value of the preference;', default=None)
parser.add_argument('--owner', dest='owner', type=str, help='[set/delete]; The owner of the preference; Optional;', default=None)
parser.add_argument('--noowner', dest='noowner', action='store_true', help='[show/set/delete]; The target preference has no owner; Optional; Default False')
parser.add_argument('--parent', dest='parent', type=str, help='[show/set/delete]; The parent\'s name of the preference. Optional;', default=None)
parser.add_argument('--noparent', dest='noparent', action='store_true', help='[show/set/delete]; The target preference has no parent; Optional; Default False')
parser.add_argument('--pattern', dest='pattern', type=str, help='[show]; The output pattern. {0}'.format(pattern), default=pattern)
parser.add_argument('--max', dest='max', type=int, help='[show]; The maximum number of preference to show. Default is {0}'.format(max), default=max)
parser.add_argument('--wildcard', dest='wildcard', type=str, help='[show]; Specify the wildcard; Default is {0}'.format(wildcard), default=wildcard)
#Importing
parser.add_argument('--sep', dest='separator', type=str, default=',', help='[import]; The separator when CSV importing; Default \",\"')
parser.add_argument('--encoding', dest='encoding', type=str, default='utf-8', help='[import]; The encoding when CSV importing; Default \"utf-8\"')
parser.add_argument('--quotechar', dest='quotechar', type=str, default='\"', help='[import]; The quote-char when CSV importing; Default double quote: \"')
parser.add_argument('--filepath', dest=' | filepath', action='store_true', help='[import]; Import the file-path in preferences; Default False')
parser.add_argument('--force', '-f ', dest='force', action='store_true', help='[import]; Force the import', default=False)
#Generate Doc
parser.add_argument('--tmpl', dest='tm | pl', type=str, help="[gendoc]; The template name when generating document; Default: {0}".format(tmpl), default=tmpl)
def __get_owner__(self, owner=None):
if not owner: return None
logger.debug('Getting owner by: "%s"', owner)
owner=owner if owner else self.kwargs['owner']
return get_user_model().objects.get(username=owner) if owner else None
   def __get_parent__(self, parent=None):
      '''Resolve *parent* (an id or a name, falling back to the command line
      argument) to a Preference instance; None when not found.'''
      parent=parent if parent else self.kwargs['parent']
      if parent:
         try:
            #Get parent by uuid
            return Preference.objects.get(id=parent)
         except:
            # NOTE(review): bare except also hides unrelated errors
            try:
               #Get parent by name
               return Preference.objects.get(name=parent)
            except:
               pass
      return None
   def __get_pref__(self, **kwargs):
      '''Build the Preference queryset matching the command line filters
      (name/owner/noowner/parent/noparent/filepath), ordered for display.'''
      owner=kwargs['owner'] if 'owner' in kwargs else self.__get_owner__()
      parent=kwargs['parent'] if 'parent' in kwargs else self.__get_parent__()
      name=kwargs['name'] if 'name' in kwargs else self.kwargs['name']
      lang=kwargs['lang'] if 'lang' in kwargs else None
      # File-path preferences are keyed by basename only.
      if self.kwargs['filepath']: name=os.path.basename(name)
      if self.kwargs['parent'] and parent==None:
         raise Preference.DoesNotExist('Parent Preference not found: {0}'.format(self.kwargs['parent']))
      rst=Preference.objects.all()
      if name and name!='*':
         rst=rst.filter(name=name)
      if owner:
         rst=rst.filter(owner=owner)
      elif self.kwargs['noowner']:
         rst=rst.filter(owner__isnull=True)
      if parent:
         rst=rst.filter(parent=parent)
      elif self.kwargs['noparent']:
         rst=rst.filter(parent__isnull=True)
      if self.kwargs['filepath']:
         rst=rst.filter(tipe=AbstractPreference.TYPE_FILEPATH)
      rst=rst.order_by('owner', 'parent', 'sequence', 'name')
      return rst
def __get_name__( self, name ):
'''
Get the name and sequence according to the name.
@param name The string including the sequence and name. For example, '01.Target' will return a tuple (1, 'Target')
@return A tuple including the sequence and the name
'''
p=re.search(r'^\d+\.', name)
if p:
s=p.group(0)
return name[len(s):].strip(), int(name[0:len(s)-1])
return (name, sys.maxsize if hasattr(sys, 'maxsize') else sys.maxint) #Default append
   def output( self, pref, pattern=None ):
      '''Print *pref* with *pattern*, then recurse into its children with a
      deeper indentation.'''
      pattern=pattern if pattern else self.kwargs['pattern']
      print(pattern.format(pref=pref))
      pattern='   {0}'.format(pattern)
      for ch in pref.childs:
         self.output(ch, pattern)
def handle(self, *args, **kwargs):
verbosity=int(kwargs['verbosity'])
if verbosity==3:
logger.setLevel(logging.DEBUG)
elif verbosity==2:
logger.setLevel(logging.INFO)
elif verbosity==1:
logger.setLevel(logging.WARNING)
else:
logger.setLevel(logging.ERROR)
self.kwargs=kwargs
action=kwargs['action']
if action=='import':
self.imp()
elif action=='create': #for backward compatibility
self.set()
elif action=='update': #for backward compatibility
self.set()
elif action=='set':
self.set()
elif action=='delete':
self.delete()
elif action=='show':
self.show()
elif action=='gensecret':
self.gensecret()
elif action=='gendoc':
self.gendoc()
elif action=='export':
self.expCsv()
else:
logger.warning('Unknown action: {0}'.format(action))
logger.warn('DONE!')
def show(self):
logger.info('Showing the preference ...')
q=Preference.objects.all()
if self.kwargs['name']:
logger.info(' with the name filter: {0}'.format(self.kwargs['name']))
if self.kwargs['wildcard'] in self.kwargs['name']:
q=q.filter(name__icontains=self.kwargs['name'].replace(self.kwargs['wildcard'], ''))
else:
q=q.filter |
clarkyzl/flink | flink-python/pyflink/fn_execution/timerservice_impl.py | Python | apache-2.0 | 4,824 | 0.001451 | ################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
import collections
import time
from enum import Enum
from pyflink.datastream import TimerService
from pyflink.datastream.tim | erservice import InternalTimer, K, N, InternalTimerService
from pyflink.fn_execution.state_impl import RemoteKeyedStateBackend
class InternalTimerImpl(InternalTimer[K, N]):
    """Immutable (timestamp, key, namespace) triple identifying a timer."""

    def __init__(self, timestamp: int, key: K, namespace: N):
        self._timestamp = timestamp
        self._key = key
        self._namespace = namespace

    def get_timestamp(self) -> int:
        return self._timestamp

    def get_key(self) -> K:
        return self._key

    def get_namespace(self) -> N:
        return self._namespace

    def __hash__(self):
        # Mix the two 32-bit halves of the timestamp, then fold in key and
        # namespace with the conventional 31-multiplier scheme.
        result = int(self._timestamp ^ (self._timestamp >> 32))
        # NOTE(review): assumes the key is iterable (row-like) -- confirm.
        result = 31 * result + hash(tuple(self._key))
        result = 31 * result + hash(self._namespace)
        return result

    def __eq__(self, other):
        return self.__class__ == other.__class__ and self._timestamp == other._timestamp \
            and self._key == other._key and self._namespace == other._namespace
class TimerOperandType(Enum):
    # Operation codes attached to buffered timer mutations.
    REGISTER_EVENT_TIMER = 0
    REGISTER_PROC_TIMER = 1
    DELETE_EVENT_TIMER = 2
    DELETE_PROC_TIMER = 3
class InternalTimerServiceImpl(InternalTimerService[N]):
    """
    Internal implementation of InternalTimerService.

    Timer registrations/deletions are buffered in ``self.timers`` (an
    OrderedDict used as an ordered set: keys are (operand, timer) pairs,
    values are always None) instead of firing immediately.
    """
    def __init__(self, keyed_state_backend: RemoteKeyedStateBackend):
        self._keyed_state_backend = keyed_state_backend
        self._current_watermark = None
        self.timers = collections.OrderedDict()
    def current_processing_time(self):
        # Wall-clock time in milliseconds, matching Flink's convention.
        return int(time.time() * 1000)
    def current_watermark(self):
        return self._current_watermark
    def advance_watermark(self, watermark: int):
        self._current_watermark = watermark
    def register_processing_time_timer(self, namespace: N, t: int):
        current_key = self._keyed_state_backend.get_current_key()
        timer = (TimerOperandType.REGISTER_PROC_TIMER, InternalTimerImpl(t, current_key, namespace))
        self.timers[timer] = None
    def register_event_time_timer(self, namespace: N, t: int):
        current_key = self._keyed_state_backend.get_current_key()
        timer = (TimerOperandType.REGISTER_EVENT_TIMER,
                 InternalTimerImpl(t, current_key, namespace))
        self.timers[timer] = None
    def delete_processing_time_timer(self, namespace: N, t: int):
        current_key = self._keyed_state_backend.get_current_key()
        timer = (TimerOperandType.DELETE_PROC_TIMER, InternalTimerImpl(t, current_key, namespace))
        self.timers[timer] = None
    def delete_event_time_timer(self, namespace: N, t: int):
        current_key = self._keyed_state_backend.get_current_key()
        timer = (TimerOperandType.DELETE_EVENT_TIMER, InternalTimerImpl(t, current_key, namespace))
        self.timers[timer] = None
class TimerServiceImpl(TimerService):
    """
    Internal implementation of TimerService.

    Thin facade that delegates every call to an InternalTimerServiceImpl
    with a ``None`` namespace.
    """
    def __init__(self, internal_timer_service: InternalTimerServiceImpl):
        self._internal = internal_timer_service
        # Shared reference: mutations buffered by the internal service are
        # visible here as well.
        self.timers = self._internal.timers
    def current_processing_time(self) -> int:
        return self._internal.current_processing_time()
    def current_watermark(self) -> int:
        return self._internal.current_watermark()
    def advance_watermark(self, wm):
        self._internal.advance_watermark(wm)
    def register_processing_time_timer(self, t: int):
        self._internal.register_processing_time_timer(None, t)
    def register_event_time_timer(self, t: int):
        self._internal.register_event_time_timer(None, t)
    def delete_processing_time_timer(self, t: int):
        self._internal.delete_processing_time_timer(None, t)
    def delete_event_time_timer(self, t: int):
        self._internal.delete_event_time_timer(None, t)
|
eljost/pysisyphus | deprecated/tests/test_baker_ts/test_baker_ts.py | Python | gpl-3.0 | 6,474 | 0.000618 | #!/usr/bin/env python3
import itertools as it
from pathlib import Path
from pprint import pprint
import shutil
import time
import numpy as np
import pandas as pd
from pysisyphus.calculators.Gaussian16 import Gaussian16
from pysisyphus.calculators.PySCF import PySCF
from pysisyphus.color import red, green
from pysisyphus.helpers import get_baker_ts_geoms, do_final_hessian, \
geom_from_library, get_baker_ts_geoms_flat, \
geom_loader
from pysisyphus.intcoords.augment_bonds import augment_bonds
from pysisyphus.tsoptimizers import *
def print_summary(converged, failed, cycles, ran, runid):
    """Print a short convergence summary for optimisation run *runid*."""
    print(f"converged: {converged:02d}/{ran+1:02d}")
    print(f"  failed: {failed:d}")
    print(f"  cycles: {cycles}")
    print(f"     run: {runid}")
def run_baker_ts_opts(geoms, meta, coord_type="cart", thresh="baker", runid=0):
    """From 10.1002/(SICI)1096-987X(199605)17:7<888::AID-JCC12>3.0.CO;2-7

    Optimise every TS geometry in *geoms* with RSIRFOptimizer, compare the
    final energy against the reference in *meta* and print a running
    summary. Returns (results, duration, cycles).
    """
    start = time.time()
    converged = 0
    failed = 0
    cycles = 0
    opt_kwargs = {
        "thresh": thresh,
        # "max_cycles": 150,
        "max_cycles": 100,
        # "max_cycles": 50,
        "dump": True,
        "trust_radius": 0.3,
        "trust_max": 0.3,
        # "max_micro_cycles": 1,
    }
    results = dict()
    for i, (name, geom) in enumerate(geoms.items()):
        print(f"@Running {name}")
        charge, mult, ref_energy = meta[name]
        calc_kwargs = {
            "charge": charge,
            "mult": mult,
            "pal": 4,
        }
        geom.set_calculator(Gaussian16(route="HF/3-21G", **calc_kwargs))
        geom = augment_bonds(geom)
        # geom.set_calculator(PySCF(basis="321g", **calc_kwargs))
        # opt = RSPRFOptimizer(geom, **opt_kwargs)
        opt = RSIRFOptimizer(geom, **opt_kwargs)
        # opt = RSIRFOptimizer(geom, **opt_kwargs)
        # opt = TRIM(geom, **opt_kwargs)
        opt.run()
        if opt.is_converged:
            converged += 1
        else:
            failed += 1
        cycles += opt.cur_cycle + 1
        energies_match = np.allclose(geom.energy, ref_energy)
        try:
            assert np.allclose(geom.energy, ref_energy)
            # Backup TS if optimization succeeded
            # ts_xyz_fn = Path(name).stem + "_opt_ts.xyz"
            # out_path = Path("/scratch/programme/pysisyphus/xyz_files/baker_ts_opt/")
            print(green(f"\t@Energies MATCH for {name}! ({geom.energy:.6f}, {ref_energy:.6f})"))
            # with open(out_path / ts_xyz_fn, "w") as handle:
            #     handle.write(geom.as_xyz())
        except AssertionError as err:
            print(red(f"\t@Calculated energy {geom.energy:.6f} and reference "
                      f"energy {ref_energy:.6f} DON'T MATCH'."))
        print()
        # NOTE(review): '&' is a bitwise AND between an int and a bool here;
        # 'converged and energies_match' may have been intended -- confirm.
        print_summary(converged & energies_match, failed, cycles, i, runid)
        print()
        results[name] = (opt.cur_cycle + 1, opt.is_converged)
        pprint(results)
        print()
        # do_final_hessian(geom, False)
        # print()
    end = time.time()
    duration = end - start
    print(f" runtime: {duration:.1f} s")
    print_summary(converged, failed, cycles, i, runid)
    return results, duration, cycles
# NOTE(review): pytest and using_gaussian16 are not imported in the visible
# header of this module -- confirm they are provided elsewhere.
@pytest.mark.benchmark
@using_gaussian16
def _test_baker_ts_optimizations():
    """Benchmark driver: run the Baker TS set repeatedly and report cycle
    counts and runtimes."""
    coord_type = "redund"
    # coord_type = "dlc"
    # coord_type = "cart"
    thresh = "baker"
    runs = 1
    all_results = list()
    durations = list()
    all_cycles = list()
    for i in range(runs):
        geoms, meta = get_baker_ts_geoms(coord_type=coord_type)
        # only = "01_hcn.xyz"
        # only = "24_h2cnh.xyz"
        # only = "15_hocl.xyz"
        # only = "02_hcch.xyz"
        # geoms = {
        #     only: geoms[only],
        # }
        # Known problem cases, grouped by their observed behaviour.
        fails = (
            "09_parentdieslalder.xyz",
            "12_ethane_h2_abstraction.xyz",
            "22_hconhoh.xyz",
            "17_claisen.xyz",
            "15_hocl.xyz",
        )
        works = (
            "05_cyclopropyl.xyz",
            "08_formyloxyethyl.xyz",
            "14_vinyl_alcohol.xyz",
            "16_h2po4_anion.xyz",
            "18_silyene_insertion.xyz",
            "04_ch3o.xyz",
            "06_bicyclobutane.xyz",
            "07_bicyclobutane.xyz",
            "23_hcn_h2.xyz",
            "01_hcn.xyz",
            "25_hcnh2.xyz",
        )
        math_error_but_works = (
            # [..]/intcoords/derivatives.py", line 640, in d2q_d
            # x99 = 1/sqrt(x93)
            # ValueError: math domain error
            # ZeroDivison Fix
            "20_hconh3_cation.xyz",
            "24_h2cnh.xyz",
            "13_hf_abstraction.xyz",
            "19_hnccs.xyz",
            "21_acrolein_rot.xyz",
            "03_h2co.xyz",
        )
        alpha_negative = (
            "02_hcch.xyz",
        )
        no_imag = (
            "10_tetrazine.xyz",
            "11_trans_butadiene.xyz",
        )
        only = (
            "18_silyene_insertion.xyz",
            # "21_acrolein_rot.xyz",
            # "22_hconhoh.xyz",
        )
        use = (
            # fails,
            works,
            math_error_but_works,
            # alpha_negative,
            # no_imag,
            # only,
        )
        geoms = {key: geoms[key] for key in it.chain(*use)}
        # geoms = {"05_cyclopropyl.xyz": geoms["05_cyclopropyl.xyz"]}
        results, duration, cycles = run_baker_ts_opts(
            geoms,
            meta,
            coord_type,
            thresh,
            runid=i
        )
        all_results.append(results)
        durations.append(duration)
        all_cycles.append(cycles)
        print(f"@Run {i}, {cycles} cycles")
        print(f"@All cycles: {all_cycles}")
        print(f"@This runtime: {duration:.1f} s")
        print(f"@Total runtime: {sum(durations):.1f} s")
        print(f"@")
    # NOTE: everything below is intentionally disabled by this early return
    # (kept from the original; it pickles per-run cycle counts).
    return
    names = list(results.keys())
    cycles = {
        name: [result[name][0] for result in all_results] for name in names
    }
    df = pd.DataFrame.from_dict(cycles)
    df_name = f"cycles_{coord_type}_{runs}_runs_{thresh}.pickle"
    df.to_pickle(df_name)
    print(f"Pickled dataframe to {df_name}")
    print(f"{runs} runs took {sum(durations):.1f} seconds.")


if __name__ == "__main__":
    _test_baker_ts_optimizations()
|
zhouyao1994/incubator-superset | superset/migrations/versions/ad82a75afd82_add_query_model.py | Python | apache-2.0 | 3,118 | 0.001924 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Update models to support storing the queries.
Revision ID: ad82a75afd82
Revises: f162a1dea4c4
Create Date: 2016-07-25 17:48:12.771103
"""
# revision identifiers, used by Alembic.
revision = "ad82a75afd82"
down_revision = "f162a1dea4c4"
import sqlalchemy as sa
from alembic import op
def upgrade():
op.create_table(
"query",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("client_id", sa.String(length=11), nullable=False),
sa.Column("database_id", sa.Integer(), nullable=False),
sa.Column("tmp_table_name", sa.String(length=256), nullable=True),
sa.Column("tab_name", sa.String(length=256), nullable=True),
sa.Column("sql_editor_id", sa.String(length=256), nullable=True),
sa.Column("user_id", sa.Integer(), nullable=True),
sa.Column("status", sa.String(length=16), nullable=True),
sa.Column("name", sa.String(length=256), nullable=True),
sa.Column("schema", sa.String(length=256), nullable=True),
sa.Column("sql", sa.Text(), nullable=True),
sa.Column("select_sql", sa.Text(), nullable=True),
sa.Column("executed_sql", sa.Text(), nullable=True),
sa.Column("limit", sa.Integer(), nullable=True),
sa.Column("limit_used", sa.Boolean(), nullable=True),
sa.Column("select_as_cta", sa.Boolean(), nullable=True),
sa.Column("select_as_cta_used", sa.Boolean(), nullable=T | rue),
sa.Column("progress", sa.Integer(), nullable=True),
sa.Column("rows", sa.Integer(), nullable=True),
sa.Column("error_message", sa.Text(), nullable=True),
sa.Column("start_time", sa.Numeric(precision= | 20, scale=6), nullable=True),
sa.Column("changed_on", sa.DateTime(), nullable=True),
sa.Column("end_time", sa.Numeric(precision=20, scale=6), nullable=True),
sa.ForeignKeyConstraint(["database_id"], ["dbs.id"]),
sa.ForeignKeyConstraint(["user_id"], ["ab_user.id"]),
sa.PrimaryKeyConstraint("id"),
)
op.add_column(
"dbs", sa.Column("select_as_create_table_as", sa.Boolean(), nullable=True)
)
op.create_index(
op.f("ti_user_id_changed_on"), "query", ["user_id", "changed_on"], unique=False
)
def downgrade():
op.drop_table("query")
op.drop_column("dbs", "select_as_create_table_as")
|
rogerlindberg/autopilot | src/lib/wrappers/passwordentrydialog.py | Python | gpl-3.0 | 2,034 | 0.000492 | # Copyright (C) 2009-2015 Contributors as noted in the AUTHORS file
#
# This file is part of Autopilot.
#
# Autopilot is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Autopilot is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Autopilot. If not, see <http://www.gnu.org/licenses/>.
import wx
from lib.reporting.logger import Logger
from lib.wrappers.wrapper import Wrapper
from lib.app.constants import TIME_TO_WAIT_FOR_DIALOG_TO_SHOW_IN_MILLISECONDS
from lib.app.decorators import Overrides
wxPasswordEntryDialog = wx.PasswordEntryDialog
class PasswordEntryDialog(wx.PasswordEntr | yDialog, Wrapper):
def __init__(self, *args, **kw):
wxPasswordEntryDialog.__init__(self, *args, **kw)
Wrapper.__init__(self)
def | ShowModal(self, *args, **kw):
self.shown = True
Logger.add_result("Dialog opened")
wx.CallLater(TIME_TO_WAIT_FOR_DIALOG_TO_SHOW_IN_MILLISECONDS, self._register_and_explore)
super(PasswordEntryDialog, self).ShowModal(*args, **kw)
@Overrides(wxPasswordEntryDialog)
def IsShown(self):
return self.shown
@Overrides(wxPasswordEntryDialog)
def Destroy(self, *args, **kw):
self.shown = False
Logger.add_result("Dialog '%s' closed" % self.GetLabel())
wxPasswordEntryDialog.Destroy(self, *args, **kw)
def _register_and_explore(self):
PasswordEntryDialog.register_win(self)
self._explore()
@classmethod
def wrap(self, register_win):
wx.PasswordEntryDialog = PasswordEntryDialog
PasswordEntryDialog.register_win = register_win
|
JorgeDeLosSantos/metal-forming-itc | forming_2D.py | Python | mit | 37,581 | 0.015114 | # -*- coding: mbcs -*-
from part import *
from material import *
from section import *
from assembly import *
from step import *
from interaction import *
from load import *
from mesh import *
from optimization import *
from job import *
from sketch import *
from visualization import *
from connectorBehavior import *
import time
# Define constants
MODEL_NAME = "2D-MODEL"
STEP_PATH = "C:/Users/User/Desktop/LABPro/PI1501 - Rassini-Bypasa/geom/stp/"
STEP_FILES = ["sketch_lower_02","sketch_lower_03","sketch_lower_left_01","sketch_lower_right_01",
"sketch_pisador","sketch_upper_03","sketch_upper_left_01","sketch_upper_right_01",
"sketch_upper_left_02","sketch_upper_right_02"]
DYNEXP_STEPS = ["Initial","Step-01-Down","Step-01-Up","Step-02-Down","Step-02-Up","Step-03-Down","Step-03-Up"]
NFRAMES = 50.0
TIME_PERIOD = 0.86
YDISP = 1.428
MESH_SIZE_QUAD = 0.02
MESH_SIZE_TRI = 0.025
JOB_NAME = "MSZ-"+str(MESH_SIZE_QUAD).replace(".","")+time.strftime("_%d-%m-%Y-%H%M",time.localtime())
mdb.models.changeKey(fromName='Model-1', toName=MODEL_NAME)
# Define parts
# Blank
mdb.openStep(STEP_PATH + 'sketch_mp.STEP', scaleFromFile=OFF)
mdb.models[MODEL_NAME].ConstrainedSketchFromGeometryFile(geometryFile=mdb.acis, name='plate')
mdb.models[MODEL_NAME].ConstrainedSketch(name='__profile__', sheetSize=10.0)
mdb.models[MODEL_NAME].sketches['__profile__'].sketchOptions.setValues(gridOrigin=(0.0, 0.0))
mdb.models[MODEL_NAME].sketches['__profile__'].retrieveSketch(sketch=mdb.models[MODEL_NAME].sketches['plate'])
mdb.models[MODEL_NAME].Part(dimensionality=TWO_D_PLANAR, name='plate', type=DEFORMABLE_BODY)
mdb.models[MODEL_NAME].parts['plate'].BaseShell(sketch=mdb.models[MODEL_NAME].sketches['__profile__'])
del mdb.models[MODEL_NAME].sketches['__profile__']
# Analytic surfaces
for _stp in STEP_FILES:
mdb.openStep(STEP_PATH + _stp + ".STEP", scaleFromFile=OFF)
mdb.models[MODEL_NAME].ConstrainedSketchFromGeometryFile(geometryFile=mdb.acis, name=_stp)
mdb.models[MODEL_NAME].ConstrainedSketch(name='__profile__', sheetSize=10.0)
mdb.models[MODEL_NAME].sketches['__profile__'].sketchOptions.setValues(gridOrigin=(0.0, 0.0))
mdb.models[MODEL_NAME].sketches['__profile__'].retrieveSketch(sketch=mdb.models[MODEL_NAME].sketches[_stp])
mdb.models[MODEL_NAME].Part(dimensionality=TWO_D_PLANAR, name=_stp[7::], type=ANALYTIC_RIGID_SURFACE)
mdb.models[MODEL_NAME].parts[_stp[7::]].AnalyticRigidSurf2DPlanar(sketch=mdb.models[MODEL_NAME].sketches['__profile__'])
del mdb.models[MODEL_NAME].sketches['__profile__']
# Material
mdb.models[MODEL_NAME].Material(name='Acero 1018 US')
mdb.models[MODEL_NAME].materials['Acero 1018 US'].Density(table=((0.10555, ),
))
mdb.models[MODEL_NAME].materials['Acero 1018 US'].Elastic(table=((29700000.0,
0.33), ))
mdb.models[MODEL_NAME].materials['Acero 1018 US'].Plastic(table=((50800.03458,
0.0), (51320.13977, 0.82), (51376.4144, 0.841), (51781.35965, 0.898), (
51784.84056, 0.92), (52105.22884, 0.977), (52140.03789, 0.999), (
52442.8766, 1.056), (52529.17404, 1.078), (52876.8294, 1.135), (
52988.79851, 1.157), (53391.85827, 1.213), (53507.88843, 1.236), (
53929.8031, 1.292), (54093.40563, 1.315), (54475.57997, 1.371), (
54659.92289, 1.394), (55019.18127, 1.45), (55191.92117, 1.473), (
55558.2864, 1.528), (55721.59885, 1.551), (56028.93374, 1.607), (
56243.58953, 1.63), (56553.82517, 1.686), (56798.35873, 1.709), (
57048.25869, 1.764), (57256.53283, 1.788), (57509.91369, 1.843), (
57744.43965, 1.867), (57970.26335, 1.922), (58210.01067, 1.946), (
58405.23141, 2.0), (58661.22295, 2.025), (58860.35972, 2.079), (
59113.88562, 2.104), (59211.64103, 2.158), (59521.15148, 2.183), (
59624.27328, 2.236), (59925.51658, 2.262), (59986.72249, 2.315), (
60343.08012, 2.341), (60397.46926, 2.394), (60699.58279, 2.42), (
60755.27727, 2.472), (61121.35242, 2.498), (61126.2837, 2.551), (
61471.90854, 2.577), (61499.75578, 2.63), (61825.65549, 2.656), (
62178.96733, 2.735), (62518.50059, 2.814), (62831.05683, 2.893), (
63136.65126, 2.971), (63479.37535, 3.05), (63745.51953, 3.129), (
64049.08343, 3.208), (64353.37253, 3.287), (64638.22657, 3.365), (
64915.68369, 3.444), (65132.07994, 3.523), (65419.25459, 3.601), (
65630.42948, 3.68), (65650.44468, 3.702), (65905.85607, 3.759), (
66157.78655, 3.838), (66380.12935, 3.917), (66405.22087, 3.939), (
66614.07516, 3.995), (66626.69344, 4.018), (66811.47147, 4.074), (
66847.73089, 4.096), (67017.27996, 4.153), (67059.05082, 4.175), (
67223.81365, 4.232), (67244.98915, 4.254), (67412.21762, 4.31), (
67430.63741, 4.333), (67593.65978, 4.389), (67648.48403, 4.412), (
67801.64385, 4.468), (67842.39944, 4.491), (67989.75774, 4.547), (
68045.45222, 4.57), (68147.55876, 4.625), (68235.74168, 4.649), (
68309.85595, 4.704), (68399.77932, 4.727), (68457.64936, 4.782), (
68573.67952, 4.807), (68624.44272, 4.861), (68734.96144, 4.885), (
68763.67891, 4.94), (68878.40373, 4.964), (68931.05242, 5.019), (
69078.26568, 5.043), (69230.7003, 5.122), (69405.1 | 8066, 5.201), (69521.936,
5.28), (69665.95844, 5.359), (69781.40845, 5.438), (69915.13321, 5.517), (
70051.75872, 5.595), (70168.51407, 5.674), (70323.41434, 5.753 | ), (
70415.65831, 5.832), (70540.39073, 5.911), (70648.00871, 5.989), (
70769.5503, 6.068), (70848.0157, 6.147), (70950.55735, 6.226), (
71035.11433, 6.305), (71122.42702, 6.384), (71228.44958, 6.462), (
71316.92258, 6.541), (71433.82297, 6.62), (71501.12046, 6.699), (
71590.31864, 6.777), (71635.57041, 6.856), (71722.01288, 6.935), (
71798.30271, 7.014), (71877.9284, 7.092), (71923.47024, 7.171), (
71956.24876, 7.194), (72010.05775, 7.25), (72020.06535, 7.273), (
72045.44695, 7.329), (72086.20254, 7.352), (72130.00393, 7.407), (
72159.01147, 7.43), (72224.27843, 7.51), (72256.76688, 7.565), (
72294.91179, 7.588), (72343.7895, 7.667), (72402.52976, 7.746), (
72468.23184, 7.825), (72507.53706, 7.904), (72582.08644, 7.983), (
72584.40704, 8.062), (72681.43726, 8.141), (72731.62031, 8.22), (
72770.78048, 8.298), (72782.23846, 8.377), (72814.72691, 8.456), (
72891.01674, 8.535), (72944.82572, 8.614), (72957.87912, 8.693), (
72995.00877, 8.771), (72997.90952, 8.85), (73058.82536, 8.929), (
73082.61154, 9.008), (73141.93196, 9.087), (73161.22197, 9.244), (
73232.14541, 9.402), (73240.70263, 9.56), (73281.7483, 9.717), (
73347.01527, 9.796), (73350.06106, 10.347), (73396.03801, 10.425), (
73399.0838, 10.922), (73400.82425, 11.001), (73403.87005, 11.08), (
73414.89291, 11.159), (73416.7784, 11.237), (73221.84773, 11.261), (
73215.46607, 11.419), (73212.27524, 11.498), (73195.45087, 11.655), (
73166.29829, 12.0), (73143.09226, 12.078), (73120.61142, 12.157), (
73089.13824, 12.236), (73066.07724, 12.314), (73033.87887, 12.472), (
73014.87893, 12.55), (72980.505, 12.629), (72971.80274, 12.707), (
72927.71128, 12.786), (72924.08533, 12.865), (72857.07792, 12.943), (
72843.58941, 13.022), (72820.0933, 13.101), (72793.11629, 13.179), (
72746.41415, 13.258), (72680.27696, 13.415), (72649.23889, 13.494), (
72601.23141, 13.572), (72574.97959, 13.651), (72519.57519, 13.729), (
72507.82713, 13.808), (72442.99528, 13.887), (72421.09459, 13.965), (
72361.33906, 14.044), (72300.7133, 14.123), (72243.56845, 14.201), (
72163.07252, 14.28), (72124.49249, 14.359), (72032.82867, 14.437), (
71990.04255, 14.516), (71925.06566, 14.595), (71852.98192, 14.673), (
71748.98989, 14.752), (71652.1047, 14.831), (71559.86073, 14.91), (
71452.53283, 14.988), (71351.15148, 15.067), (71227.86943, 15.146), (
71078.4806, 15.225), (70955.92374, 15.303), (70791.74107, 15.382), (
70787.09986, 15.412), (70650.76442, 15.461), (70596.66536, 15.491), (
70462.9406, 15.539), (70416.81861, 15.569), (70313.55177, 15.618), (
70218.69712, 15.648), (70150.38436, 15.697), (70014.77411, 15.726), (
69924.99577, 15.775), (69814.47705, 15.805), (69767.33979, 15.854), (
69637.09594, 15.884), (69515.98946, 15.933), (69392.56238, 15.962), (
69319.31834, 16.012), (6914 |
carlcarl/rcard | waterfall_wall/views.py | Python | mit | 455 | 0.002198 | from django.sho | rtcuts import render
from rest_framework import viewsets
from waterfall_wall.serializers import ImageSerializer
from waterfall_wall.models import Image
def index(request):
context = {}
return render(request, 'waterfall_wall/index.html', context)
class ImageViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows images to be viewed.
"" | "
queryset = Image.objects.all()
serializer_class = ImageSerializer
|
winhamwr/calabar | calabar/tunnels/__init__.py | Python | bsd-3-clause | 6,085 | 0.002794 | """
calabar.tunnels
This module encapsulates various tunnel processes and their management.
"""
import signal
import os
import sys
import psi.process
TUN_TYPE_STR = 'tunnel_type' # Configuration/dictionary key for the type of tunnel
# Should | match the tunnel_type argument to Tunnel __init__ methods
PROC_NOT_RUNNING = [
psi.process.PROC_STATU | S_DEAD,
psi.process.PROC_STATUS_ZOMBIE,
psi.process.PROC_STATUS_STOPPED
]
def is_really_running(tunnel):
pt = psi.process.ProcessTable()
try:
proc = pt.get(tunnel.proc.pid, None)
except AttributeError:
# we might not actually have a tunnel.proc or it might poof while we're checking
return False
if proc:
status = proc.status
if not status in PROC_NOT_RUNNING:
return True
return False
class TunnelsAlreadyLoadedException(Exception):
"""Once tunnels are loaded the first time, other methods must be used to
update them"""
pass
class ExecutableNotFound(Exception):
"""
The given tunnel executable wasn't found or isn't executable.
"""
pass
class TunnelTypeDoesNotMatch(Exception):
"""
The given ``tun_type`` doesn't match expected Tunnel.
"""
pass
class TunnelManager():
"""
A class for working with multiple :class:`calabar.tunnels.base.TunnelBase`
tunnels.
Creating this tunnels registers it for SIG_CHLD signals, so only ONE
TunnelManager can exist at a time for purposes of keeping the other tunnels
running.
"""
def __init__(self):
self.tunnels = []
self._register_for_close()
def load_tunnels(self, config):
"""
Load config information to create all required tunnels.
"""
if self.tunnels:
raise TunnelsAlreadyLoadedException("TunnelManager.load_tunnels can't be called after tunnels have already been loaded. Use update_tunnels() instead")
tun_confs_d = get_tunnels(config)
for name, tun_conf_d in tun_confs_d.items():
t = self._load_tunnel(name, tun_conf_d)
self.tunnels.append(t)
def _load_tunnel(self, tunnel_name, tun_conf_d):
"""
Create and return a tunnel instance from a ``tun_conf_d`` dictionary.
``tun_conf_d`` is a dictionary matching the output of a tunnel's
implementation of :mod:`calabar.tunnels.base.TunnelBase:parse_configuration`
method.
"""
from calabar.conf import TUNNELS
tun_type = tun_conf_d[TUN_TYPE_STR]
for tunnel in TUNNELS:
if tunnel.TUNNEL_TYPE == tun_type:
t = tunnel(name=tunnel_name, **tun_conf_d)
return t
raise NotImplementedError()
def start_tunnels(self):
"""
Start all of the configured tunnels and register to keep them running.
"""
for t in self.tunnels:
try:
t.open()
except ExecutableNotFound, e:
print >> sys.stderr, e
def continue_tunnels(self):
"""
Ensure that all of the tunnels are still running.
"""
for t in self.tunnels:
if not t.is_running():
print "TUNNEL [%s] EXITED" % t.name
print "RESTARTING"
try:
t.open()
except ExecutableNotFound, e:
print >> sys.stderr, e
else:
print "[%s]:%s running" % (t.name, t.proc.pid)
def _register_for_close(self):
"""
Register the child tunnel process for a close event. This keeps process
from becoming defunct.
"""
signal.signal(signal.SIGCHLD, self._handle_child_close)
# Register for a termination signal so we can clean up children
signal.signal(signal.SIGTERM, self._handle_terminate)
def _handle_terminate(self, signum, frame):
for t in self.tunnels:
t.close(wait=False)
exit()
def _handle_child_close(self, signum, frame):
"""
Handle a closed child.
Call :mod:os.wait() on the process so that it's not defunct.
"""
assert signum == signal.SIGCHLD
print "CHILD TUNNEL CLOSED"
pid, exit_status = os.wait()
for t in self.tunnels:
# For all of the "closing" tunnels, if they've stopped running, handle the close
if t.closing and not t.is_running():
# Assume the same exit_status
t.handle_closed(exit_status)
TUNNEL_PREFIX = 'tunnel:'
def get_tunnels(config):
"""
Return a dictionary of dictionaries containg tunnel configurations based on the
given SafeConfigParser instance.
An example return value might be::
{
'foo':
{
'tunnel_type': 'vpnc',
'conf_file': '/etc/calabar/foo.conf',
'ips': [10.10.254.1]
},
'bar':
{
'tunnel_type': 'ssh',
'from': 'root@10.10.251.2:386',
'to': '127.0.0.1:387
}
}
"""
tun_confs_d = {}
for section in config.sections():
if section.startswith(TUNNEL_PREFIX):
tun_conf_d = parse_tunnel(config, section)
tun_name = section[len(TUNNEL_PREFIX):]
tun_confs_d[tun_name] = tun_conf_d
return tun_confs_d
def parse_tunnel(config, section):
"""
Parse the given ``section`` in the given ``config``
:mod:`ConfigParser.ConfigParser` object to generate a tunnel configuration
dictionary using all configured tunnel types and their configuration
parsers.
"""
from calabar.conf import TUNNELS
tun_type = config.get(section, TUN_TYPE_STR)
for tunnel in TUNNELS:
if tun_type == tunnel.TUNNEL_TYPE:
tun_conf_d = tunnel.parse_configuration(config, section)
return tun_conf_d
raise NotImplementedError("The tunnel type [%s] isn't supported" % tun_type)
|
steny138/twss | twss/__init__.py | Python | apache-2.0 | 23 | 0 | impor | t fetch_from_twse
| |
Shanto/ajenti | plugins/users/main.py | Python | lgpl-3.0 | 10,193 | 0.001373 | from ajenti.ui import *
from ajenti.com import implements
from ajenti.app.api import ICategoryProvider
from ajenti.app.helpers import *
from ajenti.utils import *
import backend
class UsersPlugin(CategoryPlugin):
text = 'Users'
icon = '/dl/users/icon_small.png'
folder = 'system'
platform =['Ubuntu', 'Debian', 'Arch', 'openSUSE']
params = {
'login': 'Login',
'password': 'Password',
'name': 'Name',
'uid': 'UID',
'gid': 'GID',
'ggid': 'GID',
'home': 'Home directory',
'shell': 'Shell',
'adduser': 'New user login',
'addgrp': 'New group name'
}
def on_session_start(self):
self._tab = 0
self._selected_user = ''
self._selected_group = ''
self._editing = ''
def get_ui(self):
panel = UI.PluginPanel(UI.Label(), title='User accounts', icon='/dl/users/icon.png')
panel.append(self.get_default_ui())
return panel
def get_default_ui(self):
self.users = backend.get_all_users()
self.groups = backend.get_all_groups()
backend.map_groups(self.users, self.groups)
tc = UI.TabControl(active=self._tab)
tc.add('Users', self.get_ui_users())
tc.add('Groups', self.get_ui_groups())
if self._editing != '':
tc | = UI.VContainer(tc, UI.InputBox(text=self.params[self._editing], id='dlgEdit'))
return tc
def get | _ui_users(self):
t = UI.DataTable(UI.DataTableRow(
UI.Label(text='Login'),
UI.Label(text='UID'),
UI.Label(text='Home'),
UI.Label(text='Shell'),
UI.Label(), header=True
))
for u in self.users:
t.append(UI.DataTableRow(
UI.DataTableCell(
UI.Image(file='/dl/core/ui/stock/user.png'),
UI.Label(text=u.login, bold=True)
),
UI.Label(text=u.uid, bold=(u.uid>=1000)),
UI.Label(text=u.home),
UI.Label(text=u.shell),
UI.DataTableCell(
UI.MiniButton(id='edit/'+u.login, text='Select'),
hidden=True
)
))
t = UI.VContainer(t, UI.Button(text='Add user', id='adduser'))
if self._selected_user != '':
t = UI.Container(t, self.get_ui_edit_user())
return t
def get_ui_groups(self):
t = UI.DataTable(UI.DataTableRow(
UI.Label(text='Name'),
UI.Label(text='GID'),
UI.Label(text='Users'),
UI.Label(), header=True
))
for u in self.groups:
t.append(UI.DataTableRow(
UI.DataTableCell(
UI.Image(file='/dl/core/ui/stock/group.png'),
UI.Label(text=u.name, bold=True)
),
UI.Label(text=u.gid, bold=(u.gid>=1000)),
UI.Label(text=', '.join(u.users)),
UI.DataTableCell(
UI.MiniButton(id='gedit/'+u.name, text='Select'),
hidden=True
)
))
t = UI.VContainer(t, UI.Button(text='Add group', id='addgrp'))
if self._selected_group != '':
t = UI.Container(t, self.get_ui_edit_group())
return t
def get_ui_edit_user(self):
u = backend.get_user(self._selected_user, self.users)
backend.map_groups([u], backend.get_all_groups())
g = ', '.join(u.groups)
dlg = UI.DialogBox(
UI.LayoutTable(
UI.LayoutTableRow(
UI.Label(text='Login: '+ u.login, bold=True),
UI.Button(text='Change', id='chlogin')
),
UI.LayoutTableRow(
UI.Label(),
UI.Button(text='Change password', id='chpasswd')
),
UI.LayoutTableRow(
UI.Label(),
UI.WarningButton(text='Delete user', id='deluser', msg='Delete user %s'%u.login)
),
UI.LayoutTableRow(
UI.Label(text='UID: '+ str(u.uid)),
UI.Button(text='Change', id='chuid')
),
UI.LayoutTableRow(
UI.Label(text='GID: '+ str(u.gid)),
UI.Button(text='Change', id='chgid')
),
UI.LayoutTableRow(
UI.Label(text='Home directory: '+ u.home),
UI.Button(text='Change', id='chhome')
),
UI.LayoutTableRow(
UI.Label(text='Shell: '+ u.shell),
UI.Button(text='Change', id='chshell')
),
UI.LayoutTableRow(
UI.Label(text='Groups: '),
UI.Button(text='Edit', id='chgroups')
),
UI.LayoutTableRow(
UI.LayoutTableCell(
UI.Label(text=g),
colspan=2
)
)
),
title='Edit user',
id='dlgEditUser'
)
return dlg
def get_ui_edit_group(self):
u = backend.get_group(self._selected_group, self.groups)
g = ', '.join(u.users)
dlg = UI.DialogBox(
UI.LayoutTable(
UI.LayoutTableRow(
UI.Label(text='Name: ' + u.name, bold=True),
UI.Button(text='Change', id='gchname')
),
UI.LayoutTableRow(
UI.Label(),
UI.WarningButton(text='Delete group', id='delgroup', msg='Delete group %s'%u.name)
),
UI.LayoutTableRow(
UI.Label(text='GID: ' + str(u.gid)),
UI.Button(text='Change', id='gchgid')
),
UI.LayoutTableRow(
UI.Label(text='Users: '),
UI.Label()
),
UI.LayoutTableRow(
UI.LayoutTableCell(
UI.Label(text=g),
colspan=2
)
)
),
title='Edit group',
id='dlgEditGroup'
)
return dlg
@event('minibutton/click')
@event('button/click')
def on_click(self, event, params, vars=None):
if params[0] == 'edit':
self._tab = 0
self._selected_user = params[1]
if params[0] == 'gedit':
self._tab = 1
self._selected_group = params[1]
if params[0] == 'chlogin':
self._tab = 0
self._editing = 'login'
if params[0] == 'chuid':
self._tab = 0
self._editing = 'uid'
if params[0] == 'chgid':
self._tab = 0
self._editing = 'gid'
if params[0] == 'chshell':
self._tab = 0
self._editing = 'shell'
if params[0] == 'chpasswd':
self._tab = 0
self._editing = 'password'
if params[0] == 'chhome':
self._tab = 0
self._editing = 'home'
if params[0] == 'gchname':
self._tab = 1
self._editing = 'name'
if params[0] == 'gchgid':
self._tab = 1
self._editing = 'ggid'
if params[0] == 'adduser':
self._tab = 0
self._editing = 'adduser'
if params[0] == 'addgrp':
self._tab = 1
self._editing = 'addgrp'
if params[0] == 'de |
BuildmLearn/University-Campus-Portal-UCP | UCP/discussion/migrations/0012_auto_20160623_1849.py | Python | bsd-3-clause | 442 | 0.002262 | # -*- coding: utf-8 -*-
from __future__ | import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('discussion', '0011_attachment_name'),
]
operations = [
migrations.AlterField(
model_name='attachment',
name='reply',
field=models.ForeignKey(related_name='attachments', to='discussion.Reply'),
| ),
]
|
maxive/erp | addons/l10n_ec/__manifest__.py | Python | agpl-3.0 | 782 | 0 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Copyright (C) 201 | 0-2012 Cristian Salamea Gnuthink Software Labs Cia. Ltda
{
'name': 'Ecuador - Accounting',
'version': '1.1',
'category': 'Localization',
'description': """
This is the base module to manage the accounting chart for Ecuador in Odoo.
==============================================================================
Accounting chart and localizat | ion for Ecuador.
""",
'author': 'Gnuthink Co.Ltd.',
'depends': [
'account',
'base_iban',
],
'data': [
'data/l10n_ec_chart_data.xml',
'data/account_data.xml',
'data/account_tax_data.xml',
'data/account_chart_template_data.xml',
],
}
|
rezoo/chainer | tests/chainer_tests/functions_tests/normalization_tests/test_local_response_normalization.py | Python | mit | 2,648 | 0 | import unittest
import numpy
import six
from chainer.backends import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import backend
@testing.parameterize(*testing.product({
'dtype': [numpy.float16, numpy.float32, numpy.float64],
}))
@backend.inject_backend_tests(
['test_forward', 'test_backward'],
# CPU tests
testing.product({
'use_cuda': [False],
'use_ideep': ['never', 'always'],
})
# GPU tests
+ [{'use_cuda': True}])
class TestLocalResponseNormalization(unittest.TestCase):
def setUp(self):
x = numpy.random.uniform(-1, 1, (2, 7, 3, 2)).astype(self.dtype)
gy = numpy.random.uniform(-1, 1, (2, 7, 3, 2)).astype(self.dtype)
self.inputs = [x]
self.grad_outputs = [gy]
if self.dtype == numpy.float16:
self.check_forward_options = {'atol': 1e-4, 'rtol': 1e-3}
self.check_backward_options = {'atol': 5e-3, 'rtol': 5e-3}
else:
self.check_forward_options = {}
self.check_backward_options = {'atol': 3e-4, 'rtol': 3e-3}
def forward_cpu(self, inputs):
# Naive implementation
x, = inputs
y_expect = numpy.zeros_like(x)
for n, c, h, w in numpy.ndindex(x.shape):
s = 0
for i in six.moves.range(max(0, c - 2), min(7, c + 2)):
s += x[n, i, h, w] ** 2
denom = (2 + 1e-4 * s) ** .75
y_expect[n, c, h, w] = x[n, c, h, w] / denom
return y_expect,
def check_forward(self, inputs, backend_config):
y_expect, = self.forward_cpu(inputs)
if backend_config.use_cuda:
inputs = cuda.to_gpu(inputs)
with backend_config:
y = functio | ns.local_response_normalization(* | inputs)
assert y.data.dtype == self.dtype
testing.assert_allclose(y_expect, y.data, **self.check_forward_options)
def test_forward(self, backend_config):
self.check_forward(self.inputs, backend_config)
def check_backward(self, inputs, grad_outputs, backend_config):
if backend_config.use_cuda:
inputs = cuda.to_gpu(inputs)
grad_outputs = cuda.to_gpu(grad_outputs)
with backend_config:
gradient_check.check_backward(
functions.local_response_normalization, inputs, grad_outputs,
eps=1, dtype=numpy.float64, **self.check_backward_options)
def test_backward(self, backend_config):
self.check_backward(self.inputs, self.grad_outputs, backend_config)
testing.run_module(__name__, __file__)
|
andy-sheng/leetcode | 234-Palindrome-Linked-List.py | Python | mit | 998 | 0.006012 | import math
class Solution(object):
def isPalin | drome(self, head):
"""
:type head: ListNode
:rtype: bool
"""
if not head or not head.next:
return True
length = 0
forward = head
backward = head
while forward:
forward = forward.next
length += 1
mid = math.ceil(length * 1.0 / 2)
flag = length % 2 # 0 means lenght is even 1 means odd
forward = head
forwardTmp = forward.next
while mid > 1: # stop when mid i | s 0
mid -= 1;
backward = forward
forward = forwardTmp
forwardTmp = forwardTmp.next
forward.next = backward
if not flag:
backward = forward
forward = forwardTmp
while(forward):
if forward.val != backward.val:
return False
forward = forward.next;
backward = backward.next;
return True
|
iedparis8/django-helpdesk | management/commands/escalate_tickets.py | Python | bsd-3-clause | 5,876 | 0.003063 | #!/usr/bin/python
"""
django-helpdesk - A Django powered ticket tracker for small enterprise.
(c) Copyright 2008 Jutda. All Rights Reserved. See LICENSE for details.
scripts/escalate_tickets.py - Easy way to escalate tickets based on their age,
designed to be run from Cron or similar.
"""
from datetime import timedelta, date
import getopt
from optparse import make_option
import sys
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Q
from django.utils.translation import ugettext as _
try:
from django.utils import timezone
except ImportError:
from datetime import datetime as timezone
from helpdesk.models import Queue, Ticket, FollowUp, EscalationExclusion, TicketChange
from helpdesk.lib import send_templated_mail, safe_template_context
class Command(BaseCommand):
def __init__(self):
BaseCommand.__init__(self)
self.option_list += (
make_option(
'--queues',
help='Queues to include (default: all). Use queue slugs'),
make_option(
'--verboseescalation',
action='store_true',
default=False,
help='Display a list of dates excluded'),
)
def handle(self, *args, **options):
verbose = False
queue_slugs = None
queues = []
if options['verboseescalation']:
verbose = True
if options['queues']:
queue_slugs = options['queues']
if queue_slugs is not None:
queue_set = queue_slugs.split(',')
for queue in queue_set:
try:
q = Queue.objects.get(slug__exact=queue)
except Queue.DoesNotExist:
raise CommandError("Queue %s does not exist." % queue)
queues.append(queue)
escalate_tickets(queues=queues, verbose=verbose)
def escalate_tickets(queues, verbose):
""" Only include queues with escalation configured """
queryset = Queue.objects.filter(escalate_days__isnull=False).exclude(escalate_days=0)
if queues:
queryset = queryset.filter(slug__in=queues)
for q in queryset:
last = date.today() - timedelta(days=q.escalate_days)
today = date.today()
workdate = last
days = 0
while workdate < today:
if EscalationExclusion.objects.filter(date=workdate).count() == 0:
days += 1
workdate = workdate + timedelta(days=1)
req_last_escl_date = date.today() - timedelta(days=days)
if verbose:
print "Processing: %s" % q
for t in q.ticket_set.filter(
Q(status=Ticket.OPEN_STATUS)
| Q(status=Ticket.REOPENED_STATUS)
).exclude(
priority=1
).filter(
Q(on_hold__isnull=True)
| Q(on_hold=False)
).filter(
Q(last_escalation__lte=req_last_escl_date)
| Q(last_escalation__isnull=True, created__lte=req_last_escl_date)
):
t.last_escalation = timezone.now()
t.priority -= 1
t.save()
context = safe_template_context(t)
if t.submitter_email:
send_templated_mail(
'escalated_submitter',
context,
recipients=t.submitter_email,
sender=t.queue.from_address,
fail_silently=True,
)
if t.queue.updated_ticket_cc:
send_templated_mail(
'escalated_cc',
context,
recipients=t.queue.updated_ticket_cc,
sender=t.queue.from_address,
fail_silently=True,
)
if t.assigned_to:
send_templated_mail(
'escalated_owner',
context,
recipients=t.assigned_to.email,
sender=t.queue.from_address,
fail_silently=True,
)
if verbose:
print " - Esclating %s from %s>%s" % (
t.ticket,
t.priority+1,
t.priority
)
f = FollowUp(
ticket = t,
title = 'Ticket Escalated',
date=timezone.now(),
public=True,
comment=_('Ticket escalated after %s days' % q.escalate_days),
)
f.save()
tc = TicketChange(
followup = f,
field = _('Priority'),
old_value = t.priority + 1,
new_value = t.priority,
)
tc.save()
def usage():
print "Options:"
print " --queues: Queues to include (default: all). Use queue slugs"
print " --verboseescalation: Display a list of dates excluded"
if __name__ == '__main__':
try:
opts, args = getopt.getopt(sys.argv[1:], ['queues=', 'verboseescalation'])
except getopt.GetoptError:
usage()
sys.exit(2)
verbose = False
queue_slugs = None
queues = []
for o, a in opts:
if o == '--verboseescalation':
verbose = True
if o == '--queues':
queue_slugs = a
if queue_slugs is not None:
queue_set = queue_slugs.spl | it(',')
for queue in queue_set:
try:
q = Queue.objects.get(slug__exact=queue)
except Queue.DoesNotExist:
print "Queue %s does not exist." % queue
sys.exit(2)
queues.append(queue)
escalate_tickets(queues=queue | s, verbose=verbose)
|
codemonkey2841/tradebot | run.py | Python | mit | 6,871 | 0.001746 | #!/usr/bin/env python
""" It's a TradeBot """
import configparser
import curses
from httplib import HTTPException
import os
import signal
import socket
from ssl import SSLError
import sys
import time
from tradebot import TradeBot
def on_exit(sig, func=None):
curses.nocbreak()
stdscr.keypad(0)
curses.echo()
curses.endwin()
curses.curs_set(1)
sys.exit()
def initialize():
# Initialize curses screen
stdscr = curses.initscr()
curses.noecho()
curses.cbreak()
stdscr.keypad(1)
curses.curs_set(0)
# Initialize screen
stdscr.addstr(0, 0, "=" * 82)
stdscr.addstr(1, 36, "BTC-E BOT")
stdscr.addstr(2, 0, "-" * 81)
stdscr.addstr(4, 0, "=" * 81)
stdscr.addstr(21, 0, " " * 70)
stdscr.addstr(22, 0, "=" * 82)
for i in range(1, 22):
stdscr.addstr(i, 0, "|")
stdscr.addstr(i, 81, "|")
# Initialize top bar labels
stdscr.addstr(3, 2, "SIM", curses.A_BOLD)
stdscr.addstr(3, 12, "VERBOSE", curses.A_BOLD)
stdscr.addstr(3, 24, "WAIT", curses.A_BOLD)
stdscr.addstr(3, 35, "PAIR", curses.A_BOLD)
stdscr.addstr(3, 50, "THRESH", curses.A_BOLD)
stdscr.addstr(3, 65, "TRADE", curses.A_BOLD)
stdscr.addstr(3, 6, "[ ]")
stdscr.addstr(3, 20, "[ ]")
stdscr.addstr(3, 29, "[ ]")
stdscr.addstr(3, 40, "[ ]")
stdscr.addstr(3, 57, "[ ]")
stdscr.addstr(3, 71, "[ ]")
# Initialize main area labels
stdscr.addstr(5, 2, "BALANCE:", curses.A_BOLD)
stdscr.addstr(5, 67, "PRICE HISTORY", curses.A_UNDERLINE)
stdscr.addstr(6, 57, "CURRENT ->")
stdscr.addstr(7, 2, "STATE:", curses.A_BOLD)
stdscr.addstr(8, 2, "TRADE INCREMENT:", curses.A_BOLD)
stdscr.addstr(9, 2, "TRADE THRESHOLD:", curses.A_BOLD)
stdscr.addstr(10, 2, "AVERAGE PRICE:", curses.A_BOLD)
stdscr.addstr(19, 2, "ORDER LIST ( )", curses.A_UNDERLINE)
stdscr.addstr(12, 2, "TRADE HISTORY", curses.A_UNDERLINE)
stdscr.refresh()
return stdscr
def update(stdscr):
curr1 = tradebot.curr[0].upper()
curr2 = tradebot.curr[1].upper()
stdscr.addstr(1, 56, "%s" % time.asctime())
(state, thresh) = tradebot.get_state()
if state == "buy":
thresh = "< %0.06f" % thresh
elif state == "sell":
thresh = "> %0.06f" % thresh
elif state == "build":
thresh = "%0.06f" % thresh
stdscr.addstr(9, 20, "%s %s" % (thresh, curr2))
stdscr.addstr(10, 20, "%f %s" % (tradebot.average_price(), curr2))
stdscr.addstr(5, 12, "%f %s / %f %s"
% (tradebot.get_balance(1), curr1, tradebot.get_balance(2),curr2))
stdscr.addstr(7, 20, "%s " % state.upper())
stdscr.addstr(8, 20, "%f %s" % (tradebot.get_trade_cost(), curr1))
# Top Bar values
sim = "OFF"
if tradebot.simulation:
sim = "ON"
stdscr.addstr(3, 7, "%3s" % sim)
stdscr.addstr(3, 21, "%s" % args['verbosity'][:1])
stdscr.addstr(3, 30, "%3d" % tradebot.wait)
stdscr.addstr(3, 41, "%s_%s" % (tradebot.curr[0], tradebot.curr[1]))
stdscr.addstr(3, 58, "%.02f%%" % (tradebot.trade_threshold * 100))
stdscr.addstr(3, 72, "%6.02f%%" % (tradebot.trade_increment * 100))
# Price History
line = 6
history = tradebot.get_price_history()
for item in history:
stdscr.addstr(line, 68, "%f %s" % (item, curr2))
line += 1
if line > 21:
break
# Completed trades
history = tradebot.get_trade_history()
line = 13
for item in history:
stdscr.addstr(line, 2, "%s: %s %f @ %.05f %s " % (item.timestamp,
item.type,
item.amount,
item.rate,
curr2))
line += 1
# Order list
orders = tradebot.get_orders()
stdscr.addstr(19, 14, "%2d" % len(orders))
line = 20
stdscr.addstr(20, 2, " " * 40)
stdscr.addstr(21, 2, " " * 40)
for order in orders:
stdscr.addstr(line, 2, "%s %f @ %.05f %s" % (order.type,
order.amount,
order.rate,
curr2))
line += 1
if line > 21:
break
stdscr.refresh()
signal.signal(signal.SIGQUIT, on_exit)
signal.signal(signal.SIGTERM, on_exit)
signal.signal(signal.SIGINT, on_exit)
errlog = 'error.log'
config = configparser.ConfigParser()
config.read('tradebot.conf')
args = {}
if 'api_file' in config['BTC-E']:
args['api_file'] = str(config['BTC-E']['api_file'])
else:
sys.stderr.write('api_file not defined')
sys.exit(1)
with open(args['api_file']) as f:
args['api_key'] = f.readline().strip()
if 'increment' in config['TRADE']:
args['trade_increment'] = float(config['TRADE']['increment'])
else:
args['trade_increment'] = 0.012
if 'threshold' in config['TRADE']:
args['trade_threshold'] = float(config['TRADE']['threshold'])
else:
args['trade_threshold'] = 0.006
if 'pair' in config['BTC-E']:
args['pair'] = str(config['BTC-E']['pair'])
else:
args['pair'] = 'ltc | _btc'
if 'wait' in config['TRADE']:
args['wait'] = int(config['TRADE']['refresh'])
else:
args['wait'] = 15
if 'simulation' in config['MAIN']:
args['simulation'] = str(config['MAIN']['simulation'])
else:
args['simulation'] = 'off'
if | 'verbosity' in config['MAIN']:
args['verbosity'] = config['MAIN']['verbosity'].upper()
else:
args['verbosity'] = "ERROR"
if 'logfile' in config['MAIN']:
args['logfile'] = str(config['MAIN']['logfile'])
else:
args['logfile'] = 'tradebot.log'
if 'db' in config['MAIN']:
args['db'] = str(config['MAIN']['db'])
else:
args['db'] = 'tradebot.db'
sys.stderr = open(errlog, "w")
tradebot = TradeBot(args)
stdscr = initialize()
while True:
try:
stdscr.addstr(21, 2, " " * 70)
tradebot.refresh_price()
for i in range(tradebot.wait):
update(stdscr)
time.sleep(1)
except (SSLError, HTTPException, ValueError, socket.error):
curses.start_color()
curses.init_pair(1, curses.COLOR_RED, curses.COLOR_WHITE)
for i in range(60):
stdscr.addstr(21, 2, "Failed to connect to exchange. Retrying " \
"in %d" % i, curses.color_pair(1))
except Exception as e:
curses.nocbreak()
#stdscr.keypad(0)
curses.echo()
curses.endwin()
curses.curs_set(1)
import traceback
type_, value_, traceback_ = sys.exc_info()
for line in traceback.format_tb(traceback_):
sys.stderr.write(line)
sys.stderr.write(e.__class__.__name__ + ": ")
sys.stderr.write(e.message)
sys.exit()
|
Nolski/airmozilla | airmozilla/main/tests/views/test_eventdiscussion.py | Python | bsd-3-clause | 4,385 | 0 | from django.contrib.auth.models import User
from funfactory.urlresolvers import reverse
from nose.tools import eq_, ok_
from airmozilla.main.models import Event, EventOldSlug
from airmozilla.comments.models import Discussion
from airmozilla.base.tests.testbase import DjangoTestCase
class TestEventDiscussion(DjangoTestCase):
fixtures = ['airmozilla/manage/tests/main_testdata.json']
def test_link_to_it(self):
event = Event.objects.get(title='Test event')
event_url = reverse('main:event', args=(event.slug,))
edit_url = reverse('main:event_edit', args=(event.slug,))
response = self.client.get(event_url)
url = reverse('main:event_discussion', args=(event.slug,))
eq_(response.status_code, 200)
ok_(url not in response.content)
ok_(edit_url not in response.content)
# let's sign in
user = self._login()
response = self.client.get(event_url)
eq_(response.status_code, 200)
# still not!
ok_(url not in response.content)
ok_(edit_url in response.content)
event.creator = user
event.save()
response = self.client.get(event_url)
eq_(response.status_code, 200)
# still not because there's no discussion set up
ok_(url not in response.content)
ok_(edit_url in response.content)
Discussion.objects.create(event=event)
response = self.client.get(event_url)
eq_(response.status_code, 200)
ok_(url in response.content)
ok_(edit_url in response.content)
def test_permission_access(self):
event = Event.objects.get(title='Test event')
event.privacy = Event.PRIVACY_COMPANY
event.save()
EventOldSlug.objects.create(
slug='old-slug',
event=event
)
bad_url = reverse('main:event_discussion', args=('old-slug',))
response = self.client.get(bad_url)
eq_(response.status_code, 302)
url = reverse('main:event_discussion', args=(event.slug,))
response = self.client.get(url)
eq_(response.status_code, 302)
user = self._login()
response = self.client.get(url)
eq_(response.status_code, 302)
event.creator = user
event.save()
response = self.client.get(url)
eq_(response.status_code, 302)
response = self.client.post(url, {'any': 'thing'})
eq_(response.status_code, 302)
discussion = Discussion.objects.create(
event=event,
enabled=True
)
discussion.moderators.add(user)
discussion.moderators.add(
User.objects.create(email='richard@example.com')
)
response = self.client.get(url)
eq_(response.status_code, 200)
emails = [user.email, 'richard@example.com']
ok_(', '.join(emails) in response.content)
# Now let's try to post something to it
data = {
'enabled': True,
'closed': True,
'notify_all': True,
'moderate_all': True,
'moderators': (', '.join(emails)).upper()
}
response = self.client.post | (url, data)
eq_(response.status_code, 302)
# should have worked
discussion = Discussion.objects.get(
id=discussion.id,
enabled=True,
closed=True,
notify_all=True,
moderate_all=True,
)
eq_(
sorted(x.email for x in discussion.moderators.all()),
emails
)
# try to send a mod | erator email address we don't know about
response = self.client.post(url, dict(
data,
moderators='xxx@example.com'
))
eq_(response.status_code, 200)
ok_(
'xxx@example.com does not exist as a Air Mozilla user'
in response.content
)
response = self.client.post(url, dict(
data,
moderators=', ,\n,,'
))
eq_(response.status_code, 200)
ok_(
'You must have at least one moderator'
in response.content
)
# cancel this time
response = self.client.post(url, dict(
data,
moderators=', ,\n,,',
cancel=''
))
eq_(response.status_code, 302)
|
ActiveState/code | recipes/Python/83698_Patterns_using_classes_dictionary/recipe-83698.py | Python | mit | 1,519 | 0.045425 | class Base:
def __init__(self,v):
self.v=v
class StaticHash(Base):
def __hash__(self):
| if not hasattr(self,"hashvalue"):
self.hashvalue=hash(self.v)
return self.hashvalue
class ImmutableHash(Base):
def __init__(self,v):
self.__dict__["protect"]=[]
Base.__init__(self | ,v)
def __hash__(self):
self.protect.append("v")
return hash(self.v)
def __setattr__(self,k,v):
if k in self.protect:
raise NameError,"%s is protected." % k
else:
self.__dict__[k]=v
class ValueIdentity(ImmutableHash):
def __cmp__(self,x):
if self.v==x.v:
return 0
if self.v<x.v:
return -1
return 1
if __name__=="__main__":
## SHASH:
s1=StaticHash(1)
s2=StaticHash(2)
r={s1:1,s2:2}
s2.v=3
print r[s2]
## IHASH
i1=ImmutableHash(1)
i2=ImmutableHash(2)
r={i1:1,i2:2}
try:
i1.v=100
except NameError,v:
print "NameError,",v
## VALUEID
v1=ValueIdentity(1)
v2=ValueIdentity(2)
if v1==v2:
print "ID1"
v2.v=1
if v1==v2:
print "ID2"
## VALUEHASH
r={v1:1}
print r[v2]
|
STIXProject/stix-ramrod | ramrod/test/stix/stix_1_1_test.py | Python | bsd-3-clause | 4,360 | 0.002752 | # Copyright (c) 2015, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
import unittest
from six import StringIO
import ramrod
import ramrod.stix
import ramrod.stix.stix_1_1
import ramrod.utils as utils
from ramrod.test import (_BaseVocab, _BaseTrans)
UPDATER_MOD = ramrod.stix.stix_1_1
UPDATER = UPDATER_MOD.STIX_1_1_Updater
PACKAGE_TEMPLATE = \
"""
<stix:STIX_Package
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:stix="http://stix.mitre.org/stix-1"
xmlns:stixCommon="http://stix.mitre.org/common-1"
xmlns:campaign="http://stix.mitre.org/Campaign-1"
xmlns:indicator="http://stix.mitre.org/Indicator-2"
xmlns:et="http://stix.mitre.org/ExploitTarget-1"
xmlns:ttp="http://stix.mitre.org/TTP-1"
xmlns:stixVocabs="http://stix.mitre.org/default_vocabularies-1"
xmlns:example="http://example.com/"
xmlns:ramrod="http://ramrod.test/"
version="1.1">
%s
</stix:STIX_Package>
"""
class STIX_1_1_Test(unittest.TestCase):
XML_VERSIONS = PACKAGE_TEMPLATE % ""
@classmethod
def setUpClass(cls):
cls._versions = StringIO(cls.XML_VERSIONS)
def test_get_version(self):
root = utils.get_etree_root(self._versions)
version = UPDATER.get_version(root)
self.assertEqual(version, UPDATER.VERSION)
def test_update_version(self):
valid_versions = ramrod.stix.STIX_VERSIONS
idx = valid_versions.index
version_to = valid_versions[idx(UPDATER.VERSION)+1:]
for version in version_to:
updated = ramrod.update(self._versions, to_=version)
updated_root = updated.document.as_element()
updated_version = UPDATER.get_version(updated_root)
self.assertEqual(version, updated_version)
class IndicatorTypeVocab(_BaseVocab):
UPDATER = UPDATER_MOD.STIX_1_1_Updater
VOCAB_KLASS = UPDATER_MOD.AvailabilityLossVocab
VOCAB_COUNT = 1
VOCAB_XML = \
"""
<stix:Indicators>
<stix:Indicator xsi:type="indicator:IndicatorType">
<indicator:Type xsi:type="stixVocabs:AvailabilityLossTypeVocab-1.0">Degredation</indicator:Type>
</stix:Indicator>
</stix:Indicators>
"""
XML = PACKAGE_TEMPLATE % (VOCAB_XML)
class TransCommonSource(_BaseTrans):
UPDATER = UPDATER_MOD.STIX_1_1_Updater
TRANS_KLASS = UPDATER_MOD.TransCommonSource
TRANS_XPATH = "//stixCommon:Source/stixCommon:Identity/stixCommon:Name"
TRANS_VALUE = _BaseTrans.TRANS_VALUE
TRANS_COUNT = 2
TRANS_XML = \
"""
<stixCommon:Confidence>
<stixCommon:Source>{0}</stixCommon:Source>
</stixCommon:Confidence>
<stixCommon:Confidence>
<stixCommon:Source>{0}</stixCommon:Source>
</stixCommon:Confidence>
""".format(TRANS_VALUE)
XML = PACKAGE_TEMPLATE % (TRANS_XML)
class TransSightingSource(_BaseTrans):
UPDATER = UPDATER_MOD.STIX_1_1_Updater
TRANS_KLASS = UPDATER_MOD.TransSightingsSource
TRANS_XPATH = "//indicator:Sighting/indicator:Source/stixCommon:Identity/stixCommon:Name"
TRANS_VALUE = _BaseTrans.TRANS_VALUE
TRANS_COUNT = 2
TRANS_XML = \
"""
<indicator:Sighting>
<indicator:Source>{0}</indicator:Source>
</indicator:Sighting>
<indicator:Sighting>
<indicator:Source>{0}</indicator:Source>
</indicator:Sighting>
""".format(TRANS_VALUE)
XML = PACKAGE_TEMPLATE % (TRANS_XML)
class TransIndicatorRelatedCampaign(_BaseTrans):
UPDATER = UPDATER_MOD.STIX_1_1_Updater
TRANS_KLASS = UPDATER_MOD.TransIndicatorRelatedCampaign
TRANS_XPATH = "//indicator:Related_Campaigns//indicator:Related_Cam | paign/stixCommon:Campaign/stixCommon:Names/stixCommon:Name"
TRANS_VALUE = _BaseTrans.TRANS_VALUE
TRANS_COUNT = 2
TRANS_XML = \
"""
<indicator:Related_Campaigns>
<indicator:Related_Campaign>
<stixCommon:Names>
<stixCommon:Name>{0}</stixCommon:Name>
| </stixCommon:Names>
</indicator:Related_Campaign>
<indicator:Related_Campaign>
<stixCommon:Names>
<stixCommon:Name>{0}</stixCommon:Name>
</stixCommon:Names>
</indicator:Related_Campaign>
</indicator:Related_Campaigns>
""".format(TRANS_VALUE)
XML = PACKAGE_TEMPLATE % (TRANS_XML)
if __name__ == "__main__":
unittest.main() |
Inboxen/Inboxen | inboxen/account/views/otp.py | Python | agpl-3.0 | 3,760 | 0.00266 | ##
# Copyright (C) 2014 Jessica Tallon & Matt Molyneaux
#
# This file is part of Inboxen.
#
# Inboxen is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Inboxen is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
| # GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Inboxen. If not, see <http://www.gnu.org/licenses/>.
##
from django import forms
from | django.contrib import messages
from django.http import Http404
from django.template.response import TemplateResponse
from django.utils.translation import gettext as _
from django.views.decorators.cache import never_cache
from django_otp.decorators import otp_required
from elevate.decorators import elevate_required
from two_factor import forms as two_forms
from two_factor.views import core, profile
from inboxen.account.decorators import anonymous_required
from inboxen.account.forms import PlaceHolderAuthenticationForm
class LoginView(core.LoginView):
template_name = "account/login.html"
form_list = (
('auth', PlaceHolderAuthenticationForm),
('token', two_forms.AuthenticationTokenForm),
('backup', two_forms.BackupTokenForm),
)
def get_form_kwargs(self, step):
if step == "auth":
return {"request": self.request}
else:
return super(LoginView, self).get_form_kwargs(step)
class TwoFactorSetupView(core.SetupView):
template_name = "account/twofactor-setup.html"
form_list = (
('welcome', forms.Form),
('method', two_forms.MethodForm),
('generator', two_forms.TOTPDeviceForm),
)
success_url = "user-twofactor-backup"
qrcode_url = "user-twofactor-qrcode"
def done(self, *args, **kwargs):
out = super(TwoFactorSetupView, self).done(*args, **kwargs)
messages.success(self.request, _("Two factor authentication has been enabled on your account."))
return out
def get_context_data(self, **kwargs):
context = super(TwoFactorSetupView, self).get_context_data(**kwargs)
if self.steps.current == 'generator':
context["secret"] = self.request.session[self.session_key_name]
return context
@never_cache
@otp_required
@elevate_required
def backup_download_view(request):
static_device = request.user.staticdevice_set.get_or_create(name='backup')[0]
if static_device.token_set.count() == 0:
raise Http404
response = TemplateResponse(request, "account/twofactor-backup-download.txt",
context={"tokens": static_device.token_set.all()},
content_type="text/plain")
response["Content-Disposition"] = "attachment; filename=\"inboxen-backup-tokens.txt\""
return response
backup_view = elevate_required(core.BackupTokensView.as_view(template_name="account/twofactor-backup.html",
success_url="user-twofactor-backup"))
disable_view = elevate_required(otp_required(profile.DisableView.as_view(template_name="account/twofactor-disable.html",
success_url="user-settings")))
login = anonymous_required(LoginView.as_view())
setup_view = elevate_required(TwoFactorSetupView.as_view())
qrcode_view = elevate_required(core.QRGeneratorView.as_view())
|
virtool/virtool | tests/dev/test_api.py | Python | mit | 365 | 0 | import pytest
@pytest.mark.parametrize("dev", [True, False])
async def test_dev_mode(dev, spawn_client):
"""
En | sure that developer endpoint is not a | vailable when not in developer mode.
"""
client = await spawn_client(authorize=True, dev=dev)
resp = await client.post("/dev", {"command": "foo"})
assert resp.status == 204 if dev else 404
|
VirgiAgl/V_AutoGP | test/likelihoods_test/softmax_test.py | Python | apache-2.0 | 2,571 | 0.004667 | import unittest
import numpy as np
import tensorflow as tf
from autogp import util
from autogp import likelihoods
SIG_FIGS = 5
class TestSoftmax(unittest.TestCase):
def log_prob(self, outputs, latent):
softmax = likelihoods.Softmax()
return tf.Session().run(softmax.log_cond_prob(np.array(outputs, dtype=np.float32),
| np.array(latent, dtype=np.float32)))
def predict(self, latent_means, latent_vars):
softmax = likeliho | ods.Softmax()
return tf.Session().run(softmax.predict(np.array(latent_means, dtype=np.float32),
np.array(latent_vars, dtype=np.float32)))
def test_single_prob(self):
log_prob = self.log_prob([[1.0, 0.0]], [[[5.0, 2.0]]])
self.assertAlmostEqual(np.exp(log_prob), np.exp(5.0) / (np.exp(5.0) + np.exp(2.0)),
SIG_FIGS)
def test_extreme_probs(self):
log_prob = self.log_prob([[1.0, 0.0],
[0.0, 1.0]],
[[[1e10, -1e10],
[-1e10, 1e10]],
[[-1e10, 1e10],
[1e10, -1e10]]])
true_probs = np.array([[1.0, 1.0],
[0.0, 0.0]])
np.testing.assert_almost_equal(np.exp(log_prob), true_probs, SIG_FIGS)
def test_multi_probs(self):
log_prob = self.log_prob([[1.0, 0.0, 0.0],
[0.0, 1.0, 0.0],
[0.0, 0.0, 1.0]],
[[[1.0, 2.0, 3.0],
[4.0, 5.0, 6.0],
[7.0, 8.0, 9.0]],
[[10.0, 11.0, 12.0],
[13.0, 14.0, 15.0],
[16.0, 17.0, 18.0]]])
true_probs = np.array([[np.exp(1.0) / (np.exp(1.0) + np.exp(2.0) + np.exp(3.0)),
np.exp(5.0) / (np.exp(4.0) + np.exp(5.0) + np.exp(6.0)),
np.exp(9.0) / (np.exp(7.0) + np.exp(8.0) + np.exp(9.0))],
[np.exp(10.0) / (np.exp(10.0) + np.exp(11.0) + np.exp(12.0)),
np.exp(14.0) / (np.exp(13.0) + np.exp(14.0) + np.exp(15.0)),
np.exp(18.0) / (np.exp(16.0) + np.exp(17.0) + np.exp(18.0))]])
np.testing.assert_almost_equal(np.exp(log_prob), true_probs, SIG_FIGS)
|
Lektorium-LLC/edx-platform | common/djangoapps/student/views.py | Python | agpl-3.0 | 123,198 | 0.002792 | """
Student Views
"""
import datetime
import json
import logging
import uuid
import warnings
from collections import defaultdict, namedtuple
from urlparse import parse_qs, urlsplit, urlunsplit
import analytics
import edx_oauth2_provider
import waffle
from django.conf import settings
from django.contrib import messages
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import AnonymousUser, User
from django.contrib.auth.views import password_reset_confirm
from django.core import mail
from django.core.context_processors import csrf
from django.core.exceptions import ObjectDoesNotExist, PermissionDenied
from django.core.urlresolvers import NoReverseMatch, reverse, reverse_lazy
from django.core.validators import ValidationError, validate_email
from django.db import IntegrityError, transaction
from django.db.models.signals import post_save
from django.dispatch import Signal, receiver
from django.http import Http404, HttpResponse, HttpResponseBadRequest, HttpResponseForbidden
from django.shortcuts import redirect
from django.template.response import TemplateResponse
from django.utils.encoding import force_bytes, force_text
from django.utils.http import base36_to_int, is_safe_url, urlencode, urlsafe_base64_encode
from django.utils.translation import ugettext as _
from django.utils.translation import get_language, ungettext
from django.views.decorators.csrf import csrf_exempt, ensure_csrf_cookie
from django.views.decorators.http import require_GET, require_POST
from django.views.generic import TemplateView
from ipware.ip import get_ip
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from opaque_keys.edx.locator import CourseLocator
from provider.oauth2.models import Client
from pytz import UTC
from ratelimitbackend.exceptions import RateLimitException
from requests import HTTPError
from social_core.backends import oauth as social_oauth
from social_core.exceptions import AuthAlreadyAssociated, AuthException
from social_django import utils as social_utils
import dogstats_wrapper as dog_stats_api
import openedx.core.djangoapps.external_auth.views
import third_party_auth
import track.views
from bulk_email.models import BulkEmailFlag, Optout # pylint: disable=import-error
from certificates.api import get_certificate_url, has_html_certificates_enabled # pylint: disable=import-error
from certificates.models import ( # pylint: disable=import-error
CertificateStatuses,
GeneratedCertificate,
certificate_status_for_student
)
from course_modes.models import CourseMode
from courseware.access import has_access
from courseware.courses import get_courses, sort_by_announcement, sort_by_start_date # pylint: disable=import-error
from django_comment_common.models import assign_role
from edxmako.shortcuts import render_to_response, render_to_string
from eventtracking import tracker
from lms.djangoapps.commerce.utils import EcommerceService # pylint: disable=import-error
from lms.djangoapps.grades.new.course_grade_factory import CourseGradeFactory
from lms.djangoapps.verify_student.models import SoftwareSecurePhotoVerification # pylint: disable=import-error
# Note that this lives in LMS, so this dependency should be refactored.
from notification_prefs.views import enable_notifications
from openedx.core.djangoapps import monitoring_utils
from openedx.core.djangoapps.catalog.utils import get_programs_with_type
from openedx.core.djangoapps.credit.email_utils import get_credit_provider_display_names, make_providers_strings
from openedx.core.djangoapps.embargo import api as embargo_api
from openedx.core.djangoapps.external_auth.login_and_register import login as external_auth_login
from openedx.core.djangoapps.external_auth.login_and_register import register as external_auth_register
from openedx.core.djangoapps.external_auth.models import ExternalAuthMap
from openedx.core.djangoapps.lang_pref import LANGUAGE_KEY
from openedx.core.djangoapps.programs.models import ProgramsApiConfig
from openedx.core.djangoapps.programs.utils import ProgramProgressMeter
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
from openedx.core.djangoapps.theming import helpers as theming_helpers
from openedx.core.djangoapps.user_api.preferences import api as preferences_api
from openedx.core.djangolib.markup import HTML
from openedx.features.course_experience import course_home_url_name
from openedx.features.enterprise_support.api import get_dashboard_consent_notification
from shoppingcart.api import order_history
from shoppingcart.models import CourseRegistrationCode, DonationConfiguration
from student.cookies import delete_logged_in_cookies, set_logged_in_cookies, set_user_info_cookie
from student.forms import AccountCreationForm, PasswordResetFormNoActive, get_registration_extension_form
from student.helpers import (
DISABLE_UNENROLL_CERT_STATES,
auth_pipeline_urls,
check_verify_status_by_course,
destroy_oauth_tokens,
get_next_url_for_login_page
)
from student.models import (
ALLOWEDTOENROLL_TO_ENROLLED,
CourseAccessRole,
CourseEnrollment,
CourseEnrollmentAllowed,
CourseEnrollmentAttribute,
DashboardConfiguration,
LinkedInAddToProfileConfiguration,
LoginFailures,
ManualEnrollmentAudit,
PasswordHistory,
PendingEmailChange,
Registration,
RegistrationCookieConfiguration,
UserAttribute,
UserProfile,
UserSignupSource,
UserStanding,
anonymous_id_for_user,
create_comments_service_user,
unique_id_for_user,
REFUND_ORDER
)
from student.tasks import send_activation_email
from third_party_auth import pipeline, provider
from util.bad_request_rate_limiter import BadRequestRateLimiter
from util.db import outer_atomic
from util.json_request import JsonResponse
from util.milestones_helpers import get_pre_requisite_courses_not_completed
from util.password_policy_validators import validate_password_length, validate_password_strength
from xmodule.modulestore.django import modulestore
log = logging.getLogger("edx.student")
AUDIT_LOG = logging.getLogger("audit")
ReverifyInfo = namedtuple('ReverifyInfo', 'course_id course_name course_number date status display') # pylint: disable=invalid-name
SETTING_CHANGE_INITIATED = 'edx.user.settings.change_initiated'
# Used as the name of the user attribute for tracking affiliate registrations
REGISTRATION_AFFILIATE_ID = 'registration_affiliate_id'
REGISTRATION_UTM_PARAMETERS = {
'utm_source': 'registration_utm_source',
'utm_medium': 'registration_utm_medium',
'utm_campaign': 'registration_utm_campaign',
'utm_term': 'registration_utm_term',
'utm_content': 'registration_utm_content',
}
REGISTRATION_UTM_CREATED_AT = 'registration_utm_created_at'
# used to announce a registration
REGISTER_USER = Signal(providing_args=["user", "registration"])
# Disable this warning because it doesn't make sense to c | ompletely refactor tests to appease Pylint
# pylint: disable=logging-format-interpolation
def csrf_token(context):
"""A csrf token that can be included in a form."""
token = context.get('csrf_token', '')
if token == 'NOTPROVIDED':
return ''
return (u'<di | v style="display:none"><input type="hidden"'
' name="csrfmiddlewaretoken" value="%s" /></div>' % (token))
# NOTE: This view is not linked to directly--it is called from
# branding/views.py:index(), which is cached for anonymous users.
# This means that it should always return the same thing for anon
# users. (in particular, no switching based on query params allowed)
def index(request, extra_context=None, user=AnonymousUser()):
"""
Render the edX main page.
extra_context is used to allow immediate display of certain modal windows, eg signup,
as used by external_auth.
"""
if extra_context is None:
extra_context = {}
programs_list = []
courses = get_courses(user)
if configuration_helpers.get_value(
"ENABLE_COURSE_SORTING_BY_START_DATE",
settings.FEATURES["ENABLE_COURSE_SORTING_BY_START_DATE"],
):
|
yanheven/neutron | neutron/agent/dhcp/agent.py | Python | apache-2.0 | 24,464 | 0.000082 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import os
import eventlet
from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging
from oslo_service import loopingcall
from oslo_utils import importutils
from neutron.agent.linux import dhcp
from neutron.agent.linux import external_process
from neutron.agent.metadata import driver as metadata_driver
from neutron.agent import rpc as agent_rpc
from neutron.common import constants
from neutron.common import exceptions
from neutron.common import rpc as n_rpc
from neutron.common import topics
from neutron.common import utils
from neutron import context
from neutron.i18n import _LE, _LI, _LW
from neutron import manager
LOG = logging.getLogger(__name__)
class DhcpAgent(manager.Manager):
"""DHCP agent service manager.
Note that the public methods of this class are exposed as the server side
of an rpc interface. The neutron server uses
neutron.api.rpc.agentnotifiers.dhcp_rpc_agent_api.DhcpAgentNotifyApi as the
client side to execute the methods here. For more information about
changing rpc interfaces, see doc/source/devref/rpc_api.rst.
"""
target = oslo_messaging.Target(version='1.0')
def __init__(self, host=None, conf=None):
super(DhcpAgent, self).__init__(host=host)
self.needs_resync_reasons = collections.defaultdict(list)
self.conf = conf or cfg.CONF
self.cache = NetworkCache()
self.dhcp_driver_cls = importutils.import_class(self.conf.dhcp_driver)
ctx = context.get_admin_context_without_session()
self.plugin_rpc = DhcpPluginApi(topics.PLUGIN,
ctx, self.conf.use_namespaces,
self.conf.host)
# create dhcp dir to store dhcp info
dhcp_dir = os.path.dirname("/%s/dhcp/" % self.conf.state_path)
utils.ensure_dir(dhcp_dir)
self.dhcp_version = self.dhcp_driver_cls.check_version()
self._populate_networks_cache()
self._process_monitor = external_process.ProcessMonitor(
config=self.conf,
resource_type='dhcp')
def init_host(self):
self.sync_state()
def _populate_networks_cache(self):
"""Populate the networks cache when the DHCP-agent starts."""
try:
existing_networks = self.dhcp_driver_cls.existing_dhcp_networks(
self.conf
)
for net_id in existing_networks:
net = dhcp.NetModel(self.conf.use_namespaces,
{"id": net_id,
"subnets": [],
"ports": []})
self.cache.put(net)
except NotImplementedError:
# just go ahead with an empty networks cache
LOG.debug("The '%s' | DHCP-driver does not support retrieving of a "
"list of existing networks",
self.conf.dhcp_dri | ver)
def after_start(self):
self.run()
LOG.info(_LI("DHCP agent started"))
def run(self):
"""Activate the DHCP agent."""
self.sync_state()
self.periodic_resync()
    def call_driver(self, action, network, **action_kwargs):
        """Invoke an action on a DHCP driver instance.

        :param action: name of the driver method to call (e.g. 'enable').
        :param network: network model object passed to the driver.
        :returns: True when the driver call succeeded, None otherwise.
        """
        LOG.debug('Calling driver for network: %(net)s action: %(action)s',
                  {'net': network.id, 'action': action})
        try:
            # the Driver expects something that is duck typed similar to
            # the base models.
            driver = self.dhcp_driver_cls(self.conf,
                                          network,
                                          self._process_monitor,
                                          self.dhcp_version,
                                          self.plugin_rpc)
            getattr(driver, action)(**action_kwargs)
            return True
        except exceptions.Conflict:
            # No need to resync here, the agent will receive the event related
            # to a status update for the network
            LOG.warning(_LW('Unable to %(action)s dhcp for %(net_id)s: there '
                            'is a conflict with its current state; please '
                            'check that the network and/or its subnet(s) '
                            'still exist.'),
                        {'net_id': network.id, 'action': action})
        except Exception as e:
            if getattr(e, 'exc_type', '') != 'IpAddressGenerationFailure':
                # Don't resync if port could not be created because of an IP
                # allocation failure. When the subnet is updated with a new
                # allocation pool or a port is deleted to free up an IP, this
                # will automatically be retried on the notification
                self.schedule_resync(e, network.id)
            if (isinstance(e, oslo_messaging.RemoteError)
                and e.exc_type == 'NetworkNotFound'
                or isinstance(e, exceptions.NetworkNotFound)):
                # A deleted network is expected churn; warn without traceback.
                LOG.warning(_LW("Network %s has been deleted."), network.id)
            else:
                LOG.exception(_LE('Unable to %(action)s dhcp for %(net_id)s.'),
                              {'net_id': network.id, 'action': action})
    def schedule_resync(self, reason, network_id=None):
        """Schedule a resync for a given network and reason. If no network is
        specified, resync all networks.
        """
        # network_id of None is the "all networks" bucket in the mapping.
        self.needs_resync_reasons[network_id].append(reason)
    @utils.synchronized('dhcp-agent')
    def sync_state(self, networks=None):
        """Sync the local DHCP state with Neutron. If no networks are passed,
        or 'None' is one of the networks, sync all of the networks.
        """
        # An empty only_nets set means "resync everything".
        only_nets = set([] if (not networks or None in networks) else networks)
        LOG.info(_LI('Synchronizing state'))
        pool = eventlet.GreenPool(self.conf.num_sync_threads)
        known_network_ids = set(self.cache.get_network_ids())
        try:
            active_networks = self.plugin_rpc.get_active_networks_info()
            active_network_ids = set(network.id for network in active_networks)
            # Tear down DHCP for networks that no longer exist server-side.
            for deleted_id in known_network_ids - active_network_ids:
                try:
                    self.disable_dhcp_helper(deleted_id)
                except Exception as e:
                    self.schedule_resync(e, deleted_id)
                    LOG.exception(_LE('Unable to sync network state on '
                                      'deleted network %s'), deleted_id)
            for network in active_networks:
                if (not only_nets or  # specifically resync all
                    network.id not in known_network_ids or  # missing net
                    network.id in only_nets):  # specific network to sync
                    pool.spawn(self.safe_configure_dhcp_for_network, network)
            pool.waitall()
            LOG.info(_LI('Synchronizing state complete'))
        except Exception as e:
            # Re-queue the failed networks so the periodic loop retries them.
            if only_nets:
                for network_id in only_nets:
                    self.schedule_resync(e, network_id)
            else:
                self.schedule_resync(e)
            LOG.exception(_LE('Unable to sync network state.'))
@utils.exception_logger()
def _periodic_resync_helper(self):
"""Resync the dhcp state at the configured interval."""
while True:
eventlet.sleep(self.conf.resync_interval)
if self.needs_re |
kzys/buildbot | contrib/github_buildbot.py | Python | gpl-2.0 | 6,842 | 0.010085 | #!/usr/bin/env python
"""
github_buildbot.py is based on git_buildbot.py
github_buildbot.py will determine the repository information from the JSON
HTTP POST it receives from github.com and build the appropriate repository.
If your github repository is private, you must add a ssh key to the github
repository for the user who initiated the build on the buildslave.
"""
import tempfile
import logging
import re
import sys
import traceback
from twisted.web import server, resource
from twisted.internet import reactor
from twisted.spread import pb
from twisted.cred import credentials
from optparse import OptionParser
try:
import json
except ImportError:
import simplejson as json
class GitHubBuildBot(resource.Resource):
"""
GitHubBuildBot creates the webserver that responds to the GitHub Service
Hook.
"""
isLeaf = True
github = None
master = None
port = None
    def render_POST(self, request):
        """
        Reponds only to POST events and starts the build process

        :arguments:
            request
                the http request object
        """
        try:
            payload = json.loads(request.args['payload'][0])
            logging.debug("Payload: " + str(payload))
            # NOTE(review): process_change() is declared with
            # (payload, user, repo, github_url) but only the payload is
            # passed here -- confirm the intended signature upstream.
            self.process_change(payload)
        except Exception:
            # Never let an exception escape into twisted's request handling;
            # log the full traceback instead.
            logging.error("Encountered an exception:")
            for msg in traceback.format_exception(*sys.exc_info()):
                logging.error(msg.strip())
def process_change(self, payload, user, repo, github_url):
"""
Consumes the JSON as a python object and actually starts the build.
:arguments:
payload
Python Object that represents the JSON sent by GitHub Service
Hook.
"""
changes = []
newrev = payload['after']
refname = payload['ref']
# We only care about regular heads, i.e. branches
match = re.match(r"^refs\/heads\/(.+)$", refname)
if not match:
logging.info("Ignoring refname `%s': Not a branch" % refname)
branch = match.group(1)
# Find out if the branch was created, deleted or updated. Branches
# being deleted aren't really interesting.
if re.match(r"^0*$", newrev):
logging.info("Branch `%s' deleted, ignoring" % branch)
else:
for commit in payload['commits']:
files = []
files.extend(commit['added'])
files.extend(commit['modified'])
f | iles.extend(commit['removed'])
change = {'revision': commit['id'],
'revlink': commit['url'],
'comments': commit['message'],
'branch': branch,
'who': commit['author']['name']
+ " <" + commit['author']['email'] + ">",
'files': files,
'links': [commit['url']],
'properties': {'repos | itory':
self.repo_url(user, repo, github_url)},
}
changes.append(change)
# Submit the changes, if any
if not changes:
logging.warning("No changes found")
return
host, port = self.master.split(':')
port = int(port)
factory = pb.PBClientFactory()
deferred = factory.login(credentials.UsernamePassword("change",
"changepw"))
reactor.connectTCP(host, port, factory)
deferred.addErrback(self.connectFailed)
deferred.addCallback(self.connected, changes)
def connectFailed(self, error):
"""
If connection is failed. Logs the error.
"""
logging.error("Could not connect to master: %s"
% error.getErrorMessage())
return error
    def addChange(self, dummy, remote, changei):
        """
        Sends changes from the commit to the buildmaster.
        """
        logging.debug("addChange %s, %s" % (repr(remote), repr(changei)))
        try:
            change = changei.next()  # Python 2 iterator protocol
        except StopIteration:
            # All changes submitted; drop the PB connection.
            remote.broker.transport.loseConnection()
            return None
        logging.info("New revision: %s" % change['revision'][:8])
        for key, value in change.iteritems():
            logging.debug("  %s: %s" % (key, value))
        # Submit this change, then recurse (via callback) for the next one.
        deferred = remote.callRemote('addChange', change)
        deferred.addCallback(self.addChange, remote, changei)
        return deferred
def connected(self, remote, changes):
"""
Reponds to the connected event.
"""
return self.addChange(None, remote, changes.__iter__())
def main():
    """
    The main event loop that starts the server and configures it.
    """
    usage = "usage: %prog [options]"
    parser = OptionParser(usage)
    parser.add_option("-p", "--port",
        help="Port the HTTP server listens to for the GitHub Service Hook"
        + " [default: %default]", default=4000, type=int, dest="port")
    parser.add_option("-m", "--buildmaster",
        help="Buildbot Master host and port. ie: localhost:9989 [default:"
        + " %default]", default="localhost:9989", dest="buildmaster")
    parser.add_option("-l", "--log",
        help="The absolute path, including filename, to save the log to"
        + " [default: %default]",
        default = tempfile.gettempdir() + "/github_buildbot.log",
        dest="log")
    parser.add_option("-L", "--level",
        help="The logging level: debug, info, warn, error, fatal [default:"
        + " %default]", default='warn', dest="level")
    parser.add_option("-g", "--github",
        help="The github server. Changing this is useful if you've specified"
        + " a specific HOST handle in ~/.ssh/config for github "
        + "[default: %default]", default='github.com',
        dest="github")
    (options, _) = parser.parse_args()
    # Map the CLI level name onto the stdlib logging constant.
    levels = {
        'debug':logging.DEBUG,
        'info':logging.INFO,
        'warn':logging.WARNING,
        'error':logging.ERROR,
        'fatal':logging.FATAL,
    }
    filename = options.log
    log_format = "%(asctime)s - %(levelname)s - %(message)s"
    logging.basicConfig(filename=filename, format=log_format,
                        level=levels[options.level])
    # Wire the handler into a twisted HTTP site and serve until interrupted.
    github_bot = GitHubBuildBot()
    github_bot.github = options.github
    github_bot.master = options.buildmaster
    site = server.Site(github_bot)
    reactor.listenTCP(options.port, site)
    reactor.run()
if __name__ == '__main__':
main() |
romses/LXC-Web-Panel | tests/api.py | Python | mit | 2,899 | 0.006554 | import subprocess
import unittest
import urllib2
import shutil
import json
import ast
import os
from flask import Flask
from flask.ext.testing import LiveServerTestCase

from lwp.app import app
from lwp.utils import connect_db

# API token seeded into the test database before the live server starts.
# (fix: repaired garbled 'LiveServerTestCa | se' / 't | oken' artifacts)
token = 'myrandomapites0987'
class TestApi(LiveServerTestCase):
    """End-to-end tests against a live LWP server instance.

    NOTE(review): methods are numbered (test_00..test_05) so the default
    alphabetical run order makes later tests see state created by earlier
    ones (e.g. test_02 starts the container created in test_01) -- confirm
    the runner preserves that order.
    """

    db = None
    # Default header for JSON request bodies.
    type_json = {'Content-Type': 'application/json'}

    def create_app(self):
        """Prepare a scratch sqlite database seeded with the test API token."""
        shutil.copyfile('lwp.db', '/tmp/db.sql')
        self.db = connect_db('/tmp/db.sql')
        self.db.execute('insert into api_tokens(description, token) values(?, ?)', ['test', token])
        self.db.commit()
        app.config['DATABASE'] = '/tmp/db.sql'
        return app

    def test_00_get_containers(self):
        """GET /containers/ answers 200 for a valid token."""
        shutil.rmtree('/tmp/lxc/', ignore_errors=True)
        request = urllib2.Request(self.get_server_url() + '/api/v1/containers/',
                                  headers={'Private-Token': token})
        response = urllib2.urlopen(request)
        self.assertEqual(response.code, 200)
        #assert isinstance(response.read(), list)

    def test_01_put_containers(self):
        """PUT /containers/ creates a container from the sshd template."""
        data = {'name': 'test_vm_sshd', 'template': 'sshd'}
        request = urllib2.Request(self.get_server_url() + '/api/v1/containers/', json.dumps(data),
                                  headers={'Private-Token': token, 'Content-Type': 'application/json' })
        request.get_method = lambda: 'PUT'
        response = urllib2.urlopen(request)
        self.assertEqual(response.code, 200)
        assert data['name'] in os.listdir('/tmp/lxc')

    def test_02_post_containers(self):
        """POST an action to start the container created in test_01."""
        data = {'action': 'start'}
        request = urllib2.Request(self.get_server_url() + '/api/v1/containers/test_vm_sshd', json.dumps(data),
                                  headers={'Private-Token': token, 'Content-Type': 'application/json'})
        request.get_method = lambda: 'POST'
        response = urllib2.urlopen(request)
        self.assertEqual(response.code, 200)

    def test_03_delete_containers(self):
        """DELETE removes the test container."""
        request = urllib2.Request(self.get_server_url() + '/api/v1/containers/test_vm_sshd',
                                  headers={'Private-Token': token})
        request.get_method = lambda: 'DELETE'
        response = urllib2.urlopen(request)
        self.assertEqual(response.code, 200)

    def test_04_post_token(self):
        """POST /tokens/ registers a new API token."""
        data = {'token': 'test'}
        request = urllib2.Request(self.get_server_url() + '/api/v1/tokens/', json.dumps(data),
                                  headers={'Private-Token': token, 'Content-Type': 'application/json'})
        response = urllib2.urlopen(request)
        self.assertEqual(response.code, 200)

    def test_05_delete_token(self):
        """DELETE /tokens/test revokes the token added in test_04."""
        request = urllib2.Request(self.get_server_url() + '/api/v1/tokens/test',
                                  headers={'Private-Token': token})
        request.get_method = lambda: 'DELETE'
        response = urllib2.urlopen(request)
        self.assertEqual(response.code, 200)
if __name__ == '__main__':
unittest.main()
|
txomon/vdsm | vdsm/rpc/bindingxmlrpc.py | Python | gpl-2.0 | 48,255 | 0.000041 | #
# Copyright 2012 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
from errno import EINTR
import json
import httplib
import logging
import libvirt
import threading
import re
import sys
from vdsm.password import (ProtectedPassword,
protect_passwords,
unprotect_passwords)
from vdsm import utils
from vdsm import xmlrpc
from vdsm.define import doneCode, errCode
from vdsm.netinfo import getDeviceByIP
import API
from vdsm.exception import VdsmException
try:
from gluster.api import getGlusterMethods
_glusterEnabled = True
except ImportError:
_glusterEnabled = False
class BindingXMLRPC(object):
    def __init__(self, cif, log):
        """Create the XML-RPC binding for the clientIF instance *cif*."""
        self.cif = cif
        self.log = log
        self._enabled = False
        self.server = self._createXMLRPCServer()
def start(self):
"""
Register xml-rpc functions and serve clients until stopped
"""
@utils.traceback(on=self.log.name)
def threaded_start():
self._registerFunctions()
self | .server.timeout = 1
self._enabled = True
while self._enabled:
try:
self.server.handle_request()
except Exception as e:
if e[0] != EINTR:
self.log.error("xml-rpc handler exception",
exc_info=True)
self._thread = threading.Thread(target=threaded_start,
| name='BindingXMLRPC')
self._thread.daemon = True
self._thread.start()
    def add_socket(self, connected_socket, socket_address):
        # Hand an already-accepted connection over to the XML-RPC server.
        self.server.add(connected_socket, socket_address)
    def stop(self):
        """Exit the serve loop, close the socket and join the worker thread."""
        self._enabled = False
        self.server.server_close()
        self._thread.join()
        return {'status': doneCode}
def _createXMLRPCServer(self):
"""
Create xml-rpc server over http
"""
HTTP_HEADER_FLOWID = "FlowID"
threadLocal = self.cif.threadLocal
class RequestHandler(xmlrpc.IPXMLRPCRequestHandler):
# Timeout for the request socket
timeout = 60
log = logging.getLogger("BindingXMLRPC.RequestHandler")
HEADER_POOL = 'Storage-Pool-Id'
HEADER_DOMAIN = 'Storage-Domain-Id'
HEADER_IMAGE = 'Image-Id'
HEADER_VOLUME = 'Volume-Id'
HEADER_TASK_ID = 'Task-Id'
HEADER_RANGE = 'Range'
HEADER_CONTENT_LENGTH = 'content-length'
HEADER_CONTENT_TYPE = 'content-type'
HEADER_CONTENT_RANGE = 'content-range'
            class RequestException():
                """Carries an HTTP status code plus message for error replies."""
                def __init__(self, httpStatusCode, errorMessage):
                    self.httpStatusCode = httpStatusCode
                    self.errorMessage = errorMessage
            def setup(self):
                """Record client/server addresses in thread-local storage."""
                threadLocal.client = self.client_address[0]
                threadLocal.server = self.request.getsockname()[0]
                return xmlrpc.IPXMLRPCRequestHandler.setup(self)
            def do_GET(self):
                """Stream an image to the HTTP client (upload-to-stream)."""
                try:
                    length = self._getLength()
                    img = self._createImage()
                    startEvent = threading.Event()
                    methodArgs = {'fileObj': self.wfile,
                                  'length': length}
                    uploadFinishedEvent, operationEndCallback = \
                        self._createEventWithCallback()
                    # Optional header
                    volUUID = self.headers.getheader(self.HEADER_VOLUME)
                    response = img.uploadToStream(methodArgs,
                                                  operationEndCallback,
                                                  startEvent, volUUID)
                    if response['status']['code'] == 0:
                        self.send_response(httplib.PARTIAL_CONTENT)
                        self.send_header(self.HEADER_CONTENT_TYPE,
                                         'application/octet-stream')
                        self.send_header(self.HEADER_CONTENT_LENGTH, length)
                        self.send_header(self.HEADER_CONTENT_RANGE,
                                         "bytes 0-%d" % (length - 1))
                        self.send_header(self.HEADER_TASK_ID, response['uuid'])
                        self.end_headers()
                        # Unblock the vdsm task, then wait for it to finish
                        # streaming before completing the request.
                        startEvent.set()
                        self._waitForEvent(uploadFinishedEvent)
                    else:
                        self._send_error_response(response)
                except self.RequestException as e:
                    # This is an expected exception, so traceback is unneeded
                    self.send_error(e.httpStatusCode, e.errorMessage)
                except Exception:
                    # NOTE(review): the stdlib send_error() does not accept an
                    # exc_info keyword -- verify this handler overrides it.
                    self.send_error(httplib.INTERNAL_SERVER_ERROR,
                                    "error during execution",
                                    exc_info=True)
            def do_PUT(self):
                """Receive an image from the HTTP client (download-from-stream)."""
                try:
                    contentLength = self._getIntHeader(
                        self.HEADER_CONTENT_LENGTH,
                        httplib.LENGTH_REQUIRED)
                    img = self._createImage()
                    methodArgs = {'fileObj': self.rfile,
                                  'length': contentLength}
                    uploadFinishedEvent, operationEndCallback = \
                        self._createEventWithCallback()
                    # Optional header
                    volUUID = self.headers.getheader(self.HEADER_VOLUME)
                    response = img.downloadFromStream(methodArgs,
                                                      operationEndCallback,
                                                      volUUID)
                    if response['status']['code'] == 0:
                        # Block until the vdsm task signals completion.
                        while not uploadFinishedEvent.is_set():
                            uploadFinishedEvent.wait()
                        self.send_response(httplib.OK)
                        self.send_header(self.HEADER_TASK_ID, response['uuid'])
                        self.end_headers()
                    else:
                        self._send_error_response(response)
                except self.RequestException as e:
                    self.send_error(e.httpStatusCode, e.errorMessage)
                except Exception:
                    # NOTE(review): the stdlib send_error() does not accept an
                    # exc_info keyword -- verify this handler overrides it.
                    self.send_error(httplib.INTERNAL_SERVER_ERROR,
                                    "error during execution",
                                    exc_info=True)
            def _createImage(self):
                """Build an API.Image from the required request headers.

                Raises RequestException (400) when any of pool/domain/image
                headers are missing or empty.
                """
                # Required headers
                spUUID = self.headers.getheader(self.HEADER_POOL)
                sdUUID = self.headers.getheader(self.HEADER_DOMAIN)
                imgUUID = self.headers.getheader(self.HEADER_IMAGE)
                if not all((spUUID, sdUUID, imgUUID)):
                    raise self.RequestException(
                        httplib.BAD_REQUEST,
                        "missing or empty required header(s):"
                        " spUUID=%s sdUUID=%s imgUUID=%s"
                        % (spUUID, sdUUID, imgUUID))
                return API.Image(imgUUID, spUUID, sdUUID)
@staticmethod
def _createEventWithCallback():
operationFinishedEvent = threading.Event()
|
kjedruczyk/phabricator-tools | py/abd/abdt_landinglog__t.py | Python | apache-2.0 | 4,640 | 0 | """Test suite for abdt_landinglog."""
# =============================================================================
# TEST PLAN
# -----------------------------------------------------------------------------
# Here we detail the things we are concerned to test and specify which tests
# cover those concerns.
#
# Concerns:
# [ A] can prepend to landinglog when the landinglog ref doesn't exist yet
# [ A] can push a new file to 'refs/arcyd/landinglog' without being rejected
# [ A] can push to 'refs/arcyd/landinglog' and get from another clone
# -----------------------------------------------------------------------------
# Tests:
# [ A] test_A_Breathing
# =============================================================================
from __future__ import absolute_import
from __future__ import division
from __ | future__ import print_function
import unittest
import phlsys_fs
import phlsys_git
import phlsys_subprocess
import abdt_landinglog
class Test(unittest.TestCase):
    """Breathing test for abdt_landinglog against real git repositories."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_A_Breathing(self):
        with phlsys_fs.chtmpdir_context():
            # Fetch refspec so clones mirror the arcyd landinglog ref.
            # (fix: repaired garbled 'fetc | h=' artifact in this string)
            fetch_config = str(
                'remote.origin.fetch=+refs/arcyd/landinglog'
                ':refs/arcyd/origin/landinglog')
            run = phlsys_subprocess.run_commands
            run('git init --bare origin')
            run('git clone origin dev --config ' + fetch_config)
            with phlsys_fs.chdir_context('dev'):
                # make an initial commit on the master branch
                run('touch README')
                run('git add README')
                run('git commit README -m initial_commit')
                run('git push origin master')
                run('git checkout -b myfeature')
                # create a new branch with unique content
                with open('README', 'w') as f:
                    f.write('myfeature content')
                run('git add README')
                run('git commit README -m myfeature_content')
                run('git push -u origin myfeature')
            dev = phlsys_git.Repo('dev')
            # make sure we can prepend a branch to the landinglog when empty
            abdt_landinglog.prepend(dev, '1234', 'myfeature', '4567')
            log = abdt_landinglog.get_log(dev)
            self.assertEqual(1, len(log))
            self.assertEqual(log[0].review_sha1, "1234")
            self.assertEqual(log[0].name, "myfeature")
            self.assertEqual(log[0].landed_sha1, "4567")
            # make sure we can prepend another branch
            abdt_landinglog.prepend(dev, '5678', 'newfeature', '8901')
            log = abdt_landinglog.get_log(dev)
            self.assertEqual(2, len(log))
            self.assertEqual(log[0].review_sha1, "5678")
            self.assertEqual(log[0].name, "newfeature")
            self.assertEqual(log[0].landed_sha1, "8901")
            self.assertEqual(log[1].review_sha1, "1234")
            self.assertEqual(log[1].name, "myfeature")
            self.assertEqual(log[1].landed_sha1, "4567")
            # make a new, independent clone and make sure we get the same log
            abdt_landinglog.push_log(dev, 'origin')
            run('git clone origin dev2 --config ' + fetch_config)
            with phlsys_fs.chdir_context('dev2'):
                run('git fetch')
            dev2 = phlsys_git.Repo('dev2')
            self.assertListEqual(
                abdt_landinglog.get_log(dev),
                abdt_landinglog.get_log(dev2))
            # prepend the max number of entries and make sure 'newfeature' goes
            # for i in xrange(abdt_landinglog._MAX_LOG_LENGTH):
            #     abdt_landinglog.prepend(dev, '90', 'scrolling')
            # log = abdt_landinglog._get_log_raw(dev)
            # self.assertNotIn(log, 'newfeature')
# -----------------------------------------------------------------------------
# Copyright (C) 2013-2014 Bloomberg Finance L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------ END-OF-FILE ----------------------------------
|
zsdonghao/tensorlayer | tests/models/test_seq2seq_with_attention.py | Python | apache-2.0 | 3,633 | 0.003303 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import unittest
import numpy as np
import tensorflow as tf
from sklearn.utils import shuffle
from tqdm import tqdm
import tensorlayer as tl
from tensorlayer.cost import cross_entropy_seq
from tensorlayer.models.seq2seq_with_attention import Seq2seqLuongAttention
from tests.utils import CustomTestCase
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
class Model_SEQ2SEQ_WITH_ATTENTION_Test(CustomTestCase):
    """Train Seq2seqLuongAttention on a noisy-sine toy dataset as a smoke test."""

    @classmethod
    def setUpClass(cls):
        cls.batch_size = 16
        cls.vocab_size = 200
        cls.embedding_size = 32
        cls.dec_seq_length = 5
        cls.pure_time = np.linspace(-1, 1, 21)
        cls.pure_signal = 100 * np.sin(cls.pure_time)
        cls.dataset = np.zeros((100, 21))
        for i in range(100):
            noise = 100 + 1 * np.random.normal(0, 1, cls.pure_signal.shape)
            cls.dataset[i] = cls.pure_signal + noise
        cls.dataset = cls.dataset.astype(int)
        np.random.shuffle(cls.dataset)
        # First 15 timesteps are the encoder input, last 6 the decoder target.
        cls.trainX = cls.dataset[:80, :15]
        cls.trainY = cls.dataset[:80, 15:]
        cls.testX = cls.dataset[80:, :15]
        cls.testY = cls.dataset[80:, 15:]
        cls.trainY[:, 0] = 0  # start_token == 0
        cls.testY[:, 0] = 0  # start_token == 0
        # Parameters
        cls.src_len = len(cls.trainX)
        cls.tgt_len = len(cls.trainY)
        assert cls.src_len == cls.tgt_len
        cls.num_epochs = 500
        cls.n_step = cls.src_len // cls.batch_size

    @classmethod
    def tearDownClass(cls):
        pass

    def test_basic_simpleSeq2Seq(self):
        # (fix: repaired garbled 'vocab_si | ze' and ' | shuffle' artifacts)
        model_ = Seq2seqLuongAttention(
            hidden_size=128, cell=tf.keras.layers.SimpleRNNCell,
            embedding_layer=tl.layers.Embedding(vocabulary_size=self.vocab_size,
                                                embedding_size=self.embedding_size), method='dot'
        )
        optimizer = tf.optimizers.Adam(learning_rate=0.001)
        for epoch in range(self.num_epochs):
            model_.train()
            trainX, trainY = shuffle(self.trainX, self.trainY)
            total_loss, n_iter = 0, 0
            for X, Y in tqdm(tl.iterate.minibatches(inputs=trainX, targets=trainY, batch_size=self.batch_size,
                                                    shuffle=False), total=self.n_step,
                             desc='Epoch[{}/{}]'.format(epoch + 1, self.num_epochs), leave=False):
                # Teacher forcing: decoder input is Y shifted right by one.
                dec_seq = Y[:, :-1]
                target_seq = Y[:, 1:]
                with tf.GradientTape() as tape:
                    ## compute outputs
                    output = model_(inputs=[X, dec_seq])
                    # print(output)
                    output = tf.reshape(output, [-1, self.vocab_size])
                    loss = cross_entropy_seq(logits=output, target_seqs=target_seq)
                grad = tape.gradient(loss, model_.trainable_weights)
                optimizer.apply_gradients(zip(grad, model_.trainable_weights))
                total_loss += loss
                n_iter += 1
            model_.eval()
            test_sample = self.testX[:5, :].tolist()  # Can't capture the sequence.
            top_n = 1
            for i in range(top_n):
                prediction = model_([test_sample], seq_length=self.dec_seq_length, sos=0)
                print("Prediction: >>>>> ", prediction, "\n Target: >>>>> ", self.testY[:5, 1:], "\n\n")
            # printing average loss after every epoch
            print('Epoch [{}/{}]: loss {:.4f}'.format(epoch + 1, self.num_epochs, total_loss / n_iter))
if __name__ == '__main__':
unittest.main()
|
easytaxibr/redash | redash/handlers/queries.py | Python | bsd-2-clause | 5,877 | 0.003233 | from flask import request
from flask_restful import abort
from flask_login import login_required
import sqlparse
from funcy import distinct, take
from itertools import chain
from redash.handlers.base import routes, org_scoped_rule, paginate
from redash.handlers.query_results import run_query
from redash import models
from redash.permissions import require_permission, require_access, require_admin_or_owner, not_view_only, view_only, \
require_object_modify_permission, can_modify
from redash.handlers.base import BaseResource, get_object_or_404
from redash.utils import collect_parameters_from_request
@routes.route(org_scoped_rule('/api/queries/format'), methods=['POST'])
@login_required
def format_sql_query(org_slug=None):
    """Reformat the SQL from the request body (reindent, upper-case keywords)."""
    arguments = request.get_json(force=True)
    query = arguments.get("query", "")
    return sqlparse.format(query, reindent=True, keyword_case='upper')
class QuerySearchResource(BaseResource):
    """Search queries visible to the current user's groups."""

    @require_permission('view_query')
    def get(self):
        """Return serialized queries matching the `q` query-string term."""
        # (fix: repaired garbled 'sel | f' artifact in the signature)
        term = request.args.get('q', '')
        return [q.to_dict(with_last_modified_by=False) for q in models.Query.search(term, self.current_user.groups)]
class QueryRecentResource(BaseResource):
    """Recently used queries for the current user."""

    @require_permission('view_query')
    def get(self):
        """Return up to 20 distinct recent queries, the user's own first."""
        # (fix: repaired garbled 'curr | ent_user' artifact here)
        queries = models.Query.recent(self.current_user.groups, self.current_user.id)
        recent = [d.to_dict(with_last_modified_by=False) for d in queries]
        global_recent = []
        # Pad with group-wide recent queries when the user has few of their own.
        if len(recent) < 10:
            global_recent = [d.to_dict(with_last_modified_by=False) for d in models.Query.recent(self.current_user.groups)]
        return take(20, distinct(chain(recent, global_recent), key=lambda d: d['id']))
class QueryListResource(BaseResource):
    """Create new queries and list all queries visible to the user."""

    @require_permission('create_query')
    def post(self):
        """Create a query from the JSON body; returns the serialized query."""
        query_def = request.get_json(force=True)
        data_source = models.DataSource.get_by_id_and_org(query_def.pop('data_source_id'), self.current_org)
        require_access(data_source.groups, self.current_user, not_view_only)
        # Strip server-managed fields the client must not set directly.
        for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data', 'last_modified_by']:
            query_def.pop(field, None)
        # If we already executed this query, save the query result reference
        if 'latest_query_data_id' in query_def:
            query_def['latest_query_data'] = query_def.pop('latest_query_data_id')
        query_def['user'] = self.current_user
        query_def['data_source'] = data_source
        query_def['org'] = self.current_org
        query = models.Query.create(**query_def)
        self.record_event({
            'action': 'create',
            'object_id': query.id,
            'object_type': 'query'
        })
        return query.to_dict()

    @require_permission('view_query')
    def get(self):
        """Return a paginated listing of all queries the user may view."""
        results = models.Query.all_queries(self.current_user.groups)
        page = request.args.get('page', 1, type=int)
        page_size = request.args.get('page_size', 25, type=int)
        return paginate(results, page, page_size, lambda q: q.to_dict(with_stats=True, with_last_modified_by=False))
class MyQueriesResource(BaseResource):
    """Paginated listing of the current user's own queries."""

    @require_permission('view_query')
    def get(self):
        """List the user's queries; the `drafts` query arg toggles draft filtering."""
        include_drafts = request.args.get('drafts') is not None
        query_set = models.Query.by_user(self.current_user, include_drafts)

        page_number = request.args.get('page', 1, type=int)
        items_per_page = request.args.get('page_size', 25, type=int)

        def serialize(q):
            return q.to_dict(with_stats=True, with_last_modified_by=False)

        return paginate(query_set, page_number, items_per_page, serialize)
class QueryResource(BaseResource):
    """Retrieve, update and archive a single query."""

    @require_permission('edit_query')
    def post(self, query_id):
        """Update query *query_id* from the JSON body; aborts 409 on conflict."""
        query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
        query_def = request.get_json(force=True)
        require_object_modify_permission(query, self.current_user)
        # Strip server-managed fields the client must not set directly.
        for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data', 'user', 'last_modified_by', 'org']:
            query_def.pop(field, None)
        if 'latest_query_data_id' in query_def:
            query_def['latest_query_data'] = query_def.pop('latest_query_data_id')
        if 'data_source_id' in query_def:
            query_def['data_source'] = query_def.pop('data_source_id')
        query_def['last_modified_by'] = self.current_user
        query_def['changed_by'] = self.current_user
        try:
            query.update_instance(**query_def)
        except models.ConflictDetectedError:
            # Concurrent modification detected; client should reload and retry.
            abort(409)
        # old_query = copy.deepcopy(query.to_dict())
        # new_change = query.update_instance_tracked(changing_user=self.current_user, old_object=old_query, **query_def)
        # abort(409) # HTTP 'Conflict' status code
        result = query.to_dict(with_visualizations=True)
        return result

    @require_permission('view_query')
    def get(self, query_id):
        """Return the serialized query plus whether the user may edit it."""
        q = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
        require_access(q.groups, self.current_user, view_only)
        result = q.to_dict(with_visualizations=True)
        result['can_edit'] = can_modify(q, self.current_user)
        return result

    # TODO: move to resource of its own? (POST /queries/{id}/archive)
    def delete(self, query_id):
        """Archive (soft-delete) the query; owner or admin only."""
        query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
        require_admin_or_owner(query.user_id)
        query.archive(self.current_user)
class QueryRefreshResource(BaseResource):
    """Run a stored query on demand."""

    def post(self, query_id):
        """Execute query *query_id* with parameters taken from the query string."""
        query = get_object_or_404(models.Query.get_by_id_and_org, query_id,
                                  self.current_org)
        require_access(query.groups, self.current_user, not_view_only)

        params = collect_parameters_from_request(request.args)
        return run_query(query.data_source, params, query.query, query.id)
|
lostcaggy/coderbot | config.py | Python | gpl-2.0 | 1,410 | 0.007092 | ############################################################################
# CoderBot, a didactical programmable robot.
# Copyright (C) 2014, 2015 Roberto Previtera <info@coderbot.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You sh | ould have received a copy of the GNU General Public License along
# with this program; if not, write to the F | ree Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
############################################################################
import json
CONFIG_FILE = "coderbot.cfg"


class Config:
    """Process-wide CoderBot configuration, persisted as JSON in CONFIG_FILE."""

    # Last configuration loaded via read()/written via write();
    # shared by all callers.
    _config = {}

    @classmethod
    def get(cls):
        """Return the in-memory configuration (empty until read()/write())."""
        return cls._config

    @classmethod
    def read(cls):
        """Load the configuration from CONFIG_FILE and cache it."""
        # Bug fix: the file handle was previously never closed; the context
        # manager guarantees it is released even if json.load() raises.
        with open(CONFIG_FILE, 'r') as f:
            cls._config = json.load(f)
        return cls._config

    @classmethod
    def write(cls, config):
        """Replace the in-memory configuration and persist it to CONFIG_FILE."""
        cls._config = config
        # Bug fix: close (and flush) the file deterministically via 'with'.
        with open(CONFIG_FILE, 'w') as f:
            json.dump(cls._config, f)
        return cls._config
|
signal/suropy | suro/thriftgen/constants.py | Python | apache-2.0 | 244 | 0.008197 | #
# Autogenerated by Thrift Compiler (0.9.2)
#
# DO | NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException |
from ttypes import *
|
aliksey/projecteuler | problems/problem_062.py | Python | mit | 1,000 | 0 | # -------------------------------------------------------------------------------
# Name: Problem 62
# Purpose: projecteuler.net
#
# Author: aliksey
#
# Created: 06.04.2012
# Copyright: (c) aliksey 2012
# Licence: <your licence>
# --------------------------------------------------------------- | ----------------
import sys
def getID(n):
    """Return a signature shared by all permutations of n's decimal digits.

    The digits of *n* are sorted ascending and re-assembled with the smallest
    digit in the least-significant position, so two numbers built from the
    same multiset of digits map to the same integer.
    """
    digits = []
    while n > 0:
        digits.append(n % 10)
        n //= 10
    signature = 0
    for position, digit in enumerate(sorted(digits)):
        signature += digit * 10 ** position
    return signature
def main():
amount = dict()
number = dict()
for i in range(1, 10000):
n | = i ** 3
m = getID(n)
if m in amount.keys():
amount[m] += 1
if n < number[m]:
number[m] = n
else:
amount[m] = 1
number[m] = n
if amount[m] == 5:
print "Found 5er:", number[m]
sys.exit()
if __name__ == '__main__':
main()
|
gi0baro/weppy-assets | weppy_assets/webassets/script.py | Python | bsd-3-clause | 22,478 | 0.000667 | from __future__ import print_function
import shutil
import os, sys
import time
import logging
from .loaders import PythonLoader, YAMLLoader
from .bundle import get_all_bundle_files
from .exceptions import BuildError
from .updater import TimestampUpdater
from .merge import MemoryHunk
from .version import get_manifest
from .cache import FilesystemCache
from .utils import set, StringIO
__all__ = ('CommandError', 'CommandLineEnvironment', 'main')
# logging has WARNING as default level, for the CLI we want INFO. Set this
# as early as possible, so that user customizations will not be overwritten.
logging.getLogger('webassets.script').setLevel(logging.INFO)
class CommandError(Exception):
    # Raised by commands to signal a user-facing CLI failure (bad argument
    # combinations, unknown bundle names, ...); the CLI prints the message.
    pass
class Command(object):
    """Base class for a single CLI command of :class:`CommandLineEnvironment`.

    Subclasses implement ``__call__``.  Any attribute that is not found on
    the command itself is transparently delegated to the wrapped command
    environment, so subclasses may simply write ``self.log`` or
    ``self.environment``.
    """
    def __init__(self, cmd_env):
        # Keep a reference to the environment we delegate to.
        self.cmd = cmd_env
    def __getattr__(self, name):
        # Only invoked when normal attribute lookup fails; fall back to
        # the command-line environment.
        return getattr(self.cmd, name)
    def __call__(self, *args, **kwargs):
        # Concrete commands must override this.
        raise NotImplementedError()
class BuildCommand(Command):
    # CLI handler for ``webassets build``: resolves which bundles to build
    # and where their output should go, then builds them one by one.
    def __call__(self, bundles=None, output=None, directory=None, no_cache=None,
                 manifest=None, production=None):
        """Build assets.
        ``bundles``
            A list of bundle names. If given, only this list of bundles
            should be built.
        ``output``
            List of (bundle, filename) 2-tuples. If given, only these
            bundles will be built, using the custom output filenames.
            Cannot be used with ``bundles``.
        ``directory``
            Custom output directory to use for the bundles. The original
            basenames defined in the bundle ``output`` attribute will be
            used. If the ``output`` of the bundles are pointing to different
            directories, they will be offset by their common prefix.
            Cannot be used with ``output``.
        ``no_cache``
            If set, a cache (if one is configured) will not be used.
        ``manifest``
            If set, the given manifest instance will be used, instead of
            any that might have been configured in the Environment. The value
            passed will be resolved through ``get_manifest()``. If this fails,
            a file-based manifest will be used using the given value as the
            filename.
        ``production``
            If set to ``True``, then :attr:`Environment.debug`` will forcibly
            be disabled (set to ``False``) during the build.
        """
        # Validate arguments
        if bundles and output:
            raise CommandError(
                'When specifying explicit output filenames you must '
                'do so for all bundles you want to build.')
        if directory and output:
            raise CommandError('A custom output directory cannot be '
                               'combined with explicit output filenames '
                               'for individual bundles.')
        if production:
            # TODO: Reset again (refactor commands to be classes)
            self.environment.debug = False
        # TODO: Oh how nice it would be to use the future options stack.
        if manifest is not None:
            try:
                manifest = get_manifest(manifest, env=self.environment)
            except ValueError:
                manifest = get_manifest(
                    # abspath() is important, or this will be considered
                    # relative to Environment.directory.
                    "file:%s" % os.path.abspath(manifest),
                    env=self.environment)
            self.environment.manifest = manifest
        # Use output as a dict.
        if output:
            output = dict(output)
        # Validate bundle names
        bundle_names = bundles if bundles else (output.keys() if output else [])
        for name in bundle_names:
            if not name in self.environment:
                raise CommandError(
                    'I do not know a bundle name named "%s".' % name)
        # Make a list of bundles to build, and the filename to write to.
        if bundle_names:
            # TODO: It's not ok to use an internal property here.
            bundles = [(n,b) for n, b in self.environment._named_bundles.items()
                       if n in bundle_names]
        else:
            # Includes unnamed bundles as well.
            bundles = [(None, b) for b in self.environment]
        # Determine common prefix for use with ``directory`` option.
        if directory:
            prefix = os.path.commonprefix(
                [os.path.normpath(b.resolve_output())
                 for _, b in bundles if b.output])
            # dirname() gives the right value for a single file.
            prefix = os.path.dirname(prefix)
        to_build = []
        for name, bundle in bundles:
            # TODO: We really should support this. This error here
            # is just in place of a less understandable error that would
            # otherwise occur.
            if bundle.is_container and directory:
                raise CommandError(
                    'A custom output directory cannot currently be '
                    'used with container bundles.')
            # Determine which filename to use, if not the default.
            overwrite_filename = None
            if output:
                overwrite_filename = output[name]
            elif directory:
                # Re-root the bundle's output below ``directory``, keeping
                # only the part beyond the common prefix computed above.
                offset = os.path.normpath(
                    bundle.resolve_output())[len(prefix)+1:]
                overwrite_filename = os.path.join(directory, offset)
            to_build.append((bundle, overwrite_filename, name,))
        # Build.
        built = []
        for bundle, overwrite_filename, name in to_build:
            if name:
                # A name is not necessary available of the bundle was
                # registered without one.
                self.log.info("Building bundle: %s (to %s)" % (
                    name, overwrite_filename or bundle.output))
            else:
                self.log.info("Building bundle: %s" % bundle.output)
            try:
                if not overwrite_filename:
                    with bundle.bind(self.environment):
                        bundle.build(force=True, disable_cache=no_cache)
                else:
                    # TODO: Rethink how we deal with container bundles here.
                    # As it currently stands, we write all child bundles
                    # to the target output, merged (which is also why we
                    # create and force writing to a StringIO instead of just
                    # using the ``Hunk`` objects that build() would return
                    # anyway.
                    # NOTE(review): ``output`` is rebound here, shadowing the
                    # argument of the same name (already consumed above).
                    output = StringIO()
                    with bundle.bind(self.environment):
                        bundle.build(force=True, output=output,
                                     disable_cache=no_cache)
                    if directory:
                        # Only auto-create directories in this mode.
                        output_dir = os.path.dirname(overwrite_filename)
                        if not os.path.exists(output_dir):
                            os.makedirs(output_dir)
                    MemoryHunk(output.getvalue()).save(overwrite_filename)
                built.append(bundle)
            except BuildError as e:
                # A failed bundle is logged but does not abort the others.
                self.log.error("Failed, error was: %s" % e)
        if len(built):
            self.event_handlers['post_build']()
        # Non-zero exit status when at least one bundle failed to build.
        if len(built) != len(to_build):
            return 2
class WatchCommand(Command):
def __call__(self, loop=None):
"""Watch assets for changes.
``loop``
A callback, taking no arguments, to be called once every loop
iteration. Can be useful to integrate the command with other code.
If not specified, the loop wil call ``time.sleep()``.
"""
# TODO: This should probably also restart whe |
pjdelport/pip | tests/functional/test_install.py | Python | mit | 24,544 | 0 |
import os
import textwrap
import glob
from os.path import join, curdir, pardir
import pytest
from pip.utils import rmtree
from tests.lib import pyversion
from tests.lib.local_repos import local_checkout
from tests.lib.path import Path
@pytest.mark.network
def test_without_setuptools(script):
    # Installing from an sdist without setuptools present must fail with a
    # clear, user-facing message rather than an obscure import error.
    script.run("pip", "uninstall", "setuptools", "-y")
    result = script.run(
        "python", "-c",
        "import pip; pip.main(['install', 'INITools==0.2', '--no-use-wheel'])",
        expect_error=True,
    )
    assert (
        "setuptools must be installed to install from a source distribution"
        in result.stdout
    )
@pytest.mark.network
def test_pip_second_command_line_interface_works(script):
    """
    The versioned ``pip<PYVERSION>`` entry point must behave like ``pip``.
    """
    command = 'pip%s' % pyversion
    result = script.run(command, 'install', 'INITools==0.2')
    expected_egg_info = (
        script.site_packages / 'INITools-0.2-py%s.egg-info' % pyversion
    )
    expected_package = script.site_packages / 'initools'
    assert expected_egg_info in result.files_created, str(result)
    assert expected_package in result.files_created, str(result)
@pytest.mark.network
def test_install_from_pypi(script):
    """
    Installing a pinned package from PyPI must create both the egg-info
    directory and the package directory in site-packages.
    """
    result = script.pip('install', '-vvv', 'INITools==0.2')
    egg_info = script.site_packages / 'INITools-0.2-py%s.egg-info' % pyversion
    package_dir = script.site_packages / 'initools'
    assert egg_info in result.files_created, str(result)
    assert package_dir in result.files_created, str(result)
def test_editable_install(script):
    """
    ``pip install -e`` with a plain requirement (neither a local path nor a
    VCS URL) must fail cleanly without touching the filesystem.
    """
    result = script.pip('install', '-e', 'INITools==0.2', expect_error=True)
    expected_message = (
        "INITools==0.2 should either be a path to a local project or a VCS url"
    )
    assert expected_message in result.stdout
    assert not result.files_created
    assert not result.files_updated
@pytest.mark.network
def test_install_editable_from_svn(script, tmpdir):
    """
    Test checking out from svn.
    """
    # An editable svn install must leave a Subversion working copy
    # (the '.svn' directory) in the source checkout.
    result = script.pip(
        'install',
        '-e',
        '%s#egg=initools-dev' %
        local_checkout(
            'svn+http://svn.colorstudy.com/INITools/trunk',
            tmpdir.join("cache")
        )
    )
    result.assert_installed('INITools', with_files=['.svn'])
@pytest.mark.network
def test_download_editable_to_custom_path(script, tmpdir):
    """
    Test downloading an editable using a relative custom src folder.
    """
    script.scratch_path.join("customdl").mkdir()
    result = script.pip(
        'install',
        '-e',
        '%s#egg=initools-dev' %
        local_checkout(
            'svn+http://svn.colorstudy.com/INITools/trunk',
            tmpdir.join("cache")
        ),
        '--src',
        'customsrc',
        '--download',
        'customdl',
    )
    # The editable checkout must land under the custom --src directory...
    customsrc = Path('scratch') / 'customsrc' / 'initools'
    assert customsrc in result.files_created, (
        sorted(result.files_created.keys())
    )
    assert customsrc / 'setup.py' in result.files_created, (
        sorted(result.files_created.keys())
    )
    # ...and a copy of the package must appear under the --download dir.
    customdl = Path('scratch') / 'customdl' / 'initools'
    customdl_files_created = [
        filename for filename in result.files_created
        if filename.startswith(customdl)
    ]
    assert customdl_files_created
@pytest.mark.network
def test_editable_no_install_followed_by_no_download(script, tmpdir):
    """
    Test installing an editable in two steps (first with --no-install, then
    with --no-download).
    """
    # Step 1: --no-install only checks out the working copy; no egg-link yet.
    result = script.pip(
        'install',
        '-e',
        '%s#egg=initools-dev' %
        local_checkout(
            'svn+http://svn.colorstudy.com/INITools/trunk',
            tmpdir.join("cache"),
        ),
        '--no-install',
        expect_error=True,
    )
    result.assert_installed(
        'INITools', without_egg_link=True, with_files=['.svn'],
    )
    # Step 2: --no-download finishes the install from the existing checkout.
    # The URL must use the same svn+http scheme as step 1: the previous
    # 'git+http' here pointed a git client at the svn repository, which is
    # inconsistent with the '.svn' assertions above.
    result = script.pip(
        'install',
        '-e',
        '%s#egg=initools-dev' %
        local_checkout(
            'svn+http://svn.colorstudy.com/INITools/trunk',
            tmpdir.join("cache"),
        ),
        '--no-download',
        expect_error=True,
    )
    result.assert_installed('INITools', without_files=[curdir, '.svn'])
@pytest.mark.network
def test_no_install_followed_by_no_download(script):
    """
    Test installing in two steps (first with --no-install, then with
    --no-download).
    """
    egg_info_folder = (
        script.site_packages / 'INITools-0.2-py%s.egg-info' % pyversion
    )
    initools_folder = script.site_packages / 'initools'
    build_dir = script.venv / 'build' / 'INITools'
    # Phase 1: --no-install must only populate the build directory, not
    # site-packages.
    result1 = script.pip(
        'install', 'INITools==0.2', '--no-install', expect_error=True,
    )
    assert egg_info_folder not in result1.files_created, str(result1)
    assert initools_folder not in result1.files_created, (
        sorted(result1.files_created)
    )
    assert build_dir in result1.files_created, result1.files_created
    assert build_dir / 'INITools.egg-info' in result1.files_created
    # Phase 2: --no-download must install from (and consume) the build
    # directory created in phase 1.
    result2 = script.pip(
        'install', 'INITools==0.2', '--no-download', expect_error=True,
    )
    assert egg_info_folder in result2.files_created, str(result2)
    assert initools_folder in result2.files_created, (
        sorted(result2.files_created)
    )
    assert build_dir not in result2.files_created
    assert build_dir / 'INITools.egg-info' not in result2.files_created
def test_bad_install_with_no_download(script):
    """
    ``--no-download`` without a prior ``--no-install`` run has no build
    directory to install from and must explain that to the user.
    """
    result = script.pip(
        'install', 'INITools==0.2', '--no-download', expect_error=True,
    )
    hint = (
        "perhaps --no-download was used without first running "
        "an equivalent install with --no-install?"
    )
    assert hint in result.stdout
@pytest.mark.network
def test_install_dev_version_from_pypi(script):
    """
    Test using package==dev.
    """
    # The '===dev' pin requires allowing external/unverified files for
    # INITools on the (old) PyPI simple index.
    result = script.pip(
        'install', 'INITools===dev',
        '--allow-external', 'INITools',
        '--allow-unverified', 'INITools',
        expect_error=True,
    )
    assert (script.site_packages / 'initools') in result.files_created, (
        str(result.stdout)
    )
@pytest.mark.network
def test_install_editable_from_git(script, tmpdir):
    """
    An editable install cloned from Git must leave a '.git' working tree.
    """
    checkout = local_checkout(
        'git+http://github.com/pypa/pip-test-package.git',
        tmpdir.join("cache"),
    )
    result = script.pip(
        'install', '-e', '%s#egg=pip-test-package' % checkout,
        expect_error=True,
    )
    result.assert_installed('pip-test-package', with_files=['.git'])
@pytest.mark.network
def test_install_editable_from_hg(script, tmpdir):
    """
    Test cloning from Mercurial.
    """
    # An editable hg install must leave a Mercurial working copy ('.hg').
    result = script.pip(
        'install', '-e',
        '%s#egg=ScriptTest' %
        local_checkout(
            'hg+https://bitbucket.org/ianb/scripttest',
            tmpdir.join("cache"),
        ),
        expect_error=True,
    )
    result.assert_installed('ScriptTest', with_files=['.hg'])
@pytest.mark.network
def test_vcs_url_final_slash_normalization(script, tmpdir):
    """
    Test that presence or absence of final slash in VCS URL is normalized.
    """
    # The trailing '/' before the '#egg=' fragment must not break the
    # checkout; success of the pip call is the assertion here.
    script.pip(
        'install', '-e',
        '%s/#egg=ScriptTest' %
        local_checkout(
            'hg+https://bitbucket.org/ianb/scripttest',
            tmpdir.join("cache"),
        ),
    )
@pytest.mark.network
def test_install_editable_from_bazaar(script, tmpdir):
    """
    Test checking out from Bazaar.
    """
    # '@174' pins the branch revision; the '.bzr' directory proves a real
    # Bazaar working tree was created.
    result = script.pip(
        'install', '-e',
        '%s/@174#egg=django-wikiapp' %
        local_checkout(
            'bzr+http://bazaar.launchpad.net/%7Edjango-wikiapp/django-wikiapp'
            '/release-0.1',
            tmpdir.join("cache"),
        ),
        expect_error=True,
    )
    result.assert_installed('django-wikiapp', with_files=['.bzr'])
@pytest.mark.network
def test_vcs_url_urlquote_ |
antoinecarme/pyaf | tests/model_control/detailed/transf_RelativeDifference/model_control_one_enabled_RelativeDifference_Lag1Trend_Seasonal_DayOfWeek_SVR.py | Python | bsd-3-clause | 170 | 0.047059 | import tests | .model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['RelativeDifference'] , ['Lag1Trend'] , ['Seasonal_DayOfWeek'] , | ['SVR'] ); |
MinchinWeb/wm_todo | tests/test_append.py | Python | gpl-3.0 | 1,200 | 0.0075 | # TODO.TXT-CLI-python test script
# Copyright (C) 2011-2012 Sigmavirus24, Jeff Stein
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# TLDR: This is licensed under the GPLv3. See LICENSE for more details.
import unittest
import base
import todo
class AppendTest(base.BaseTest):
    """Exercise the ``append`` command against every freshly added task."""
    def test_append(self):
        # Add self.num unprioritised tasks, append to each one, then check
        # every resulting line matches the expected pattern.
        new_lines = self._test_lines_no_pri(self.num)
        todo.cli.addm_todo("\n".join(new_lines))
        for item_number in range(1, self.num + 1):
            todo.cli.append_todo([str(item_number), "testing", "append"])
        self.assertNumLines(self.num, "Test\s\d+\stesting\sappend")
# Allow running this test module directly with the unittest runner.
if __name__ == "__main__":
    unittest.main()
|
boerngen-schmidt/commuter-simulation | code/simulation/car.py | Python | gpl-3.0 | 5,072 | 0.001577 | """
Created on 11.09.2014
@author: benjamin@boerngen-schmidt.de
"""
from abc import ABCMeta, abstractmethod
import random
import datetime
class BaseCar(metaclass=ABCMeta):
    """
    Represents the fundamentals of a car
    """
    def __init__(self, env, tank_size):
        """
        Constructor
        Registers this car on the environment (env.car) and starts with a
        randomly filled tank.
        :type tank_size: int
        :type env: simulation.environment.SimulationEnvironment
        """
        env.car = self
        self.env = env
        self._tankSize = float(tank_size)
        self._tankFilling = BaseCar._random_tank_filling(self._tankSize)
        self._current_position = None
        self._fuel_type = 'e5'
        self._driven_distance = float(0)
        # self.log = logging.getLogger('spritsim.Car' + commuter_id)
    @staticmethod
    def _random_tank_filling(maximum):
        """
        Returns a random tank filling in litre
        Method for initializing a cars with a random tank filling between 10 and maximum litres
        :param maximum: maximum tank capacity
        :return: A random filling
        :rtype: float
        """
        # NOTE(review): assumes maximum >= 10 litres — confirm for small tanks.
        return random.uniform(10, maximum)
    @property
    def current_position(self):
        """Returns the nodes target ID
        :rtype: int
        """
        return self._current_position
    @property
    def driven_distance(self):
        """
        The car's odometer
        :return: The total distance the car has traveled
        :rtype: float
        """
        return self._driven_distance
    @property
    def fuel_type(self):
        """
        The car's fuel type
        :return: Type of fuel (e5|diesel)
        :rtype: str
        """
        return self._fuel_type
    @property
    def tank_size(self):
        """
        :return: Size of the car's tank in litre
        :rtype: float
        """
        return self._tankSize
    @property
    def current_filling(self):
        """
        :return: Current filling of the car's tank
        :rtype: float
        """
        return self._tankFilling
    def consume_fuel(self, speed, distance, road_type):
        """
        :param int speed: Maximum allowed speed
        :param float distance: Length of the segment
        :param simulation.routing.route.RouteClazz road_type: The type of the road
        :return:
        """
        # NOTE(review): speed and road_type are currently unused here — fuel
        # use depends only on distance times the subclass consumption rate.
        self._tankFilling -= self.consumption_per_km * distance
    @property
    @abstractmethod
    def consumption_per_km(self):
        """
        :return: The fuel consumption of the car per km
        :rtype: float
        """
        pass
    @property
    def km_left(self):
        """
        Returns the remaining km the car can drive
        :return: Distance car is able to drive
        :rtype: float
        """
        return self.current_filling / self.consumption_per_km
    def refilled(self):
        """Car has been refilled at a filling station"""
        self._tankFilling = self._tankSize
    def drive(self, ignore_refill_warning=False):
        """Lets the car drive the given route
        On arrival at the destination the a CommuterAction for the route is returned or if the car needs refilling
        the action to search for a refilling station is returned.
        :param ignore_refill_warning: Tells the function not to raise a RefillWarning (default: False)
        :type ignore_refill_warning: bool
        :raises RefillWarning: If the tank filling is less or equal 5.0 liter
        """
        for segment in self.env.route:
            self._do_driving(segment)
            self.env.consume_time(datetime.timedelta(seconds=segment.travel_time))
            # check if driving the segment has drained the tank to reserve
            if self._tankFilling <= 5.0 and not ignore_refill_warning:
                raise RefillWarning()
    def _do_driving(self, segment):
        """
        Drives the given route segment
        Uses the segment data to simulate the driving of the car. Thereby fuel is consumed to the amount calculated
        by the consume_fuel method.
        :param segment: a single fragment of the route
        :type segment: simulation.routing.route.RouteFragment
        """
        self.consume_fuel(segment.speed_limit, segment.length, segment.road_type)
        self._driven_distance += segment.length
        self._current_position = segment.target
class PetrolCar(BaseCar):
    """A petrol-driven car: 50 litre tank, burns E5 fuel."""
    # 10 litres per 100 km, expressed per kilometre.
    _LITRES_PER_KM = 0.1
    def __init__(self, env):
        super().__init__(env, 50)
        self._fuel_type = 'e5'
    @property
    def consumption_per_km(self):
        """Fuel consumption per kilometre (10 L / 100 km).
        :return: fuel consumption per 1 km in liter
        :rtype: float
        """
        return self._LITRES_PER_KM
class DieselCar(BaseCar):
    """A diesel-driven car: 50 litre tank, burns diesel fuel."""
    # 8 litres per 100 km, expressed per kilometre.
    _LITRES_PER_KM = 0.08
    def __init__(self, env):
        super().__init__(env, 50)
        self._fuel_type = 'diesel'
    @property
    def consumption_per_km(self):
        """Fuel consumption per kilometre (8 L / 100 km).
        :return: fuel consumption per 1 km in liter
        :rtype: float
        """
        return self._LITRES_PER_KM
class RefillWarning(Exception):
    # Raised by BaseCar.drive() when the tank drops to <= 5.0 litres,
    # signalling that the commuter should look for a filling station.
    pass
|
lucperkins/heron | heron/tools/ui/src/python/handlers/common/utils.py | Python | apache-2.0 | 834 | 0.003597 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Copyright 2016 Twitter. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
''' utils.py '''
# pylint: disable=invalid-name
def className(selected, item):
    '''
    Return the CSS class name for a navigation item.
    :param selected: the currently active item
    :param item: the item being rendered
    :return: ``"active"`` when ``item`` equals ``selected``, else ``''``
    '''
    # Conditional expression replaces the two-branch if/return.
    return "active" if selected == item else ''
|
jinnykoo/christmas | src/oscar/apps/order/south_migrations/0012_auto__add_field_paymentevent_reference.py | Python | bsd-3-clause | 33,777 | 0.007786 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from oscar.core.compat import AUTH_USER_MODEL, AUTH_USER_MODEL_NAME
class Migration(SchemaMigration):
    def forwards(self, orm):
        # Adding field 'PaymentEvent.reference'
        # Empty-string default keeps existing rows valid; keep_default=False
        # means the default is applied once and not kept at the DB level.
        db.add_column('order_paymentevent', 'reference',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=128, blank=True),
                      keep_default=False)
    def backwards(self, orm):
        # Deleting field 'PaymentEvent.reference'
        # Reverse migration: drop the column added in forwards().
        db.delete_column('order_paymentevent', 'reference')
models = {
'address.country': {
'Meta': {'ordering': "('-is_highlighted', 'name')", 'object_name': 'Country'},
'is_highlighted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'i | s_shipping_country': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'iso_3166_1_a2': ('django.db.models.fields.CharField', [], {'max_length': '2', 'primary_key': 'True'}),
'iso_3166_1_a3': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'db_index': 'True'}),
'iso_3166_1_numeric': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'printable_name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
AUTH_USER_MODEL: {
'Meta': {'object_name': AUTH_USER_MODEL_NAME},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'catalogue.attributeentity': {
'Meta': {'object_name': 'AttributeEntity'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'entities'", 'to': "orm['catalogue.AttributeEntityType']"})
},
'catalogue.attributeentitytype': {
'Meta': {'object_name': 'AttributeEntityType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'})
},
'catalogue.attributeoption': {
'Meta': {'object_name': 'AttributeOption'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['catalogue.AttributeOptionGroup']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'option': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'catalogue.attributeoptiongroup': {
'Meta': {'object_name': 'AttributeOptionGroup'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'catalogue.category': {
'Meta': {'ordering': "['full_name']", 'object_name': 'Category'},
'depth': ('django.db.models.fields.PositiveIntegerField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'full_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'numchild': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
},
'catalogue.option': {
'Meta': {'object_name': 'Option'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'Required'", 'max_length': '128'})
},
'catalogue.product': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'Product'},
'attributes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.ProductAttribute']", 'through': "orm['catalogue.ProductAttributeValue']", 'symmetrical': 'False'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Category']", 'through': "orm['catalogue.ProductCategory']", 'symmetrical': 'False'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.m |
Urinx/Project_Euler_Answers | 033.py | Python | gpl-2.0 | 995 | 0.030151 | #!/usr/bin/env python
#coding:utf-8
"""
Digit canceling fractions
The fraction 49/98 is a curious fraction, as an inexperienced mathematician in attempting to simplify it may incorrectly believe that 49/98 = 4/8, which is correct, is obtained by cancelling the 9s.
We shall consider fractions like, 30/50 = 3/5, to be trivial examples.
There are exactly four non-trivial examples of this type of fraction, less than one in value, and containing two digits in the numerator and denominator.
If the product of these four fractions is given in its lowest common terms, find the value of the denominator.
"""
'''
ab/bc=a/c
b=[2-9]
a<b
'''
def answer():
    """Print (and return) the denominator of the product of the four
    non-trivial digit-cancelling fractions, reduced to lowest terms.

    A two-digit fraction ab/bc is digit-cancelling when ab/bc == a/c after
    striking the shared digit b.  Equality is tested with exact integer
    cross-multiplication instead of the original float comparison, which
    is both precise and portable.

    :return: the reduced denominator (100)
    """
    numerator = 1
    denominator = 1
    for b in range(2, 10):          # the cancelled (shared) digit
        for a in range(1, b):       # a < b keeps the fraction below one
            for c in range(1, 10):
                # (10a + b) / (10b + c) == a / c  <=>  (10a+b)*c == a*(10b+c)
                if (10 * a + b) * c == a * (10 * b + c):
                    numerator *= a
                    denominator *= c
    # Integer division reduces the product to lowest terms here, since the
    # numerator product divides the denominator product (8 into 800).
    result = denominator // numerator
    print(result)
    return result
# --- timing harness: run the solver and report elapsed wall-clock time ---
import time
tStart=time.time()
answer()
print 'run time=',time.time()-tStart
# 100
# run time= 0.0003981590271
huggingface/pytorch-transformers | src/transformers/models/herbert/__init__.py | Python | apache-2.0 | 1,766 | 0.000566 | # flake8: noqa
# There's no way to ignore "F401 '...' imported but unused" warnings in this
# module, but to preserve other warnings. So, don't check this module at all.
# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING
from ...file_utils import _BaseLazyModule, is_tokenizers_available
# Map each submodule to the public names it provides; consumed by the
# _LazyModule proxy below to resolve attributes on first access.
_import_structure = {
    "tokenization_herbert": ["HerbertTokenizer"],
}
if is_tokenizers_available():
    # The fast (Rust-backed) tokenizer is only exposed when the optional
    # ``tokenizers`` package is installed.
    _import_structure["tokenization_herbert_fast"] = ["HerbertTokenizerFast"]
if TYPE_CHECKING:
    # Static type checkers see real imports so the names resolve.
    from .tokenization_herbert import HerbertTokenizer
    if is_tokenizers_available():
        from .tokenization_herbert_fast import HerbertTokenizerFast
else:
    import importlib
    import os
    import sys
    class _LazyModule(_BaseLazyModule):
        """
        Module class that surfaces all objects but only performs associated imports when the objects are requested.
        """
        __file__ = globals()["__file__"]
        __path__ = [os.path.dirname(__file__)]
        def _get_module(self, module_name: str):
            # Resolve a submodule relative to this package on first use.
            return importlib.import_module("." + module_name, self.__name__)
    # Replace this module in sys.modules with the lazy proxy so heavy
    # tokenizer imports are deferred until actually accessed.
    sys.modules[__name__] = _LazyModule(__name__, _import_structure)
|
oVirt/ovirt-hosted-engine-setup | src/ovirt_hosted_engine_setup/connect_storage_server.py | Python | lgpl-2.1 | 1,127 | 0 | #
# ovirt-hosted-engine-setup -- ovirt hosted engine setup
# Copyright (C) 2015 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
"""Connect storage server"""
from ovirt_hosted_engine_ha.client import client
from ovirt_hosted_engine_setup import constants as ohostedcons
if __name__ == "__main__":
    # Connect this host to the hosted-engine storage server through the HA
    # client, using the setup-wide connection timeout constant.
    ha_cli = client.HAClient()
    ha_cli.connect_storage_server(
        timeout=ohostedcons.Const.STORAGE_SERVER_TIMEOUT,
    )
|
schieb/angr | angr/procedures/posix/readdir.py | Python | bsd-2-clause | 2,207 | 0.009515 | import angr
from collections import namedtuple
import logging
l = logging.getLogger(name=__name__)
Dirent = namedtuple('dirent', ('d_ino', 'd_off', 'd_reclen', 'd_type', 'd_name'))
class readdir(angr.SimProcedure):
    # Symbolic summary of readdir(3): returns a pointer to a freshly
    # malloc'd, partly-symbolic ``struct dirent`` or NULL on failure.
    struct = None     # Dirent namedtuple of bitvectors, set by _build_amd64()
    condition = None  # symbolic success condition, set by _build_amd64()
    def run(self, dirp): # pylint: disable=arguments-differ
        # TODO: make sure argument is actually a dir struct
        if self.state.arch.name != 'AMD64':
            l.error('readdir SimProcedure is only implemented for AMD64')
            return 0
        self._build_amd64()
        self.instrument()
        malloc = angr.SIM_PROCEDURES['libc']['malloc']
        # 19 header bytes (d_ino 8 + d_off 8 + d_reclen 2 + d_type 1) plus
        # 256 bytes for d_name (255 chars + NUL) — see _store_amd64().
        pointer = self.inline_call(malloc, 19 + 256).ret_expr
        self._store_amd64(pointer)
        # NULL when the symbolic condition is false, the struct otherwise.
        return self.state.solver.If(self.condition, pointer, self.state.solver.BVV(0, self.state.arch.bits))
    def instrument(self):
        """
        Override this function to instrument the SimProcedure.
        The two useful variables you can override are self.struct, a named tuple of all the struct
        fields, and self.condition, the condition for whether the function succeeds.
        """
        pass
    def _build_amd64(self):
        # d_ino/d_off are concrete zero; reclen, type and name are fresh
        # unconstrained symbols so instrument() overrides can constrain them.
        self.struct = Dirent(self.state.solver.BVV(0, 64), # d_ino
                             self.state.solver.BVV(0, 64), # d_off
                             self.state.solver.BVS('d_reclen', 16, key=('api', 'readdir', 'd_reclen')), # d_reclen
                             self.state.solver.BVS('d_type', 8, key=('api', 'readdir', 'd_type')), # d_type
                             self.state.solver.BVS('d_name', 255*8, key=('api', 'readdir', 'd_name'))) # d_name
        self.condition = self.state.solver.BoolS('readdir_cond') # TODO: variable key
    def _store_amd64(self, ptr):
        # Integer fields are stored little-endian (x86-64); the name is a
        # raw byte string stored big-endian so bytes keep their order.
        stores = lambda offset, val: self.state.memory.store(ptr + offset, val, endness='Iend_BE')
        storei = lambda offset, val: self.state.memory.store(ptr + offset, val, endness='Iend_LE')
        storei(0, self.struct.d_ino)
        storei(8, self.struct.d_off)
        storei(16, self.struct.d_reclen)
        storei(18, self.struct.d_type)
        stores(19, self.struct.d_name)
        stores(19+255, self.state.solver.BVV(0, 8))  # NUL-terminate d_name
|
poppogbr/genropy | packages/test15/webpages/tools/server_store.py | Python | lgpl-2.1 | 1,125 | 0.006222 | # -*- coding: UTF-8 -*-
#
"""ServerStore tester"""
class GnrCustomWebPage(object):
    """ServerStore test page: three scenarios exercising genropy server-side
    stores (helpers like common_form come from the py_requires mixins)."""

    py_requires = "gnrcomponents/testhandler:TestHandlerFull,storetester:StoreTester"
    dojo_theme = 'claro'

    def test_1_current_page(self, pane):
        """On current page """
        self.common_form(pane, datapath='test_1')

    def test_2_external_page(self, pane):
        """Set in external store"""
        center = self.common_pages_container(pane, height='350px', background='whitesmoke',
                                             datapath='test_2')
        self.common_form(center)

    def test_3_server_data(self, pane):
        """Server shared data """
        center = self.common_pages_container(pane, height='350px', background='whitesmoke',
                                             datapath='test_3')
        # Bind '.foo.bar' to a server-side path so it is shared across pages.
        center.data('.foo.bar', _serverpath='xx')
        fb = center.formbuilder(cols=1, border_spacing='3px')
        fb.textbox(value='^.foo.bar', lbl='Server store value')
        fb.textbox(value='^.foo.baz', lbl='Value not in server subscribed path')
        fb.button('Ping', action='genro.ping()')
tychoish/dtf | docs/bin/makefile_builder.py | Python | apache-2.0 | 6,931 | 0.002453 | #!/usr/bin/python
# Copyright 2012 10gen, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Sam Kleinman (tychoish)
import os.path
import os
def print_output(list):
    """Print every entry of *list* on its own line."""
    for entry in list:
        print(entry)
def write_file(list, filename):
    """Write each entry of *list* to *filename*, one entry per line.

    Creates the parent directory (including intermediate levels) if it
    does not exist yet.
    """
    # Fixed: the original used filename.rsplit('/', 1)[0], which returns the
    # filename itself when there is no '/' (then mkdir'd a directory with the
    # file's name), and os.mkdir, which cannot create nested directories.
    dirpath = os.path.dirname(filename)
    if dirpath and not os.path.isdir(dirpath):
        os.makedirs(dirpath)

    with open(filename, 'w') as f:
        for line in list:
            f.write(line + '\n')
class BuildFileError(Exception):
    """Raised when a BuildFile is constructed or manipulated incorrectly."""

    def __init__(self, msg=None):
        self.msg = msg

    def __str__(self):
        if self.msg is not None:
            return "Error: " + self.msg
        return "Error in handling BuildFile."
class BuildFile(object):
    """In-memory representation of a build file, organized in named blocks.

    self.builder maps block names to lists of lines; the special '_all'
    block (aliased as self.buildfile) accumulates every line added to any
    other block, in insertion order.
    """

    def __init__(self, buildfile=None):
        self.builder = {'_all': []}
        self.buildfile = self.builder['_all']

        if buildfile is None:
            pass
        elif type(buildfile) is list:
            for line in buildfile:
                if type(line) is list:
                    raise BuildFileError('Cannot instantiate BuildFile with nested list.')
                    break
                else:
                    self.builder['_all'].append(line)
        else:
            raise BuildFileError('Instantiated BuildFile object with malformed argument.')

    # the following method is used internally to construct and
    # maintain the internal representation of the buildfile.
    # Fixed: the original defined _add_to_builder twice back to back; the
    # first definition was dead code shadowed by the second, which is the
    # one kept here.  Also replaced the string-identity test `is '_all'`
    # (CPython interning accident) with an equality test.
    def _add_to_builder(self, data, block, raw=False):
        """Append *data* (a str, or a list of str when raw=True) to *block*.

        Lines for any non-'_all' block are mirrored into '_all' as well.
        """
        def add(data, block):
            if block == '_all':
                pass
            else:
                self.buildfile.append(data)
            if block in self.builder:
                self.builder[block].append(data)
            else:
                self.builder[block] = [data]

        if raw is True:
            for line in data:
                add(line, block)
        elif type(data) is not str:
            raise BuildFileError('Added malformed data to BuildFile.')
        else:
            add(data, block)

    # The following methods produce output for public use.
    def get_block(self, block='_all'):
        """Return the list of lines stored in *block*."""
        return self.builder[block]

    def print_content(self, block_order=['_all']):
        """Print the blocks named in *block_order*, concatenated in order."""
        output = []

        if type(block_order) is not list:
            raise BuildFileError('Cannot print blocks not specified as a list.')
        else:
            for block in block_order:
                output.append(self.builder[block])

            output = [item for sublist in output for item in sublist]
            print_output(output)

    def print_block(self, block='_all'):
        """Print a single block, or raise if it does not exist."""
        if block not in self.builder:
            raise BuildFileError('Error: ' + block + ' not specified in buildfile')
        else:
            print_output(self.builder[block])

    def write(self, filename, block_order=['_all']):
        """Write the blocks named in *block_order* to *filename*."""
        output = []

        if type(block_order) is not list:
            raise BuildFileError('Cannot write blocks not specified as a list.')
        else:
            for block in block_order:
                output.append(self.builder[block])

            output = [item for sublist in output for item in sublist]
            write_file(output, filename)

    def write_block(self, filename, block='_all'):
        """Write a single block to *filename*, or raise if it does not exist."""
        if block not in self.builder:
            raise BuildFileError('Error: ' + block + ' not specified in buildfile')
        else:
            write_file(self.builder[block], filename)
class MakefileError(BuildFileError):
    """Makefile-specific variant of BuildFileError; same semantics."""
    pass
class MakefileBuilder(BuildFile):
    """BuildFile specialization that emits GNU Make syntax.

    Provides helpers for targets, variables, recipe lines and comments;
    content is accumulated per named block (see BuildFile).
    """

    def __init__(self, makefile=None):
        super(MakefileBuilder, self).__init__(makefile)
        self.makefile = self.buildfile

    # The following two methods allow more direct interaction with the
    # internal representation of the makefile than the other methods.
    def block(self, block):
        """Register a new empty block, prefixed with a section break."""
        if block in self.builder:
            raise MakefileError('Cannot add "' + block + '" to Makefile. ' + block + ' already exists.')
        else:
            self.builder[block] = []
            self.section_break(block, block)

    def raw(self, lines, block='_all'):
        """Append a flat list of pre-formatted lines verbatim."""
        if type(lines) is list:
            o = []
            for line in lines:
                if type(line) is list:
                    raise MakefileError('Cannot add nested lists to a Makefile with raw().')
                else:
                    o.append(line)
            self._add_to_builder(data=o, block=block, raw=True)
        else:
            raise MakefileError('Cannot add non-list raw() content to Makefile.')

    # The following methods constitute the 'public' interface for
    # building makefile.
    def section_break(self, name, block='_all'):
        self._add_to_builder('\n\n########## ' + name + ' ##########', block)

    def comment(self, comment, block='_all'):
        self._add_to_builder('\n# ' + comment, block)

    def newline(self, n=1, block='_all'):
        for i in range(n):
            self._add_to_builder('', block)

    def target(self, target, dependency=None, block='_all'):
        """Add a 'target: dependency' rule line."""
        if dependency is None:
            self._add_to_builder(target + ':', block)
        else:
            self._add_to_builder(target + ':' + dependency, block)

    def var(self, variable, value, block='_all'):
        self._add_to_builder(variable + ' = ' + value, block)

    def append_var(self, variable, value, block='_all'):
        self._add_to_builder(variable + ' += ' + value, block)

    def job(self, job, display=False, ignore=False, block='_all'):
        """Add a tab-indented recipe line.

        display=False prefixes '@' (suppress echo); ignore=True prefixes
        '-' (ignore the command's exit status).
        """
        o = '\t'

        if display is False:
            o += '@'
        if ignore is True:
            o += '-'

        o += job
        self._add_to_builder(o, block)

    def message(self, message, block='_all'):
        """Add an 'echo <message>' recipe line."""
        m = 'echo ' + message
        self.job(job=m, display=False, block=block)

    # Class-level short alias for message(); part of the public interface.
    msg = message
|
melrcm/Optimizepy | allocation_sharpe_optimizer.py | Python | lgpl-2.1 | 7,000 | 0.016 | import datetime as dt
import numpy as np
import simulate_portfolio_allocation as smlt
import all_permutes as permutes
import get_data
def allocation_op(symbols, exchange, allow_short, days = None, filter_symbols = 5, data_allocate = None, dt_start = None, dt_end = None):
''' Function ALLOCATION / SHARPE RATION OPTIMIZER
Tests all possible portfolio allocations for **symbols** and determines which one had the **best Sharpe ratio** for the period [today - *days*] to today _or_ from *dt_start* to *dt_end*.
days = days backwards from today. It will measure *real* days, not just trading days.
dt_start and dt_days = dates (from) and (to). It will measure trading days. Alternative to "days".
na_price = array of prices for the symbols, including a first column with dates. Optional.
allow_short = Is going short allowed in the model?
filter_symbols = (int) calculate the sharpe ratio for all symbols inputed
and only choose and try different allocations for those with the highest
sharpe. 0 to deactivate. '''
# I'll copy symbols because manipulating it for some reason changes its value for higher scopes as backtester ???
symbols_op = symbols[:]
# 0. Start and End dates from range of days
if days == None and (dt_start == None or dt_end == None):
raise Exception("Either (days) or (dt_start and dt_end) must be inputed.")
elif days != None:
dt_end = dt.datetime.now()
dt_start = dt_end-dt.timedelta(days)
else:
days = (dt_end-dt_start).days
# 1. Get data, if needed
if data_allocate == None:
data_allocate = get_data.get_dat | a(symbols, exchange, dt_start, dt_end)
# 2. Fli | p symbols & na_price if needed
flip_symbols = symbols[:]
if allow_short == 1:
long_or_short = data_allocate[-1,1:] / data_allocate[0,1:]
for i in range(len(long_or_short)):
if long_or_short[i] < 1:
data_allocate[:,i+1] = np.flipud(data_allocate[:,i+1])
flip_symbols[i] = '-'+flip_symbols[i]
# 3. Filter symbols, if needed, copy values and assign corresponding length to matrix
print
if filter_symbols == 0 or len(symbols) <= filter_symbols:
na_price_chosen = data_allocate
permutes_length = len(symbols_op)
filter_chosen = range(len(symbols_op)+1)
else:
permutes_length = filter_symbols
print 'Pre-filtering symbols...'
print
filter_sharpe = np.array([])
for i in range(len(symbols_op)):
this_allocation = np.zeros(len(symbols_op))
this_allocation[i] = 1.0
get_sharpe, get_returns = smlt.simulate(data_allocate[:,1:], dt_start, dt_end, symbols_op, this_allocation)
filter_sharpe = np.insert(filter_sharpe, len(filter_sharpe), get_sharpe)
filter_chosen = np.array([])
for i in range(filter_symbols):
chosen_sym_idx = np.argmax(filter_sharpe)
filter_chosen = np.insert(filter_chosen, len(filter_chosen), chosen_sym_idx, axis=0)
filter_sharpe[chosen_sym_idx] = 0
# Select symbols
filter_chosen = filter_chosen.astype(int)
symbols_op = list(np.array(symbols_op)[filter_chosen])
flip_symbols = list(np.array(flip_symbols)[filter_chosen])
# Select columns from na_price and also column 0 (dates)
filter_chosen += 1
filter_chosen = np.insert(filter_chosen, 0, 0, axis=0)
na_price_chosen = data_allocate[:,filter_chosen]
# 4. Create a matrix for all possible allocations that sum up to 1
# with 0.1 steps
print
matrix = permutes.permutes_f(0.1, permutes_length)
print
# 5. Simulate Sharpe ratio for each allocation
num_rows = len(matrix[:,0])
max_sharpe = 0.0
print 'Computing allocations ('+str(num_rows)+' left)...'
print
max_sharpe = 0
max_returns = 0
for i in range(num_rows):
# Print indicator of progress each 5 computations
if i%5 == 0:
print i,'of',num_rows
this_allocation = np.array(matrix[i,:])
get_sharpe, get_returns = smlt.simulate(na_price_chosen[:,1:], dt_start, dt_end, symbols_op, this_allocation)
# If this sharpe is better than the one saved, then save this one instead
if get_sharpe > max_sharpe:
max_sharpe = get_sharpe
max_returns = get_returns
best_allocation = matrix[i,:]
print
print '-------------------------------------'
print
print 'BETTER ALLOCATION FOUND'
print 'Sharpe ratio:',get_sharpe
print
print flip_symbols
print this_allocation
print
print '-------------------------------------'
print
print
print '-------------------------------------'
print
# If the best allocation loses money, then don't invest in anything
if max_returns <= 1 or max_sharpe <= 0:
max_returns = 1
trading_days = ((dt_end-dt_start).days * 252) / 365
max_sharpe = 1 * np.sqrt(trading_days)
best_allocation = np.zeros(len(symbols_op))
time_n = dt.datetime.now().strftime('%Y-%m-%d')
one = 'For a '+str(days)+' days period ('+str(time_n)+'):'
two = '-------------------------------------'
three = 'Symbols (minus sign = going short): '+str(flip_symbols)
four = 'Best allocation: '+str(best_allocation)
five = 'Max sharpe: '+str(max_sharpe)
six = 'Cumulative returns: '+str((max_returns-1)*100)+'% ('+str(days)+' days period)'
print one
print two
print three
print four
print five
print six
#SAVE FILE
#filename = time_n+'.'+dt.datetime.now().strftime('%H%M%S%f')+'.txt'
#save = one+'\r\n '+two+'\r\n '+three+'\r\n '+four+'\r\n '+five+'\r\n '+six+'\r\n'
#text_file = open(filename, "w")
#text_file.write(save)
#text_file.close()
best_allocation = list(best_allocation)
return best_allocation, flip_symbols, filter_chosen
# Call function example
# DEPRECATED
#dt_start = dt.datetime(2011, 1, 1)
#dt_end = dt.datetime(2011, 12, 31)
#symbols = ['AAPL', 'GLD', 'GOOG', 'XOM']
#dt_start = dt.datetime(2010, 1, 1)
#dt_end = dt.datetime(2010, 12, 31)
#symbols = ['BRCM', 'TXN', 'AMD', 'ADI']
#dt_start = dt.datetime(2011, 1, 1)
#dt_end = dt.datetime(2011, 12, 31)
#symbols = ['BRCM', 'ADBE', 'AMD', 'ADI']
#exchange = 'fx'
#days = 30
#symbols = ['EURUSD', 'EURGBP', 'EURCHF', 'EURCAD', 'EURJPY']
#allow_short = 1
#exchange = 'fx'
#days = 30
#symbols = ['EURUSD', 'EURGBP', 'EURCHF', 'EURCAD', 'EURJPY']
#allow_short = 1
#filter_symbols = 3
#allocation_op(symbols, exchange, allow_short, days, filter_symbols)
|
neurokernel/retina-lamina | examples/retlam_demo/retlam_demo.py | Python | bsd-3-clause | 11,654 | 0.002231 | #!/usr/bin/env python
import os, resource, sys
import argparse
import numpy as np
import networkx as nx
import neurokernel.core_gpu as core
from neurokernel.pattern import Pattern
from neurokernel.tools.logging import setup_logger
from neurokernel.tools.timing import Timer
from neurokernel.LPU.LPU import LPU
import retina.retina as ret
import lamina.lamina as lam
import retina.geometry.hexagon as r_hx
import lamina.geometry.hexagon as l_hx
from retina.InputProcessors.RetinaInputProcessor import RetinaInputProcessor
from neurokernel.LPU.OutputProcessors.FileOutputProcessor import FileOutputProcessor
from retina.screen.map.mapimpl import AlbersProjectionMap
from retina.configreader import ConfigReader
from retina.NDComponents.MembraneModels.PhotoreceptorModel import PhotoreceptorModel
from retina.NDComponents.MembraneModels.BufferPhoton import BufferPhoton
from retina.NDComponents.MembraneModels.BufferVoltage import BufferVoltage
import gen_input as gi
dtype = np.double
RECURSION_LIMIT = 80000
def setup_logging(config):
    '''
    Logging is useful for debugging
    purposes. By default errors that
    are thrown during simulation do
    not appear on screen.
    '''
    log = config['General']['log']
    # 'file', 'screen' or 'both' select the logging destination(s).
    file_name = 'neurokernel.log' if log in ('file', 'both') else None
    screen = log in ('screen', 'both')
    logger = setup_logger(file_name=file_name, screen=screen)
def get_retina_id(i):
    """Return the LPU identifier string for retina number *i*."""
    return 'retina%s' % (i,)
def get_lamina_id(i):
    """Return the LPU identifier string for lamina number *i*."""
    return 'lamina%s' % (i,)
def add_retina_LPU(config, retina_index, retina, manager):
    '''
    This method adds Retina LPU and its parameters to the manager
    so that it can be initialized later. Depending on configuration
    input can either be created in advance and read from file or
    generated during simulation by a generator object.

    --
    config: configuration dictionary like object
    retina_index: identifier of eye in case more than one is used
    retina: retina array object required for the generation of
        graph.
    manager: manager object to which LPU will be added
    '''
    dt = config['General']['dt']
    debug = config['Retina']['debug']
    time_sync = config['Retina']['time_sync']

    input_filename = config['Retina']['input_file']
    output_filename = config['Retina']['output_file']
    gexf_filename = config['Retina']['gexf_file']
    suffix = config['General']['file_suffix']

    output_file = '{}{}{}.h5'.format(output_filename, retina_index, suffix)
    gexf_file = '{}{}{}.gexf.gz'.format(gexf_filename, retina_index, suffix)

    inputmethod = config['Retina']['inputmethod']
    if inputmethod == 'read':
        print('Generating input files')
        with Timer('input generation'):
            # NOTE(review): RetinaFileInputProcessor is not imported in this
            # module; this branch would raise NameError if taken — confirm
            # the intended import path.
            input_processor = RetinaFileInputProcessor(config, retina)
    else:
        print('Using input generating function')
        input_processor = RetinaInputProcessor(config, retina)

    output_processor = FileOutputProcessor([('V', None)], output_file, sample_interval=1)

    # retina also allows a subset of its graph to be taken
    # in case it is needed later to split the retina model to more
    # GPUs
    G = retina.get_worker_nomaster_graph()
    nx.write_gexf(G, gexf_file)

    (comp_dict, conns) = LPU.graph_to_dicts(G)
    retina_id = get_retina_id(retina_index)

    extra_comps = [PhotoreceptorModel, BufferPhoton]

    manager.add(LPU, retina_id, dt, comp_dict, conns,
                device=retina_index, input_processors=[input_processor],
                output_processors=[output_processor],
                debug=debug, time_sync=time_sync, extra_comps=extra_comps)
def add_lamina_LPU(config, lamina_index, lamina, manager):
    '''
    This method adds Lamina LPU and its parameters to the manager
    so that it can be initialized later.

    --
    config: configuration dictionary like object
    lamina_index: identifier of eye in case more than one is used
    lamina: lamina array object required for the generation of
        graph.
    manager: manager object to which LPU will be added
    '''
    output_filename = config['Lamina']['output_file']
    gexf_filename = config['Lamina']['gexf_file']
    suffix = config['General']['file_suffix']

    dt = config['General']['dt']
    debug = config['Lamina']['debug']
    time_sync = config['Lamina']['time_sync']

    output_file = '{}{}{}.h5'.format(output_filename, lamina_index, suffix)
    gexf_file = '{}{}{}.gexf.gz'.format(gexf_filename, lamina_index, suffix)

    G = lamina.get_graph()
    nx.write_gexf(G, gexf_file)

    (comp_dict, conns) = LPU.graph_to_dicts(G)
    lamina_id = get_lamina_id(lamina_index)
    extra_comps = [BufferVoltage]

    # Record membrane voltage of every component at each step.
    output_processor = FileOutputProcessor(
        [('V', None)], output_file,
        sample_interval=1)

    # GPU device lamina_index+1: the matching retina LPU occupies
    # device lamina_index (see add_retina_LPU).
    manager.add(LPU, lamina_id, dt, comp_dict, conns,
                output_processors = [output_processor],
                device=lamina_index+1, debug=debug, time_sync=time_sync,
                extra_comps = extra_comps)
def connect_retina_lamina(config, index, retina, lamina, manager):
    '''
    The connections between Retina and Lamina follow
    the neural superposition rule of the fly's compound eye.
    See more information in NeurokernelRFC#2.
    Retina provides an interface to make this connection easier.

    --
    config: configuration dictionary like object
    index: identifier of eye in case more than one is used
    retina: retina array object
    lamina: lamina array object
    manager: manager object to which connection pattern will be added
    '''
    retina_id = get_retina_id(index)
    lamina_id = get_lamina_id(index)
    print('Connecting {} and {}'.format(retina_id, lamina_id))

    retina_selectors = retina.get_all_selectors()
    lamina_selectors = []  # lamina.get_all_selectors()
    with Timer('creation of Pattern object'):
        from_list = []
        to_list = []

        # accounts neural superposition
        rulemap = retina.rulemap
        for ret_sel in retina_selectors:
            if not ret_sel.endswith('agg'):
                # format should be '/ret/<ommid>/<neuronname>'
                _, lpu, ommid, n_name = ret_sel.split('/')

                # find neighbor of neural superposition
                neighborid = rulemap.neighbor_for_photor(int(ommid), n_name)

                # format should be '/lam/<cartid>/<neuronname>'
                lam_sel = lamina.get_selector(neighborid, n_name)

                # setup connection from retina to lamina
                # (forward photoreceptor signal, and an '_agg' feedback
                # connection from lamina back to retina)
                from_list.append(ret_sel)
                to_list.append(lam_sel)
                from_list.append(lam_sel+'_agg')
                to_list.append(ret_sel+'_agg')
                lamina_selectors.append(lam_sel)
                lamina_selectors.append(lam_sel+'_agg')

        pattern = Pattern.from_concat(','.join(retina_selectors),
                                      ','.join(lamina_selectors),
                                      from_sel=','.join(from_list),
                                      to_sel=','.join(to_list),
                                      gpot_sel=','.join(from_list+to_list))
        nx.write_gexf(pattern.to_graph(), retina_id+'_'+lamina_id+'.gexf.gz',
                      prettyprint=True)

    with Timer('update of connections in Manager'):
        manager.connect(retina_id, lamina_id, pattern)
def start_simulation(config, manager):
    """Spawn all registered LPUs and run for the configured number of
    steps; blocks until the simulation finishes."""
    steps = config['General']['steps']
    with Timer('retina and lamina simulation'):
        manager.spawn()
        manager.start(steps=steps)
        manager.wait()
def change_config(config, index):
'''
Useful if one wants to run the same simulation
with a few parameters changing based on index value
Need to modify else part
Parameters
----------
config: configuration object
|
inspirehep/invenio-search | invenio_search/registry.py | Python | gpl-2.0 | 5,066 | 0.000395 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Registries for search module."""
import os
from flask_registry import RegistryError, PkgResourcesDirDiscoveryRegistry, \
ModuleAutoDiscoveryRegistry, RegistryProxy
from werkzeug.utils import cached_property
from invenio_ext.registry import DictModuleAutoDiscoverySubRegistry
from invenio.utils.datastructures import LazyDict
from invenio.utils.memoise import memoize
searchext = RegistryProxy('searchext', ModuleAutoDiscoveryRegistry,
'searchext')
class FacetsRegistry(DictModuleAutoDiscoverySubRegistry):
    """Registry for facets modules.

    Serves also modules sets and their configuration
    for specific collections.
    """

    def keygetter(self, key, original_value, new_value):
        """Compute the key for a value being registered.

        The key is the facet name stored in facet module.

        :param key: Key if provided by the user. Defaults to None.
        :param value: Value being registered. FacetBuilder object
        """
        return new_value.name

    def valuegetter(self, value):
        """Return FacetBuilder from inside the module.

        :param value: loaded python module with FacetBuilder instance
            stored in facet property
        """
        # Returns None implicitly when the module has no valid facet.
        if self.facet_plugin_checker(value):
            return value.facet

    @classmethod
    def facet_plugin_checker(cls, plugin_code):
        """Handy function to check facet plugin.

        :param plugin_code: a module with facet definition - should have facet
            variable
        """
        # Imported lazily, presumably to avoid a circular import at
        # module load time — confirm before moving to the top of the file.
        from invenio_search.facet_builders import FacetBuilder
        if 'facet' in dir(plugin_code):
            candidate = getattr(plugin_code, 'facet')
            if isinstance(candidate, FacetBuilder):
                return candidate

    @memoize
    def get_facets_for_collection(self, collection_id):
        """Return facets set for a collection.

        :param collection_id: the collection id for requested facets set
        """
        from invenio_collections.models import FacetCollection
        facets_conf = FacetCollection.query\
            .filter(FacetCollection.id_collection == collection_id)\
            .order_by(FacetCollection.order)\
            .all()

        collection_facets = []
        for facet in facets_conf:
            if facet.facet_name not in self.keys():
                raise RegistryError(
                    'Facet %s is not available. Please check if the facet '
                    'is located in package specified in PACKAGES_EXCLUDE or '
                    'in PACKAGES_FACETS_EXCLUDE configuration.'
                    % facet.facet_name)
            collection_facets.append(self.get(facet.facet_name))

        return collection_facets

    @cached_property
    def default_facets(self):
        """Return default set of facets."""
        # Collection id 1 holds the site-wide default facet configuration.
        return self.get_facets_for_collection(1)

    def get_facets_config(self, collection, qid):
        """Return facet config for the collection.

        If no configuration found returns the default facets set.

        :param collection: Collection object facets matching which are returned
        :param qid: md5 hash of search parameters generated by
            get_search_query_id() from invenio_search.cache
        """
        if collection and self.get_facets_for_collection(collection.id):
            facets_set = self.get_facets_for_collection(collection.id)
        else:
            facets_set = self.default_facets

        return [facet.get_conf(collection=collection, qid=qid)
                for facet in facets_set]
# Lazily-populated registries exposed by this module.
facets = RegistryProxy('facets', FacetsRegistry, 'facets')

units = RegistryProxy(
    'searchext.units', DictModuleAutoDiscoverySubRegistry, 'units',
    keygetter=lambda key, value, new_value: value.__name__.split('.')[-1],
    valuegetter=lambda value: value.search_unit,
    registry_namespace=searchext,
)

mappings_proxy = RegistryProxy(
    "searchext.mappings", PkgResourcesDirDiscoveryRegistry, "mappings",
    registry_namespace=searchext
)


def create_mappings_lookup():
    """Map each discovered mapping file's basename to its first path."""
    out = {}
    for f in mappings_proxy:
        # First discovery wins; later packages cannot override a mapping.
        if os.path.basename(f) not in out:
            out[os.path.basename(f)] = f
    return out

mappings = LazyDict(create_mappings_lookup)

# Fixed: was misspelled '___all__' (three leading underscores), which made it
# an ordinary attribute with no effect on 'from ... import *'.
__all__ = ('mappings_proxy', 'mappings')
|
nemothekid/Colosseum--Year-3XXX | GameClient.py | Python | mit | 7,613 | 0.005517 | import Network
from time import sleep
from threading import Thread
# Identifiers for the network events forwarded to the main application
# via main.handleNetworkCall(CALL_*, args).
CALL_ROOMLIST = 0
CALL_WEAPLIST = 1
CALL_PLAYERLIST = 2
CALL_NEWPLAYER = 3
CALL_PLAYERLEFT = 4
CALL_CHAT = 5
CALL_PLAYERDAT = 6
CALL_ROOMSTAT = 7
CALL_LEAVEROOM = 8
CALL_SHOOT = 9
CALL_SCORE = 10
class GameClient(Network.Client):
    """Game-side network client.

    Tracks the room list, the (id-sorted) player list and the scores, and
    forwards every server event to the main application through
    main.handleNetworkCall(CALL_*, args).  Blocking operations (connect,
    join/make/leave room) are coordinated through self.status and
    complete()/done().
    """

    # Pending-operation states used by complete()/done().
    CONNECTING = 0
    JOINING_ROOM = 1
    LEAVING_ROOM = 2

    rooms = []
    players = []
    weapList = []
    scores = {}
    response = {}
    currRoomInfo = None
    main = None
    status = -1        # currently-pending operation; -1 when idle
    charId = 0
    roomState = -1
    roomId = 0
    roomName = ""
    stateDict = {
        "WAITING": 0,
        "PLAYING": 1,
        "DEAD": 99
    }
    invStateDict = {
        0: "WAITING",
        1: "PLAYING",
        99: "DEAD"
    }
    winnerId = -1

    def __init__(self, main):
        super(GameClient, self).__init__()
        self.main = main
        # Rebind the mutable containers per instance so they are not
        # shared through the class attributes above.
        self.rooms = []
        self.scores = {}
        self.players = []
        self.weapList = []
        self.response = {}

    def connect(self, name, addr, evt=False):  # Blocks
        """Connect to the server; returns the result unless evt=True, in
        which case onConnect() is fired instead."""
        self.status = self.CONNECTING
        super(GameClient, self).connect(name, addr)
        if evt:
            self.onConnect(self.complete(self.CONNECTING))
        else:
            return self.complete(self.CONNECTING)

    def connect_async(self, name, addr):  # Doesn't block
        t = Thread(target=self.connect, args=[name, addr, True])
        t.start()

    # NETWORK FUNCTIONS
    def complete(self, event, timeout=2):
        """Poll until *event* is no longer the pending status; False on
        timeout, otherwise the response recorded by done()."""
        waited = 0
        while event == self.status and waited <= timeout:
            sleep(.1)
            waited += .1
        if waited >= timeout:
            return False
        return self.response[event]

    def done(self, event, response):
        """Record *response* for *event* and mark the client idle."""
        self.response[event] = response
        self.status = -1

    def playerById(self, pId):
        """Binary search self.players (kept sorted by player id); returns
        the list index or None when not found."""
        low = 0
        high = len(self.players) - 1
        while low <= high:
            mid = (low + high) >> 1
            midId = self.players[mid][0]
            if midId < pId:
                low = mid + 1
            elif midId > pId:
                high = mid - 1
            else:
                return mid
        return None

    def getPlayers(self):
        return self.players

    def getRooms(self):
        return self.rooms

    def clearScores(self):
        self.scores = {}

    # EVENT FUNCTIONS
    def onConnect(self, result):
        self.main.onConnect(result)

    def onRoomList(self, data):
        self.rooms = data
        self.main.handleNetworkCall(CALL_ROOMLIST, (self.rooms,))

    def onWeapList(self, data):
        self.weapList = data
        self.main.handleNetworkCall(CALL_WEAPLIST, (self.weapList,))

    def onPlayerList(self, playerList, roomId, roomState, yourId):
        self.players = playerList
        self.playerId = yourId
        self.players.sort()
        self.roomId = roomId
        self.roomState = roomState
        # A fresh player list completes any pending connect/join/leave.
        if self.status in [self.CONNECTING, self.JOINING_ROOM, self.LEAVING_ROOM]:
            self.done(self.status, True)
        self.main.handleNetworkCall(CALL_PLAYERLIST, (self.players,))

    def onNewPlayer(self, player):
        #playername = player[0][:player[0].find('\00')]
        self.players.append(player)
        self.players.sort()
        self.main.handleNetworkCall(CALL_NEWPLAYER, (player,))

    def onPlayerLeft(self, data):
        playerPos = self.playerById(data[0])
        player = self.players[playerPos]
        del self.players[playerPos]
        # data[2] is the new room owner's id (-1 when unchanged);
        # tuple index 4 presumably marks ownership — TODO confirm.
        if data[2] != -1:
            self.players[self.playerById(data[2])] = self.changeTuple(self.players[self.playerById(data[2])], 4, True)
        self.main.handleNetworkCall(CALL_PLAYERLEFT, (player,))

    def changeTuple(self, tup, key, value):
        """Return a copy of *tup* with tup[key] replaced by *value*."""
        flist = list(tup)
        flist[key] = value
        return tuple(flist)

    def onChat(self, data):
        self.main.handleNetworkCall(CALL_CHAT, (data,))

    def onPlayerData(self, data):
        self.main.handleNetworkCall(CALL_PLAYERDAT, (data,))

    def onRoomStat(self, data):
        self.winnerId = data[1]
        self.main.handleNetworkCall(CALL_ROOMSTAT, (data,))
        #if data[0] == 0:
        #    self.main.endGame()
        #elif data[0] == 1:
        #    print "starting game"
        #    self.main.startGame()

    def onRoomSwitch(self, action, result):
        self.main.onRoomSwitch(action, result)
        return result

    def onLeaveRoom(self):
        # Server refused/cancelled a pending join.
        if self.status in [self.JOINING_ROOM]:
            self.done(self.status, False)

    def onShoot(self, bulletdata):
        self.main.handleNetworkCall(CALL_SHOOT, (bulletdata,))

    def onScore(self, score):
        self.scores[score[0]] = score[1], score[2]
        self.scores[score[3]] = score[4], score[5]
        self.main.handleNetworkCall(CALL_SCORE, (score,))

    def onChangeChar(self, charId, playerId):
        playerPos = self.playerById(playerId)
        player = self.players[playerPos]
        self.players[playerPos] = self.changeTuple(self.players[playerPos], 3, charId)

    def onDisconnect(self):
        self.main.onDisconnect()

    ## SENDING FUNCTIONS
    def joinRoom(self, roomid, roomName, block=False):
        if block:
            self.status = self.JOINING_ROOM
            self.sendDataReliable(Network.Structs.joinRoom.dataType,
                                  Network.Structs.joinRoom.pack(roomid)).join()
            # This function blocks...
            return self.onRoomSwitch(self.JOINING_ROOM, self.complete(self.JOINING_ROOM))
        else:
            self.winnerId = -1
            self.roomName = roomName
            Thread(target=self.joinRoom, args=[roomid, roomName, True]).start()

    def makeRoom(self, roomName, block=False):
        if block:
            self.status = self.JOINING_ROOM
            self.sendDataReliable(Network.Structs.makeRoom.dataType,
                                  Network.Structs.makeRoom.pack(len(roomName)) + roomName)
            return self.onRoomSwitch(self.JOINING_ROOM, self.complete(self.JOINING_ROOM))
        else:
            self.winnerId = -1
            self.roomName = roomName
            Thread(target=self.makeRoom, args=[roomName, True]).start()

    def leaveRoom(self, block=False):
        if block:
            self.status = self.LEAVING_ROOM
            self.sendDataReliable(Network.Structs.leaveRoom.dataType,
                                  Network.Structs.leaveRoom.pack())
            return self.onRoomSwitch(self.LEAVING_ROOM, self.complete(self.LEAVING_ROOM))
        else:
            self.winnerId = -1
            Thread(target=self.leaveRoom, args=[True]).start()

    def startGame(self):
        self.sendDataReliable(Network.Structs.startGame.dataType, Network.Structs.startGame.pack(0))

    def sendGameData(self, gameData):
        # Unreliable channel: per-frame state tolerates loss.
        self.sendData(Network.Structs.playerDat.dataType, gameData)

    def sendShoot(self, bullet):
        self.sendDataReliable(Network.Structs.shoot.dataType,
                              Network.Structs.shoot.pack(-1, bullet.x, bullet.y, bullet.angle, bullet.type))

    def setCharacter(self, charId):
        self.sendDataReliable(Network.Structs.setCharacter.dataType,
                              Network.Structs.setCharacter.pack(charId, 0))
        self.charId = charId

    def sendDeath(self, killerid):
        self.sendDataReliable(Network.Structs.onDeath.dataType, Network.Structs.onDeath.pack(killerid))

    def sendPicked(self, serverId):
        self.sendDataReliable(Network.Structs.takeWeap.dataType, Network.Structs.takeWeap.pack(serverId))

    def sendChat(self, data):
        self.sendDataReliable(Network.Structs.preChat.dataType,
                              Network.Structs.preChat.pack(len(data)) + data)

    def __del__(self):
        super(GameClient, self).__del__()
|
stickybath/BetaMaleBot | src/utilRegex/regex.py | Python | gpl-3.0 | 2,247 | 0.012461 | import re
from utilRegex import database
class regex:
    # Builds and holds compiled regular expressions from phrases stored in the
    # bot's database (phrases are matched case-insensitively as word-ish units).

    def __init__(self, botCfg):
        """class initialization function
        """
        #intitialize database variables
        self.db = database()
        #initialize regex variables
        self.phrase = ''
        self.url = ''
        #initialize status variables
        self.phraseReady = False
        self.urlReady = False
        #load database configuration settings
        self.db.loadConfig(botCfg)

    def buildPhrase(self):
        """compile phrase regex object

        builds a regex object that includes
        existing phrases from the phrase table
        in the database.

        returns:
            success: True
            failure: False
        """
        #initialize function
        self.phraseReady = False
        #open database connection
        # NOTE(review): the bare except clauses below swallow every exception,
        # including KeyboardInterrupt; consider narrowing to Exception.
        try:
            self.db.connect()
        except:
            print 'utilRegex/regex.buildPhrase: failed to connect to database.'
            return False
        #pull records from database
        try:
            self.db.cursor.execute('SELECT *' + \
                ' FROM phraseprint()' + \
                ' f(id bigint, phrase text, username text)')
            records = self.db.cursor.fetchall()
        except:
            print 'utilRegex/regex.buildPhrase: failed to retrieve records from database.'
            self.db.cursor.close()
            self.db.disconnect()
            return False
        #close database connection
        self.db.cursor.close()
        self.db.disconnect()
        #build pattern string
        if len(records) > 0: #only build the string if records are present
            # join each escaped phrase into one alternation: p1|p2|...
            pattern = ''.join(['%s|' % (re.escape(record[1])) for record in records])
            pattern = pattern[:-1]
        else: #otherwise a placeholder (literally xD)
            pattern = re.escape('a placeholder')
        # surround the alternation with boundaries: start/whitespace/hyphenated
        # prefix before, optional suffix letters and a contraction after, then
        # whitespace/punctuation/end.
        pattern = r'(^|\s|[a-z]-)(%s)+([a-z]{1,4})?(\'[a-z]{1,4})?(\s|\.|,|\?|\!|$)' % (pattern)
        #compile the regex object
        self.phrase = re.compile(pattern, re.IGNORECASE)
        #exit the function
        self.phraseReady = True
        return True
|
jawr/kontrolvm | apps/network/urls.py | Python | mit | 239 | 0.004184 | from d | jango.conf.urls.defaults import *
urlpatterns = patterns('apps.network.views',
url(r'^add/', 'add'),
url(r'^edit/', 'edit'),
url(r'^delete/(?P<pk>\d+)/', 'delete'),
url(r'^(?P<pk>\d+)/', 'overview'),
url(r'$', 'i | ndex'),
)
|
opendaylight/spectrometer | server/spectrometer/api/gerrit.py | Python | epl-1.0 | 5,172 | 0.00116 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# @License EPL-1.0 <http://spdx.org/licenses/EPL-1.0>
##############################################################################
# Copyright (c) 2016 The Linux Foundation and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Eclipse Public License v1.0
# which accompanies this distribution, and is available at
# http://www.eclipse.org/legal/epl-v10.html
##############################################################################
from flask import Blueprint
from flask import current_app as app
from flask import jsonify
from flask import request
from spectrometer.handlers.gerrit import GerritHandler
from spectrometer.utils import check_parameters
# Blueprint exposing the Gerrit-backed API endpoints defined below.
gerritapi = Blueprint('gerrit', __name__)
@gerritapi.route('/branches')
def branches():
    """Return the branches of a given repository by querying Gerrit.

    GET /gerrit/branches?param=<value>

    :arg str project: Project to query branches from. (required)

    JSON::

        {
          "branches": [
            {
              "ref": "refs/heads/stable/beryllium",
              "revision": "8f72284f3808328604bdff7f91a6999094f7c6d7"
            },
            ...
          ]
        }
    """
    params = {
        'project': request.args.get('project', None),
    }
    result = check_parameters(params)
    if result:
        # required parameters missing: result already holds the error payload
        return jsonify(result)
    handler = GerritHandler(app.config['GERRIT_URL'])
    branch_list = handler.project_branches_list(params['project'])
    if branch_list:
        result = {'branches': branch_list}
    else:
        result = {'error': 'No branches found for {0}.'.format(params['project'])}
    return jsonify(result)
@gerritapi.route('/merged_changes')
def merged_changes():
    """Returns a list of merged changes in a given repository by querying Gerrit.

    GET /gerrit/changes?param=<value>

    :arg str project: Project to query changes from. (required)
    :arg str branch: Branch to pull changes from. (default: master)

    JSON::

        {
          "changes": [
            {
              "_number": 37706,
              "branch": "master",
              "change_id": "I4168e023b77bfddbb6f72057e849925ba2dffa17",
              "created": "2016-04-18 02:42:33.000000000",
              "deletions": 0,
              "hashtags": [],
              "id": "spectrometer~master~I4168e023b77bfddbb6f72057e849925ba2dffa17",
              "insertions": 119,
              "owner": {
                "_account_id": 2759
              },
              "project": "spectrometer",
              "status": "MERGED",
              "subject": "Add API to return commits since ref",
              "submittable": false,
              "topic": "git-api",
              "updated": "2016-04-19 09:03:03.000000000"
            },
            ...
          ]
        }
    """
    mapping = {
        'project': request.args.get('project', None),
        'branch': request.args.get('branch', 'master')
    }
    # check_parameters returns an error dict when required args are missing
    result = check_parameters(mapping)
    if not result:
        gerrit = GerritHandler(app.config['GERRIT_URL'])
        changes = gerrit.project_merged_changes_list(mapping['project'], mapping['branch'])
        if not changes:
            result = {'error': 'No changes found for {0}.'.format(mapping['project'])}
        else:
            result = {'changes': changes}
    return jsonify(result)
@gerritapi.route('/projects')
def projects():
    """Returns a list of projects by querying Gerrit.

    GET /gerrit/projects

    JSON::

        {
          "projects": [
            "groupbasedpolicy",
            "spectrometer",
            "releng/autorelease",
            "snmp4sdn",
            "ovsdb",
            "nemo",
            ...
          ]
        }
    """
    # No parameters to validate; query Gerrit directly.
    gerrit = GerritHandler(app.config['GERRIT_URL'])
    return jsonify({'projects': gerrit.projects_list()})
@gerritapi.route('/tags')
def tags():
    """Returns a list of tags in a given repository by querying Gerrit.

    GET /gerrit/tags?param=<value>

    :arg str project: Project to query tags from. (required)

    JSON::

        {
          "tags": [
            {
              "message": "OpenDaylight Beryllium-SR1 release",
              "object": "f76cc0a12dc8f06dae3cedc31d06add72df8de5d",
              "ref": "refs/tags/release/beryllium-sr1",
              "revision": "8b92d614ee48b4fc5ba11c3f38c92dfa14d43655",
              "tagger": {
                "date": "2016-03-23 13:34:09.000000000",
                "email": "thanh.ha@linuxfoundation.org",
                "name": "Thanh Ha",
                "tz": -240
              }
            },
            ...
          ]
        }
    """
    mapping = {
        'project': request.args.get('project', None),
    }
    # check_parameters returns an error dict when required args are missing
    result = check_parameters(mapping)
    if not result:
        gerrit = GerritHandler(app.config['GERRIT_URL'])
        tags = gerrit.project_tags_list(mapping['project'])
        # BUG FIX: this previously tested ``if not branches:``, which referred
        # to the module-level ``branches`` view function (always truthy), so an
        # empty tag list was silently returned as {'tags': []} instead of the
        # intended error payload.
        if not tags:
            result = {'error': 'No tags found for {0}.'.format(mapping['project'])}
        else:
            result = {'tags': tags}
    return jsonify(result)
|
anestv/pa | test/myRequest.py | Python | artistic-2.0 | 1,697 | 0.030642 | from external import requests as reqs
class request:
    # A declarative HTTP test request: URL, query/form data and expected
    # response properties; mismatches abort the whole test run.

    # NOTE: class-level session map shared by ALL instances; session 0 is
    # created once at class-definition time.
    sessions = {0: reqs.Session()} # static

    def __init__(self, url, expect = {}, get = {}, post = {}):
        # NOTE(review): mutable default arguments are shared between calls;
        # safe only while callers never mutate them (checkExpects does delete
        # the 'status' key from `expect` on first use).
        self.url = url
        self.get = get
        self.post = post
        self.expect = expect
        # any POST data makes this a POST request, otherwise GET
        self.kind = 'POST' if post else 'GET'
        self.session = 0

    def checkExpects(self, r):
        # Validate response `r` against self.expect; every mismatch prints a
        # diagnostic and terminates the process with exit code 1.
        if 'status' in self.expect:
            expStatus = self.expect['status']
            del self.expect['status']
        else:
            # default expectation: HTTP 200
            expStatus = 200
        if r.status_code != expStatus:
            print(self.kind + ' request to ' + self.url + ' responded with '
                  + str(r.status_code) + ' instead of ' + str(expStatus))
            exit(1)
        # remaining keys in expect are required response headers
        for expHeader in self.expect:
            if not expHeader in r.headers:
                print('No header "' + expHeader + '" found in response to '
                      + self.kind + ' request to ' + self.url)
                exit(1)
            if r.headers[expHeader] != self.expect[expHeader]:
                print('Header "' + expHeader + '" in ' + self.kind + ' request to ' + self.url
                      + ' was not '+ self.expect[expHeader] + ' but ' + r.headers[expHeader])
                exit(1)
        print(' - ' + self.kind + ' request to ' + self.url + ' successful')

    def send(self, USE_SESSION):
        # Issue the request, optionally through a persistent per-id session
        # (so cookies survive across requests with the same self.session id).
        if USE_SESSION:
            if not self.session in self.sessions:
                self.sessions[self.session] = reqs.Session()
            _reqs = self.sessions[self.session]
        else:
            _reqs = reqs
        #possibly argument custom headers
        if self.kind == 'POST':
            r = _reqs.post(self.url, params = self.get, data = self.post)
        else:
            r = _reqs.get(self.url, params = self.get)
        self.checkExpects(r)
|
fhoring/autorest | src/generator/AutoRest.Python.Tests/Expected/AcceptanceTests/CustomBaseUri/autorestparameterizedhosttestclient/operations/paths_operations.py | Python | mit | 2,859 | 0.001399 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from .. import models
# NOTE: AutoRest-generated client code (see file header) -- edits here are
# lost on regeneration.
class PathsOperations(object):
    """PathsOperations operations.

    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self.config = config

    def get_empty(
            self, account_name, custom_headers=None, raw=False, **operation_config):
        """Get a 200 to test a valid base uri.

        :param account_name: Account Name
        :type account_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: None
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        :raises:
         :class:`ErrorException<Fixtures.AcceptanceTestsCustomBaseUri.models.ErrorException>`
        """
        # Construct URL
        url = '/customuri'
        # accountName and host are interpolated into the parameterized base URI
        path_format_arguments = {
            'accountName': self._serialize.url("account_name", account_name, 'str', skip_quote=True),
            'host': self._serialize.url("self.config.host", self.config.host, 'str', skip_quote=True)
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if custom_headers:
            header_parameters.update(custom_headers)

        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)

        if response.status_code not in [200]:
            raise models.ErrorException(self._deserialize, response)

        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response
|
Terhands/saskdance | app/web/users/__init__.py | Python | gpl-3.0 | 23 | 0 | __author__ = 'tere | sah'
| |
punithpatil/tkinter-text-editor | help_menu.py | Python | mit | 483 | 0.031056 | from common_imports import *
fr | om tkMessageBox import *
import sys
class Help():
    """Actions for the editor's Help menu."""

    def about(self):
        """Show the About dialog.

        Bound as a Tk menu command (``command=help.about``), so it is always
        invoked with no arguments. The parameter was previously (mis)named
        ``root``, but as a bound method it actually received the Help
        instance -- renamed to ``self`` for clarity; behavior is unchanged.
        """
        showinfo(title="About", message="This a simple text editor implemented in Python's Tkinter")
def main(root,text,menubar):
    # Attach a "Help" cascade (with an About entry) to the given menubar and
    # install the menubar on the root window.
    # NOTE(review): `text` is unused -- presumably kept so every menu module
    # shares the same main(root, text, menubar) signature; confirm in main.py.
    # NOTE(review): local `help` shadows the builtin of the same name.
    help = Help()
    helpMenu = Menu(menubar)
    helpMenu.add_command(label="About", command=help.about)
    menubar.add_cascade(label="Help", menu=helpMenu)
    root.config(menu=menubar)
if __name__=="__main__":
    # This module only provides the Help menu; the editor entry point is main.py.
    print ("Please run 'main.py'")
|
cvsuser-chromium/chromium | chrome/common/extensions/docs/server2/test_util.py | Python | bsd-3-clause | 1,383 | 0.015184 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
import logging
import os
import sys
def CaptureLogging(f):
  '''Call the zero-argument callable |f|, capturing every LogRecord emitted
  through the root logger while it runs. Returns the captured records.
  '''
  records = []

  class _Recorder(object):
    def filter(self, record):
      # Stash the record; returning None (falsy) also prevents it from
      # reaching the real handlers.
      records.append(record)

  recorder = _Recorder()
  root_logger = logging.getLogger('')
  root_logger.addFilter(recorder)
  f()
  root_logger.removeFilter(recorder)
  return records
def EnableLogging(name):
  '''Decorator factory: route calls to logging.<name> to stdout while the
  decorated function runs.
  '''
  def _echo(message, *args):
    print(message % args)
  return _ReplaceLogging(name, _echo)
def DisableLogging(name):
  '''Decorator factory: silence the log with |name| for the duration of the
  decorated function.
  '''
  def _swallow(_, *args):
    return None
  return _ReplaceLogging(name, _swallow)
def _ReplaceLogging(name, replacement):
  # Decorator factory: temporarily swap the logging.<name> attribute for
  # |replacement| while the decorated function runs, then restore it.
  def decorator(fn):
    def impl(*args, **optargs):
      saved = getattr(logging, name)
      setattr(logging, name, replacement)
      try:
        return fn(*args, **optargs)
      finally:
        # always restore the original, even if fn raises
        setattr(logging, name, saved)
    return impl
  return decorator
def ReadFile(*path):
  '''Return the contents of the file at |path|, resolved relative to two
  directories above the script's start directory (sys.path[0]).
  '''
  target = os.path.join(sys.path[0], '..', '..', *path)
  with open(target) as handle:
    return handle.read()
|
sharanramjee/TensorFlow-plant-and-animal-cell-image-classifier | label_image.py | Python | apache-2.0 | 1,364 | 0.004399 | import tensorflow as tf, sys
# Path of the image to classify is the first command-line argument.
image_path = sys.argv[1]

# Read in the image_data
image_data = tf.gfile.FastGFile(image_path, 'rb').read()

# Loads label file, strips off carriage return
label_lines = [line.rstrip() for line
               in tf.gfile.GFile("/cellule/retrained_labels.txt")]

# Unpersists graph from file
with tf.gfile.FastGFile("/cellule/retrained_graph.pb", 'rb') as f:
    graph_def = tf.GraphDef()
    graph_def.ParseFromString(f.read())
    _ = tf.import_graph_def(graph_def, name='')

with tf.Session() as sess:
    # Feed the image_data as input to the graph and get first prediction
    softmax_tensor = sess.graph.get_tensor_by_name('final_result:0')
    predictions = sess.run(softmax_tensor, \
        {'DecodeJpeg/contents:0': image_data})
    # Sort to show labels of first prediction in order of confidence
    top_k = predictions[0].argsort()[-len(predictions[0]):][::-1]
    for node_id in top_k:
        human_string = label_lines[node_id]
        score = predictions[0][node_id]
        print('%s (score = %.5f)' % (human_string, score))

# To run the program - docker run -it -v ~/Desktop/cellule/:/cellule/ gcr.io/tensorflow/tensorflow:latest-devel
# Enter the above line of code in the docker command prompt
# Then enter - python /cellule/label_image.py <image_location> next to the # in the docker command prompt
|
gwpy/gwsumm | gwsumm/plot/guardian/__main__.py | Python | gpl-3.0 | 6,353 | 0 | # -*- coding: utf-8 -*-
# Copyright (C) Duncan Macleod (2018)
#
# This file is part of GWSumm.
#
# GWSumm is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GWSumm is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GWSumm. If not, see <http://www.gnu.org/licenses/>.
"""Plot the segments for a given Guardian node with a given definition
"""
import argparse
import os
import re
import shutil
import sys
from collections import OrderedDict
from configparser import DEFAULTSECT
from gwpy.time import to_gps
from gwdetchar.cli import logger
from ... import globalv
from ...archive import (write_data_archive, read_data_archive)
from ...config import GWSummConfigParser
from ...data import get_timeseries
from ...state import generate_all_state
from ...tabs import GuardianTab
# set matplotlib backend
from matplotlib import use
use('Agg')  # headless backend: must be selected before pyplot is imported

__author__ = 'Duncan Macleod <duncan.macleod@ligo.org>'
__credits__ = 'Alex Urban <alexander.urban@ligo.org>'

# Program name shown in --help: module invocation vs. installed entry point.
PROG = ('python -m gwsumm.plot.guardian' if sys.argv[0].endswith('.py')
        else os.path.basename(sys.argv[0]))
LOGGER = logger(name=PROG.split('python -m ').pop())

# Restrict INI option parsing to '=' delimiters (':' would clash with
# channel-name-style option keys).
GWSummConfigParser.OPTCRE = re.compile(
    r'(?P<option>[^=\s][^=]*)\s*(?P<vi>[=])\s*(?P<value>.*)$')
# -- utilities ----------------------------------------------------------------
def safe_eval(val):
    """Evaluate the given string as a line of python, if possible

    If the evaluation fails for any reason, the original value is returned
    as a `str` instead.

    .. warning::

       This calls :func:`eval` and must only be fed trusted, operator-supplied
       input (here: command-line ``--plot-params`` values).
    """
    try:
        return eval(val)
    except Exception:
        # Broadened from (NameError, SyntaxError): the contract promises a
        # string fallback whenever evaluation fails, but exceptions such as
        # ZeroDivisionError or AttributeError previously escaped and crashed
        # argument handling.
        return str(val)
# -- parse command-line -------------------------------------------------------
def create_parser():
    """Create a command-line parser for this entry point
    """
    # initialize argument parser
    parser = argparse.ArgumentParser(
        prog=PROG,
        description=__doc__,
    )
    # archive options are mutually exclusive: read/write vs. read-only
    archopts = parser.add_mutually_exclusive_group()

    # positional arguments
    parser.add_argument('node')
    parser.add_argument('gpsstart', type=to_gps)
    parser.add_argument('gpsend', type=to_gps)
    parser.add_argument('config', help='config file defining Guardian node')

    # optional flags
    parser.add_argument(
        '-i',
        '--ifo',
        type=str,
        default="L1",
    )
    parser.add_argument(
        '-s',
        '--section',
        type=str,
        help="suffix of INI tab section to read, e.g. give "
             "--section='ISC_LOCK' to read [tab-ISC_LOCK] "
             "section, defaults to {node}",
    )
    parser.add_argument(
        '-t',
        '--epoch',
        type=to_gps,
        help="Zero-time for plot, defaults to GPSSTART",
    )
    parser.add_argument(
        '-p',
        '--plot-params',
        action='append',
        default=[],
        help="extra plotting keyword argument",
    )
    parser.add_argument(
        '-m',
        '--multi-process',
        type=int,
        default=1,
        dest='nproc',
        help="number of processes to use, default: %(default)s",
    )
    parser.add_argument(
        '-o',
        '--output-file',
        default="guardian.png",
        help="output file name, default: %(default)s",
    )
    parser.add_argument(
        '-v',
        '--verbose',
        action='store_true',
        help="print verbose output, default: False",
    )
    parser.add_argument(
        '-P',
        '--profile',
        action='store_true',
        help="print timing output, default: False",
    )

    # archive options
    archopts.add_argument(
        '-a',
        '--archive',
        help="full path of HDF archive for data",
    )
    archopts.add_argument(
        '-r',
        '--read-only-archive',
        help="full path of HDF archive for data, does not write",
    )

    # return the argument parser
    return parser
# -- main code block ----------------------------------------------------------
def main(args=None):
    """Run the online Guardian node visualization tool

    Parses the command line (or *args*), reads data (optionally from an HDF
    archive), renders the first plot of the Guardian tab to the requested
    output file, and optionally re-writes the archive.
    """
    parser = create_parser()
    args = parser.parse_args(args=args)
    globalv.VERBOSE = args.verbose
    globalv.PROFILE = args.profile
    # default the plot epoch to the start of the queried interval
    args.epoch = args.epoch or args.gpsstart
    state = generate_all_state(args.gpsstart, args.gpsend)

    # format params
    params = {}
    for input_ in args.plot_params:
        key, val = input_.split('=', 1)
        params[key.strip('-')] = safe_eval(val)

    # read config
    config = GWSummConfigParser(dict_type=OrderedDict)
    config.read([args.config])
    config.set(DEFAULTSECT, 'gps-start-time', str(int(args.gpsstart)))
    config.set(DEFAULTSECT, 'gps-end-time', str(int(args.gpsend)))
    config.set(DEFAULTSECT, 'IFO', args.ifo)
    sec = 'tab-{}'.format(args.section or args.node)

    # read archive
    if args.archive and not args.read_only_archive:
        args.read_only_archive = args.archive
    if args.read_only_archive and os.path.isfile(args.read_only_archive):
        read_data_archive(args.read_only_archive)
        LOGGER.info(
            "Read data archive from {0.read_only_archive}".format(args))

    # make tab; only the first plot is rendered by this tool
    tab = GuardianTab.from_ini(config, sec, mode='gps', path='.', plotdir='.')
    tab.plots = tab.plots[:1]
    tab.plots[0].pargs.update(params)
    tab.plots[0].pargs['epoch'] = args.epoch

    # process
    LOGGER.info("Processing:")
    tab.process(nproc=args.nproc)
    # move the generated plot to the requested output path
    plotfile = tab.plots[0].outputfile
    shutil.copy(plotfile, args.output_file)
    os.remove(plotfile)
    LOGGER.info("Plot saved to {0.output_file}".format(args))

    # crop and save archive
    if args.archive:
        for channel in globalv.DATA:
            globalv.DATA[channel] = get_timeseries(channel, state, query=False)
        write_data_archive(args.archive)
        LOGGER.info("Archive recorded as {0.archive}".format(args))
# -- run from command-line ----------------------------------------------------

if __name__ == "__main__":
    # standard module entry point: python -m gwsumm.plot.guardian ...
    main()
|
hmgoalie35/ui_testing_tool | ui_testing.py | Python | mit | 29,427 | 0.004826 | # Import necessary modules
from selenium.common.exceptions import NoSuchElementException
from PIL import Image
import platform
import os
import inspect
import time
import math
import sys
# Valid methods selenium can use to search for an element on a page. See
# selenium python API for more info if desired.
VALID_METHODS = ['id', 'name', 'xpath', 'link_text',
                 'partial_link_text', 'tag_name', 'class_name', 'css_selector']
"""
Notes:
Baseline images are those that everything should be compared to. If you want to change the baseline files you can just delete the baseline folder, or just simply run the program in baseline mode
and the program will prompt you if you want to overwrite the current baselines, enter in 'y'. If you want to only change one baseline photo, you can just copy the new baseline file to the baseline folder
or run the program and type 'y' only for the file name you want overwritten.
New images are the ones generated when the program is not in baseline mode, these are the images that have had some html or css changes. The corresponding baseline and new images will be compared to generate
the diff images.
Diff images are those that show the differences between the baseline and new images. I have the program currently generating a .gif that visually shows the change as well as a normal .png that just shows
what the change was as an overlay.
"""
class ui_testing(object):
# Constructor that takes a selenium driver, browser, and is_baseline.
# is_baseline tells the code if it should generate the diff files or not.
def __init__(self, driver, browser, is_baseline):
# Used to keep track of the generated file name.
self.file_path = None
# if baseline or not.
self.is_baseline = is_baseline
# used to keep track of the corresponding directories. (these are set in the setUpDirectories function)
self.baseline_location = None
self.new_location = None
self.diff_location = None
# the browser and selenium driver have to both be the same or the program throws an error. ex: can't use Firefox() driver while passing in chrome to the constructor.
# The browser the test is being run on.
self.browser = browser.lower()
# The selenium driver being used.
self.driver = driver
# list to contain the names of files that have changed, if any.
self.difference_list = []
# used in generating file names for chrome
self.count = 1
# used to keep track if the program should be terminated early or not.
self.early_termination = False
# Set up the directories, see function documentation below.
self.setUpDirectories()
"""
Create the necessary directories used to store the images. All of the housekeeping stuff.
"""
def setUpDirectories(self):
# Get the location of the caller script by inspecting the stack. This is probably not necessary, will revisit.
# TODO fix logic behind where the ui_testing folder is created.
caller_location = os.path.abspath(inspect.stack()[2][1])
# The current directory is where the caller script is located
current_directory = os.path.abspath(
os.path.dirname(caller_location))
# Create a ui_testing folder in the current directory
ui_testing_folder = os.path.abspath(
os.path.join(current_directory, 'ui_testing/'))
# Create a folder that uses the name of the browser
browser_folder = os.path.abspath(
os.path.join(ui_testing_folder, self.browser))
# Create a folder in the browser folder called baseline, and do this for new and diff folders also.
# The new folder is where the new images are saved, the diff folder is
# where the imagemagick results are saved
self.baseline_location = os.path.abspath(
os.path.join(browser_folder, 'baseline/'))
self.new_location = os.path.abspath(
os.path.join(browser_folder, 'new/'))
self.diff_location = os.path.abspath(
os.path.join(browser_folder, 'diff/'))
# Make sure the browser driver and the browser specified in the constructor match.
driverName = self.driver.name
# internet explorer specific nuance.
if driverName == 'internet explorer':
driverName = 'ie'
if driverName != self.browser:
self.driver.quit()
raise Exception(
"[ERROR] the %s driver being used does not match the %s browser specified on the command line." %
(self.driver.name, self.browser))
# Create the directories if they do not already exist.
if not os.path.exists(ui_testing_folder):
os.mkdir(ui_testing_folder)
if not os.path.exists(browser_folder):
os.mkdir(browser_folder)
if not os.path.exists(self.baseline_location):
os.mkdir(self.baseline_location)
if not os.path.exists(self.new_location):
os.mkdir(self.new_location)
if not os.path.exists(self.diff_location):
os.mkdir(self.diff_location)
"""
Params:
[REQUIRED] description: anything that can be used to uniquely identify what the screenshot will be of.
[OPTIONAL] method: the method to be used to search for the element_specifier. Valid methods are 'id',
'name', 'xpath', 'link_text | ', 'partial_link_text', 'tag_name', 'class_name', 'css_selector'. This is to be passed
if you want to crop the screenshot to just an element of the page (ex: just have a picture of a certain button). It is required
if you pass in element_specifier.
[OPTIONAL] element_specifier: the element_specifier used to search for the element. | This is to be passed
if you want to crop the screenshot to just an element of the page (ex: just have a picture of a certain button). It is required
if you pass in method.
*****Element cropping is not supported on chrome!!!! This is due to a limitation of chromedriver*****
If method is passed to the function, then element_specifier needs to be passed to the function and vice versa
By default a screenshot of the whole page is taken. If method and element_specifier are passed to the function then
the full page screenshot will be cropped to just that element.
ex: generateFileNameAndTakeScreenshot('google_landing_page_text_entry_box', 'id', 'whatever_the_id_is')
This function generates a file name for the image that is going to be saved and then takes a screenshot. If the browser being used is chrome,
then we need to take multiple screenshots because chromedriver does not support full page screenshots.
"""
def generateFileNameAndTakeScreenshot(self, description, method=None, element_specifier=None):
# If the browser is chrome and an element_specifier or method was passed (i.e. user is trying to crop an element) throw an error.
if self.browser == "chrome" and (element_specifier or method):
self.driver.quit()
raise Exception(
"Cropping specific elements is not supported when using chrome, this is due to a limitation of chromedriver. Please remove %s & %s from the generateFileNameAndTakeScreenshot function and run again." %
(method, element_specifier))
# File extension
file_extension = '.png'
# The operating system
op_sys = platform.system().lower()
# python's platform.system() returns 'darwin' on macs so change it for clarification.
if op_sys == 'darwin':
op_sys = 'mac'
# Throw an error if a description was not passed in.
if description:
# Generate the baselines if this is a baseline run.
if self.is_baseline:
# Generate the file name. This is created by concatenating the
# description, browser and operating system. _baseline is also
# appended for clarification.
file_name = str(description) + '_' + self.br |
mkhutornenko/incubator-aurora | src/test/python/apache/aurora/executor/test_thermos_executor.py | Python | apache-2.0 | 18,559 | 0.009268 | #
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import getpass
import os
import signal
import subprocess
import tempfile
import threading
import time
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
from collections import defaultdict
import mesos_pb2 as mesos_pb
from thrift.TSerialization import serialize
from twitter.common import log
from twitter.common.contextutil import temporary_dir
from twitter.common.dirutil import safe_mkdtemp, safe_rmtree
from twitter.common.exceptions import ExceptionalThread
from twitter.common.log.options import LogOptions
from twitter.common.quantity import Amount, Time
from apache.aurora.config.schema.base import (
HealthCheckConfig,
MB,
MesosJob,
MesosTaskInstance,
Process,
Resources,
Task
)
from apache.aurora.executor.aurora_executor import AuroraExecutor
from apache.aurora.executor.common.executor_timeout import ExecutorTimeout
from apache.aurora.executor.common.health_checker import HealthCheckerProvider
from apache.aurora.executor.common.sandbox import DirectorySandbox, SandboxProvider
from apache.aurora.executor.common.task_runner import TaskError
from apache.aurora.executor.status_manager import StatusManager
from apache.aurora.executor.thermos_task_runner import (
DefaultThermosTaskRunnerProvider,
ThermosTaskRunner
)
from apache.thermos.common.path import TaskPath
from apache.thermos.core.runner import TaskRunner
from apache.thermos.monitoring.monitor import TaskMonitor
from gen.apache.aurora.api.constants import AURORA_EXECUTOR_NAME
from gen.apache.aurora.api.ttypes import AssignedTask, ExecutorConfig, Identity, TaskConfig
# Opt-in verbose logging for local debugging of these tests.
if 'THERMOS_DEBUG' in os.environ:
  LogOptions.set_stderr_log_level('google:DEBUG')
  LogOptions.set_simple(True)
  log.init('executor_logger')
class FastThermosExecutor(AuroraExecutor):
  # Executor variant with no stop grace period, to keep tests fast.
  STOP_WAIT = Amount(0, Time.SECONDS)
class FastStatusManager(StatusManager):
  # Poll much more frequently than production so tests observe updates quickly.
  POLL_WAIT = Amount(10, Time.MILLISECONDS)
class DefaultTestSandboxProvider(SandboxProvider):
  # Provides a sandbox backed by a fresh temporary directory per task.
  def from_assigned_task(self, assigned_task):
    return DirectorySandbox(safe_mkdtemp())
class FailingStartingTaskRunner(ThermosTaskRunner):
  # Test double: start() always raises, to exercise runner-failure handling.
  def start(self):
    raise TaskError('I am an idiot!')
class FailingSandbox(DirectorySandbox):
  # Test double: create() always fails with CreationError.
  def create(self):
    raise self.CreationError('Could not create directory!')
class FailingSandboxProvider(SandboxProvider):
  # Provides the always-failing sandbox above.
  def from_assigned_task(self, assigned_task):
    return FailingSandbox(safe_mkdtemp())
class SlowSandbox(DirectorySandbox):
  # Test double whose create() blocks until the test sets _init_start,
  # letting tests exercise the executor while sandbox creation is in flight.
  def __init__(self, *args, **kwargs):
    super(SlowSandbox, self).__init__(*args, **kwargs)
    self.is_initialized = lambda: False
    self._init_start = threading.Event()
    self._init_done = threading.Event()

  def create(self):
    # wait for the test to release us, then do the real creation
    self._init_start.wait()
    super(SlowSandbox, self).create()
    self.is_initialized = lambda: True
    self._init_done.set()
class SlowSandboxProvider(SandboxProvider):
  # Provides the blocking SlowSandbox above.
  def from_assigned_task(self, assigned_task):
    return SlowSandbox(safe_mkdtemp())
class ProxyDriver(object):
  """Test double for a mesos driver that records every method invocation.

  Accessing any attribute yields a recorder that appends (args, kwargs) to
  ``method_calls[<name>]``; ``stop`` additionally sets an event on which
  ``wait_stopped`` blocks.
  """

  def __init__(self):
    self.method_calls = defaultdict(list)
    self._stop_event = threading.Event()

  def __getattr__(self, name):
    def _record(*args, **kwargs):
      self.method_calls[name].append((args, kwargs))
    return _record

  def stop(self, *args, **kwargs):
    self.method_calls['stop'].append((args, kwargs))
    self._stop_event.set()

  def wait_stopped(self):
    return self._stop_event.wait()
def make_task(thermos_config, assigned_ports={}, **kw):
  # Build a mesos TaskInfo whose data payload is a serialized AssignedTask
  # thrift struct wrapping the given thermos config; the task id is derived
  # from the task name with a '-001' suffix.
  # NOTE(review): mutable default for assigned_ports is shared across calls;
  # safe only while it is never mutated.
  role = getpass.getuser()
  task_id = thermos_config.task().name().get() + '-001'
  at = AssignedTask(
      taskId=task_id,
      task=TaskConfig(
          executorConfig=ExecutorConfig(
              name=AURORA_EXECUTOR_NAME,
              data=thermos_config.json_dumps()),
          owner=Identity(role=role, user=role)),
      assignedPorts=assigned_ports,
      **kw)
  td = mesos_pb.TaskInfo()
  td.task_id.value = task_id
  td.name = thermos_config.task().name().get()
  td.data = serialize(at)
  return td
# Shared task fixtures used by the tests below.
BASE_MTI = MesosTaskInstance(instance=0, role=getpass.getuser())
BASE_TASK = Task(resources=Resources(cpu=1.0, ram=16 * MB, disk=32 * MB))

HELLO_WORLD_TASK_ID = 'hello_world-001'
HELLO_WORLD = BASE_TASK(
    name='hello_world',
    processes=[Process(name='hello_world_{{thermos.task_id}}', cmdline='echo hello world')])
HELLO_WORLD_MTI = BASE_MTI(task=HELLO_WORLD)

SLEEP60 = BASE_TASK(processes=[Process(name='sleep60', cmdline='sleep 60')])
SLEEP2 = BASE_TASK(processes=[Process(name='sleep2', cmdline='sleep 2')])
SLEEP60_MTI = BASE_MTI(task=SLEEP60)

MESOS_JOB = MesosJob(
    name='does_not_matter',
    instances=1,
    role=getpass.getuser(),
)
def make_provider(checkpoint_root, runner_class=ThermosTaskRunner):
  # Runner provider pointing at the locally built thermos_runner pex.
  return DefaultThermosTaskRunnerProvider(
      pex_location=os.path.join('dist', 'thermos_runner.pex'),
      checkpoint_root=checkpoint_root,
      task_runner_class=runner_class,
  )
def make_executor(
    proxy_driver,
    checkpoint_root,
    task,
    ports={},
    fast_status=False,
    runner_class=ThermosTaskRunner,
    status_providers=()):
  # Construct an executor, launch `task` on it, assert the TASK_STARTING /
  # TASK_RUNNING status sequence, wait until a thermos runner has bound to
  # the task, and return (runner, executor).
  # NOTE(review): mutable default for ports -- safe only while unmutated.

  status_manager_class = FastStatusManager if fast_status else StatusManager
  runner_provider = make_provider(checkpoint_root, runner_class)
  te = FastThermosExecutor(
      runner_provider=runner_provider,
      status_manager_class=status_manager_class,
      sandbox_provider=DefaultTestSandboxProvider,
      status_providers=status_providers,
  )

  ExecutorTimeout(te.launched, proxy_driver, timeout=Amount(100, Time.MILLISECONDS)).start()
  task_description = make_task(task, assigned_ports=ports, instanceId=0)
  te.launchTask(proxy_driver, task_description)

  te.status_manager_started.wait()
  # every registered status checker must export an '<name>.enabled' metric
  sampled_metrics = te.metrics.sample()
  assert 'kill_manager.enabled' in sampled_metrics
  for checker in te._chained_checker._status_checkers: # hacky
    assert ('%s.enabled' % checker.name()) in sampled_metrics

  while len(proxy_driver.method_calls['sendStatusUpdate']) < 2:
    time.sleep(0.1)

  # make sure startup was kosher
  updates = proxy_driver.method_calls['sendStatusUpdate']
  assert len(updates) == 2
  status_updates = [arg_tuple[0][0] for arg_tuple in updates]
  assert status_updates[0].state == mesos_pb.TASK_STARTING
  assert status_updates[1].state == mesos_pb.TASK_RUNNING

  # wait for the runner to bind to a task
  while True:
    runner = TaskRunner.get(task_description.task_id.value, checkpoint_root)
    if runner:
      break
    time.sleep(0.1)

  assert te.launched.is_set()
  return runner, te
class UnhealthyHandler(BaseHTTPRequestHandler):
  # Health endpoint double: responds 200 but with an unhealthy body.
  def do_GET(self):
    self.send_response(200)
    self.end_headers()
    self.wfile.write('not ok')
class HealthyHandler(BaseHTTPRequestHandler):
  # Health endpoint double: responds 200 with a healthy body.
  def do_GET(self):
    self.send_response(200)
    self.end_headers()
    self.wfile.write('ok')
class SignalServer(ExceptionalThread):
  """Daemonized HTTP server thread usable as a context manager.

  ``__enter__`` starts the serving thread and returns the ephemeral port
  the server bound to; ``__exit__`` raises the stop flag.
  """
  def __init__(self, handler):
    # Port 0 => let the OS pick a free port.
    self._server = HTTPServer(('', 0), handler)
    super(SignalServer, self).__init__()
    self.daemon = True
    self._stop = threading.Event()
  def run(self):
    # handle_request() blocks, so the stop flag is only observed between
    # requests; the thread exits after the next request once stopped.
    while not self._stop.is_set():
      self._server.handle_request()
  def __enter__(self):
    self.start()
    return self._server.server_port
  def __exit__(self, exc_type, exc_val, traceback):
    self._stop.set()
class TestThermosExecutor(object):
PANTS_BUILT = False
LOG_DIR = None
@classmethod
def setup_class(cls):
cls.LOG_DIR = tempfile.mkdtemp()
LogOptions.set_log_dir(cls.LOG_DIR)
LogOptions.set_disk_log_level('DEBUG')
log.init('executor_logger')
if not cls.PANTS_BUILT and 'SKIP_PANTS_BUILD' not in os.environ:
assert subprocess.call([". |
def agts(queue):
    """Queue the Al convergence run and the dependent plotting step for AGTS."""
    al = queue.add('al.py', ncpus=8, walltime=12 * 60)
    queue.add('al.agts.py', deps=[al],
              creates=['Al_conv_ecut.png', 'Al_conv_k.png'])
if __name__ == '__main__':
    import pylab as plt
    from ase.utils.eos import EquationOfState
    from ase.io import read

    def fit(filename):
        """Fit an equation of state to *filename* and return the cubic
        lattice constant a = (4*v0)**(1/3)."""
        configs = read(filename + '@:')
        volumes = [a.get_volume() for a in configs]
        energies = [a.get_potential_energy() for a in configs]
        # Fixed garbled identifier: 'volume | s' -> 'volumes'.
        eos = EquationOfState(volumes, energies)
        v0, e0, B = eos.fit()
        # Factor 4: each configuration is a primitive fcc cell, 1/4 of the
        # conventional cubic cell volume.
        return (4 * v0)**(1 / 3.0)

    # Convergence with respect to plane-wave cutoff.
    cutoffs = range(200, 501, 50)
    a = [fit('Al-%d.txt' % ecut) for ecut in cutoffs]
    plt.figure(figsize=(6, 4))
    plt.plot(cutoffs, a, 'o-')
    plt.axis(ymin=4.03, ymax=4.05)
    plt.xlabel('Plane-wave cutoff energy [eV]')
    plt.ylabel('lattice constant [Ang]')
    # Fixed garbled filename: 'Al_conv_ecut.pn | g' -> 'Al_conv_ecut.png'.
    plt.savefig('Al_conv_ecut.png')

    # Convergence with respect to k-point sampling.
    kpoints = range(4, 17)
    plt.figure(figsize=(6, 4))
    a = [fit('Al-%02d.txt' % k) for k in kpoints]
    plt.plot(kpoints, a, '-')
    plt.xlabel('number of k-points')
    plt.ylabel('lattice constant [Ang]')
    plt.savefig('Al_conv_k.png')
    plt.show()
Code4SA/umibukela | umibukela/migrations/0002_auto_20160120_1337.py | Python | mit | 1,080 | 0.001852 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Create the SurveyType model, relax two Partner text fields, and
    link CycleResultSet to SurveyType."""

    dependencies = [
        ('umibukela', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='SurveyType',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=200)),
                ('description', models.TextField()),
            ],
        ),
        migrations.AlterField(
            # Fixed garbled keyword: 'model_ | name' -> 'model_name'.
            model_name='partner',
            name='context_statement',
            field=models.TextField(),
        ),
        migrations.AlterField(
            model_name='partner',
            name='intro_statement',
            field=models.TextField(),
        ),
        migrations.AddField(
            model_name='cycleresultset',
            name='survey_type',
            field=models.ForeignKey(blank=True, to='umibukela.SurveyType', null=True),
        ),
    ]
khchine5/xl | lino_xl/lib/cal/models.py | Python | bsd-2-clause | 36,436 | 0.001729 | # -*- coding: UTF-8 -*-
# Copyright 2011-2018 Rumma & Ko Ltd
# License: BSD (see file COPYING for details)
from __future__ import unicode_literals
from builtins import str
import six
import datetime
from django.db import models
from django.db.models import Q
from django.conf import settings
from django.core.validators import MaxValueValidator
from django.core.validators import MinValueValidator
from django.core.exceptions import ValidationError
from django.utils import timezone
from lino import mixins
from lino.api import dd, rt, _, pgettext
from .choicelists import (
DurationUnits, Recurrencies, Weekdays, AccessClasses, PlannerColumns)
from .utils import setkw, dt2kw, when_text
from lino.modlib.checkdata.choicelists import Checker
from lino.modlib.printing.mixins import TypedPrintable
from lino.modlib.printing.mixins import Printable
from lino.modlib.users.mixins import UserAuthored, Assignable
from lino_xl.lib.postings.mixins import Postable
from lino_xl.lib.outbox.mixins import MailableType, Mailable
from lino_xl.lib.contacts.mixins import ContactRelated
from lino.modlib.office.roles import OfficeStaff
from .workflows import (TaskStates, EntryStates, GuestStates)
from .actions import UpdateGuests
from .mixins import Component
from .mixins import EventGenerator, RecurrenceSet, Reservation
from .mixins import Ended
from .mixins import MoveEntryNext, UpdateEntries, UpdateEntriesByEvent
from .actions import ShowEntriesByDay
from .ui import ConflictingEvents
DEMO_START_YEAR = 2013
class CalendarType(object):
    """Base class for calendar backend types.

    Subclasses may override :meth:`validate_calendar` to adjust or check
    a calendar instance before it is saved.
    """
    def validate_calendar(self, cal):
        # Default hook: accept the calendar unchanged.
        pass
class LocalCalendar(CalendarType):
    """Calendar stored locally; no remote validation needed."""
    label = "Local Calendar"
class GoogleCalendar(CalendarType):
    """Calendar hosted by Google, accessed over CalDAV."""
    label = "Google Calendar"

    def validate_calendar(self, cal):
        # Fill in the standard Google CalDAV URL when none is configured;
        # %(username)s / %(password)s are expanded later in get_url().
        if not cal.url_template:
            cal.url_template = \
                "https://%(username)s:%(password)s@www.google.com/calendar/dav/%(username)s/"
CALENDAR_CHOICES = []
CALENDAR_DICT = {}
def register_calendartype(name, calendar_type):
    """Make *calendar_type* available under the key *name*.

    Records it in the module-level lookup dict and appends a
    ``(name, label)`` pair to the choices used by the
    ``RemoteCalendar.type`` field.
    """
    CALENDAR_DICT[name] = calendar_type
    CALENDAR_CHOICES.append((name, calendar_type.label))
register_calendartype('local', LocalCalendar())
register_calendartype('google', GoogleCalendar())
class DailyPlannerRow(mixins.BabelDesignated, mixins.Sequenced):
    """One configurable row (time slot) of the daily planner."""
    class Meta:
        app_label = 'cal'
        abstract = dd.is_abstract_model(__name__, 'PlannerRow')
        verbose_name = _("Planner row")
        verbose_name_plural = _("Planner rows")
        ordering = ['seqno']
    # Optional slot boundaries; DailyPlanner matches events whose
    # start_time falls inside [start_time, end_time).
    start_time = models.TimeField(
        blank=True, null=True,
        verbose_name=_("Start time"))
    end_time = models.TimeField(
        blank=True, null=True,
        verbose_name=_("End time"))
from lino.mixins.periods import ObservedDateRange
from etgen.html import E
from lino.utils import join_elems
class DailyPlannerRows(dd.Table):
    """Admin table for configuring the rows of the daily planner."""
    model = 'cal.DailyPlannerRow'
    column_names = "seqno designation start_time end_time"
    required_roles = dd.login_required(OfficeStaff)
class DailyPlanner(DailyPlannerRows):
    """Read-only view over the planner rows with one generated column per
    :class:`PlannerColumns` choice, listing the calendar entries of a
    given day (optionally filtered by user)."""
    label = _("Daily planner")
    editable = False
    parameters = dict(
        date=models.DateField(
            _("Date"), help_text=_("Date to show")),
        user=dd.ForeignKey('users.User', null=True, blank=True))

    @classmethod
    def param_defaults(cls, ar, **kw):
        kw = super(DailyPlanner, cls).param_defaults(ar, **kw)
        kw.update(date=dd.today())
        # kw.update(end_date=dd.today())
        # kw.update(user=ar.get_user())
        return kw

    @classmethod
    def setup_columns(self):
        # Materialize one virtual column per planner column.
        names = ''
        for i, vf in enumerate(self.get_ventilated_columns()):
            self.add_virtual_field('vc' + str(i), vf)
            names += ' ' + vf.name + ':20'
        self.column_names = "overview {}".format(names)
        #~ logger.info("20131114 setup_columns() --> %s",self.column_names)

    @classmethod
    def get_ventilated_columns(cls):
        Event = rt.models.cal.Event

        def fmt(e):
            # Short label: HH:MM plus user initials (or the room when the
            # event has no user).
            t = str(e.start_time)[:5]
            u = e.user
            if u is None:
                return "{} {}".format(t, e.room)
            u = u.initials or u.username or str(u)
            # Fixed garbled line; also dropped an unreachable `return t`
            # that sat inside the `if` branch above.
            return "{} {}".format(t, u)

        def w(pc, verbose_name):
            def func(fld, obj, ar):
                # obj is the DailyPlannerRow instance
                pv = ar.param_values
                # Fixed garbled call: 'Event.objects.f | ilter' -> 'filter'.
                qs = Event.objects.filter(event_type__planner_column=pc)
                if pv.user:
                    qs = qs.filter(user=pv.user)
                if pv.date:
                    qs = qs.filter(start_date=pv.date)
                if obj.start_time:
                    qs = qs.filter(start_time__gte=obj.start_time,
                                   start_time__isnull=False)
                if obj.end_time:
                    qs = qs.filter(start_time__lt=obj.end_time,
                                   start_time__isnull=False)
                if not obj.start_time and not obj.end_time:
                    # Row without boundaries collects untimed entries.
                    qs = qs.filter(start_time__isnull=True)
                qs = qs.order_by('start_time')
                chunks = [e.obj2href(ar, fmt(e)) for e in qs]
                return E.p(*join_elems(chunks))
            return dd.VirtualField(dd.HtmlBox(verbose_name), func)

        for pc in PlannerColumns.objects():
            yield w(pc, pc.text)
class RemoteCalendar(mixins.Sequenced):
    """Connection parameters of a remote calendar to synchronize with."""
    class Meta:
        app_label = 'cal'
        abstract = dd.is_abstract_model(__name__, 'RemoteCalendar')
        verbose_name = _("Remote Calendar")
        verbose_name_plural = _("Remote Calendars")
        ordering = ['seqno']
    # Backend key into CALENDAR_DICT ('local' or 'google').
    type = models.CharField(_("Type"), max_length=20,
                            default='local',
                            choices=CALENDAR_CHOICES)
    url_template = models.CharField(_("URL template"),
                                    max_length=200, blank=True)  # ,null=True)
    username = models.CharField(_("Username"),
                                max_length=200, blank=True)  # ,null=True)
    password = dd.PasswordField(_("Password"),
                                max_length=200, blank=True)  # ,null=True)
    readonly = models.BooleanField(_("read-only"), default=False)

    def get_url(self):
        """Return the access URL with credentials substituted, or ''."""
        if self.url_template:
            return self.url_template % dict(
                username=self.username,
                password=self.password)
        return ''

    def save(self, *args, **kw):
        # Let the backend type fill in defaults (e.g. Google's CalDAV URL)
        # before persisting.
        ct = CALENDAR_DICT.get(self.type)
        ct.validate_calendar(self)
        super(RemoteCalendar, self).save(*args, **kw)
class Room(mixins.BabelNamed, ContactRelated):
    """A location where calendar entries can happen.

    Inherits a multilingual name and an optional link to a responsible
    company / contact person (relabelled just below this class).
    """
    class Meta:
        app_label = 'cal'
        abstract = dd.is_abstract_model(__name__, 'Room')
        verbose_name = _("Room")
        verbose_name_plural = _("Rooms")
    description = dd.RichTextField(_("Description"), blank=True)
dd.update_field(
Room, 'company', verbose_name=_("Responsible"))
dd.update_field(
Room, 'contact_person', verbose_name=_("Contact person"))
class Priority(mixins.BabelNamed):
    """The priority of a task or calendar entry."""
    class Meta:
        app_label = 'cal'
        verbose_name = _("Priority")
        verbose_name_plural = _('Priorities')
    # One-character reference code (e.g. for iCal PRIORITY mapping —
    # TODO confirm intended use).
    ref = models.CharField(max_length=1)
@dd.python_2_unicode_compatible
class EventType(mixins.BabelNamed, mixins.Sequenced, MailableType):
templates_group = 'cal/Event'
class Meta:
app_label = 'cal'
abstract = dd.is_abstract_model(__name__, 'EventType')
verbose_name = _("Calendar entry type")
verbose_name_plural = _("Calendar entry types")
ordering = ['seqno']
description = dd.RichTextField(
_("Description"), blank=True, format='html')
is_appointment = models.BooleanField(_("Appointment"), default=True)
all_rooms = models.BooleanField(_("Locks all rooms"), default=False)
locks_user = models.BooleanField(_("Locks the user"), default=False)
start_date = models.DateField(
verbose_name=_("Start date"),
blank=True, null=True)
event_label = dd.BabelCharField(
_("Entry l |
noironetworks/networking-cisco | networking_cisco/db/migration/alembic_migrations/versions/mitaka/expand/203b495958cf_add_port_profile_delete_table_for_ucsm_.py | Python | apache-2.0 | 1,179 | 0.000848 | # Copyright 2017 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from alembic import op
import sqlalchemy as sa
"""Add Port Profile delete table for UCSM plugin
Revision ID: 203b495958cf
Revises: b29f1026b281
Create Date: 2017-01-03 16:25:03.426346
"""
# revision identifiers, used by Alembic.
revision = '203b495958cf'
down_revision = 'b29f1026b281'
def upgrade():
    """Create the table recording UCSM port profiles queued for deletion."""
    op.create_table('ml2_ucsm_delete_port_profiles',
        sa.Column('profile_id', sa.String(length=64), nullable=False),
        sa.Column('device_id', sa.String(length=64), nullable=False),
        # A profile may be pending deletion on several devices, hence the
        # composite primary key.
        sa.PrimaryKeyConstraint('profile_id', 'device_id')
    )
|
digitalocean/netbox | netbox/dcim/migrations/0104_correct_infiniband_types.py | Python | apache-2.0 | 914 | 0 | from django.db import migrations
# (misspelled, corrected) pairs: the old values really were stored with the
# 'inifiband' typo; this migration renames them to 'infiniband'.
INFINIBAND_SLUGS = (
    ('inifiband-sdr', 'infiniband-sdr'),
    ('inifiband-ddr', 'infiniband-ddr'),
    ('inifiband-qdr', 'infiniband-qdr'),
    ('inifiband-fdr10', 'infiniband-fdr10'),
    ('inifiband-fdr', 'infiniband-fdr'),
    ('inifiband-edr', 'infiniband-edr'),
    ('inifiband-hdr', 'infiniband-hdr'),
    ('inifiband-ndr', 'infiniband-ndr'),
    ('inifiband-xdr', 'infiniband-xdr'),
)
def correct_infiniband_types(apps, schema_editor):
    """Bulk-rename the misspelled InfiniBand interface type slugs."""
    interface_model = apps.get_model('dcim', 'Interface')
    for misspelled, corrected in INFINIBAND_SLUGS:
        interface_model.objects.filter(type=misspelled).update(type=corrected)
class Migration(migrations.Migration):
    """Data migration fixing the 'inifiband' -> 'infiniband' typo in
    Interface.type values."""
    dependencies = [
        ('dcim', '0103_standardize_description'),
    ]

    operations = [
        migrations.RunPython(
            code=correct_infiniband_types,
            # Reversing is a no-op: the forward pass only corrects a typo.
            reverse_code=migrations.RunPython.noop
        ),
    ]
|
jn-gautier/exercices_moodle | phys/elec/elec_base.py | Python | gpl-3.0 | 23,892 | 0.054767 | #! /usr/bin/python3
# -*- coding: utf-8 -*-
import random
"""A faire :
convertir les questions en cloze pour gérer les unités
réécrire les feedback en respectant la présentation DIER
écrire les dernière questions
"""
def convert_sci(nombre):
    """Format *nombre* for LaTeX with at most four significant digits.

    Values above 1000 or below 0.1 come out in scientific notation
    (``m\\times 10^{e}``); everything else as a plain number truncated to
    four significant digits.  The decimal separator is a French comma.
    """
    if nombre > 1000 or nombre < 0.1:
        mantissa, exponent = ('%.4E' % nombre).split('E')
        mantissa = mantissa.replace('.', ',')
        integer_digits, decimal_digits = mantissa.split(',')
        # Drop an all-zero fractional part (e.g. '5,0000' -> '5').
        if all(d == '0' for d in decimal_digits):
            mantissa = integer_digits
        return mantissa + '\\times 10^{' + exponent + '}'

    text = str(nombre).replace('.', ',')
    pieces = text.split(',')
    integer_part = pieces[0]
    # A leading '0' carries no significant digit.
    significant = 0 if integer_part == '0' else len(integer_part)
    fraction = pieces[1] if len(pieces) > 1 else ''
    kept = fraction[:4 - significant]
    if kept:
        return integer_part + ',' + kept
    return integer_part
#
#Quelle est l'intensité du courant dans une résistance de ### ohms si la différence de potentiel aux bornes de celle-ci est de ### [V]?
#Quelle devrait être la valeur d'une résistance soumise à une tension de ### [V] pour que l'intensité du courant qui la traverse soit de ### [A]?
"""
P=U/R
U=PR
R=U/P
E=U*dt/R
"""
class Question:
    def __init__(self):
        # Randomly drawn circuit quantities (SI units).
        self.resistance=random.randint(2,30)  # R [ohm]
        self.intensite=random.randint(1,20)  # I [A]
        self.duree=random.randint(10,120)  # dt [s]
        # Derived quantities: U = R*I, q = I*dt, P = U*I, E = U*I*dt.
        self.potentiel=self.resistance*self.intensite  # U [V]
        self.charge=self.intensite*self.duree  # q [C]
        self.puissance=self.potentiel*self.intensite  # P [W]
        self.energie=self.potentiel*self.intensite*self.duree  # E [J]
        # LaTeX-formatted energy (module-level convert_sci).
        self.energie_str=convert_sci(self.energie)
        #self.ponderateur=random.choice([[0.4,0.5,0.7,0.8],[0.5,0.7,0.8,1.1],[0.7,0.8,1.1,1.2],[0.8,1.1,1.2,1.4],[1.1,1.2,1.4,1.6]])
#
    def type_1(self):
        """U = R*I: ask for the voltage given resistance and current.

        Fills enonce (question text), reponse (expected answer), feedback
        (worked solution), unites and the shuffled unites_fausses.
        """
        self.enonce="Quelle devrait être la différence de potentiel aux bornes d'une résistance de %s [ohm] pour que l'intensité du courant qui la traverse soit de %s [A]?"%(self.resistance,self.intensite)
        self.reponse=self.potentiel
        self.feedback="<p>\(U=R \cdot I\) </p>"
        self.feedback+="<p>\(U=%s \cdot %s \) </p>"%(self.resistance,self.intensite)
        self.feedback+="<p>U= %s [V]</p>"%(self.potentiel)
        self.unites='[V]'
        self.unites_fausses=['[C]','[s]','[A]','[ohm]','[W]','[J]','[V]']
        random.shuffle(self.unites_fausses)
def type_2(self):
"""R=U/I """
self.enonce="Quelle devrait être la valeur de la résistance d'une ampoule soumise à une tension de %s [V] si l'intensité du courant qui la traverse ne peut pas dépasser %s [A]?"%(self.potentiel,self.intensite)
self.reponse=self.resistance
self.feedback="<p>\(U=R \cdot I \\rightarrow R=\\frac{U}{I}\) </p>"
self.feedback+="<p>\(R=\\frac{%s}{%s} \) </p>"%(self.potentiel,self.intensite)
self.feedback+="<p>\(R= %s [\omega] \)</p>"%(self.resistance)
self.unites='[ohm]'
self.unites_fausses=['[C]','[s]','[A]','[ohm]','[W]','[J]','[V]']
random.shuffle(self.unites_fausses)
def type_3(self):
""" U=Rq/dt """
self.enonce="À quelle différence de potentiel faut-il soumettre une résistance de %s [ohm] pour qu'une charge total de %s [C] la traverse dans un intervalle de %s [s]?"%(self.resistance,self.charge,self.duree)
self.reponse=self.potentiel
self.feedback="<p>\(U=R \cdot I \\ ; \\ I=\\frac{q}{\Delta t} \)</p>"
self.feedback+="<p>\( \\rightarrow U=\\frac{R \cdot q}{\Delta t} \)</p>"
self.feedback+="<p>\(U=\\frac{%s \cdot %s}{ %s} \) </p>"%(self.resistance,self.charge,self.duree)
self.feedback+="<p>U= %s [V]</p>"%(self.potentiel)
self.unites='[V]'
self.unites_fausses=['[C]','[s]','[A]','[ohm]','[W]','[J]','[V]']
random.shuffle(self.unites_fausses)
def type_4(self):
""" R=U*dt/q """
self.enonce="On souhaite qu'une charge totale de %s [C] puisse passer au travers d'une résistance de soumise à une différence de potentiel de %s [V] dans un intervalle de %s [s]. Quelle doit être la valeur de cette résistance?"%(self.charge,self.potentiel,self.duree)
self.reponse=self.resistance
self.feedback="<p>\(U=R \cdot I \\ ; \\ I=\\frac{q}{\Delta t} \)</p>"
self.feedback+="<p>\( \\rightarrow R=\\frac{U \cdot \Delta t}{q} \)</p>"
self.feedback+="<p>\( R=\\frac{%s \cdot %s}{%s} \) </p>"%(self.potentiel,self.duree,self.charge)
self.feedback+="<p>\(R= %s [\omega] \)</p>"%(self.resistance)
self.unites='[ohm]'
self.unites_fausses=['[C]','[s]','[A]','[ohm]','[W]','[J]','[V]']
random.shuffle(self.unites_fausses)
def type_5(self):
""" I=U/R """
self.enonce="Une résistance de %s [ohm] est soumise à une différence de potentiel de %s [V]. Quelle est l'intensité du courant qui traverse la résistance?"%(self.resistance,self.potentiel)
self.reponse=self.intensite
self.feedback="<p>\(U=R \cdot I \\rightarrow I=\\frac{U}{R}\) </p>"
self.feedback+="<p>\(I=\\frac{%s}{%s} \) </p>"%(self.potentiel,self.resistance)
self.feedback+="<p>\(I= %s [A] \)</p>"%(self.intensite)
self.unites='[A]'
self.unites_fausses=['[C]','[s]','[A]','[ohm]','[W]','[J]','[V]']
random.shuffle(self.unites_fausses)
def type_6(self):
""" q=(U*dt)/R """
self.enonce="Quelle quantité de charges aura traversé une résistance de %s [ohm] soumise à une différence de potentiel de %s [V] durant %s [s]?"%(self.resistance,self.potentiel,self.duree)
self.reponse=self.charge
self.feedback="<p>\(U=R \cdot I \\ ; \\ I=\\frac{q}{\Delta t} \)</p>"
self.feedback+="<p>\( \\rightarrow q=\\frac{U \cdot \Delta t}{R} \)</p>"
self.feedback+="<p>\( q=\\frac{%s \cdot %s}{%s} \) </p>"%(self.potentiel,self.duree,self.resistance)
self.feedback="<p>\(q= %s [C] \)</p>"%(self.charge)
self.unites='[C]'
self.unites_fausses=['[C]','[s]','[A]','[ohm]','[W]','[J]','[V]']
random.shuffle(self.unites_fausses)
def type_7(self):
""" P=U*I """
self.enonce="Dans un circuit simple constitué d'un générateur et d'un récepteur, le récepteur est traversé par un courant de %s [A] lorsque la différence de potentiel aux bornes du générateur est de %s [V]. Quelle est la puissance du récepteur?"%(self.intensite,self.potentiel)
self.reponse=self.puissance
self.feedback="<p>\(P=U \cdot I\) </p>"
self.feedback+="<p>\(P=%s \cdot %s \) </p>"%(self.potentiel,self.intensite)
self.feedback+="<p>P= %s [W]</p>" | %(self.puissance)
self.unites='[W]'
self.unites_fausses=['[C]','[s]','[A]','[ohm]','[W]','[J]','[V]']
random.shuffle(self.unites_fausses)
def type_8(self):
""" P=Uq/dt """
self.enonce="Une batterie contenant une charge totale de %s [C] est capable de générer une différence de potentiel de %s [V]. Un récepteur alimenté par cette batterie peut fonctionner durant %s [s]. Quelle est la puissance de cet | te batterie?"%(self.charge,self.potentiel,self.duree |
jittat/adm2 | scripts/export_scores_for_registered.py | Python | agpl-3.0 | 770 | 0.011688 | import codecs
import sys
import os
from django.conf import settings
from django_bootstrap import bootstrap
bootstrap(__file__)
from result.models import NIETSScores
from application.models import Applicant
from confirmation.models import StudentRegistration, AdmissionWaiver
def main():
uses_nat_id = ('--nat' in sys.argv)
registrations = StudentRegistration.objects.all()
c = 0
for reg in registrations:
a = reg.applicant
if a.admission_results.count()!=0 an | d not AdmissionWaiver.is_waived(a):
niets_scores = a.NIETS_scores
k = a.id
if uses_nat_id:
k = a.national_id
| print "%s,%f" % (k,niets_scores.get_score())
c += 1
if __name__=='__main__':
main()
|
plilja/adventofcode | 2018/day19/day19.py | Python | gpl-3.0 | 2,591 | 0.000772 | import sys
from math import sqrt
def opr(f):
    """Wrap *f* as a register/register instruction:
    regs[c] = f(regs[a], regs[b]) for operands [a, b, c]."""
    def handler(registers, args):
        src_a, src_b, dst = args[0], args[1], args[2]
        registers[dst] = f(registers[src_a], registers[src_b])
    return handler
def opi(f):
    """Wrap *f* as a register/immediate instruction:
    regs[c] = f(regs[a], b) for operands [a, b, c]."""
    def handler(registers, args):
        src_a, imm_b, dst = args[0], args[1], args[2]
        registers[dst] = f(registers[src_a], imm_b)
    return handler
def opir(f):
    """Wrap *f* as an immediate/register instruction:
    regs[c] = f(a, regs[b]) for operands [a, b, c]."""
    def handler(registers, args):
        imm_a, src_b, dst = args[0], args[1], args[2]
        registers[dst] = f(imm_a, registers[src_b])
    return handler
def setr(registers, args):
    # setr: regs[c] = regs[a]; the b operand is ignored.
    src, dst = args[0], args[2]
    registers[dst] = registers[src]
def seti(registers, args):
    # seti: regs[c] = a (immediate); the b operand is ignored.
    value, dst = args[0], args[2]
    registers[dst] = value
# Dispatch table: opcode name -> callable(registers, [a, b, c]).
# Suffix convention: 'r' = register operand, 'i' = immediate operand
# (gtir/eqir take the *first* operand as immediate).
ops = {
    'addr': opr(lambda a, b: a + b),
    'addi': opi(lambda a, b: a + b),
    'mulr': opr(lambda a, b: a * b),
    'muli': opi(lambda a, b: a * b),
    'banr': opr(lambda a, b: a & b),
    'bani': opi(lambda a, b: a & b),
    'borr': opr(lambda a, b: a | b),
    'bori': opi(lambda a, b: a | b),
    'setr': setr,
    'seti': seti,
    'gtrr': opr(lambda a, b: 1 if a > b else 0),
    'gtri': opi(lambda a, b: 1 if a > b else 0),
    'gtir': opir(lambda a, b: 1 if a > b else 0),
    'eqrr': opr(lambda a, b: 1 if a == b else 0),
    'eqri': opi(lambda a, b: 1 if a == b else 0),
    'eqir': opir(lambda a, b: 1 if a == b else 0)
}
def step1(ip, instructions):
    """Run the program with all six registers zeroed; return register 0.

    *ip* is the index of the register bound to the instruction pointer.
    """
    registers = [0, 0, 0, 0, 0, 0]
    while 0 <= registers[ip] < len(instructions):
        instruction = instructions[registers[ip]]
        op = ops[instruction[0]]
        op(registers, instruction[1:])
        # Fixed garbled line ('| if ...'): halt when the incremented IP
        # would point past the end of the program.
        if registers[ip] + 1 >= len(instructions):
            break
        registers[ip] += 1
    return registers[0]
def step2(ip, instructions):
    """Run with register 0 = 1, short-circuiting the program's hot loop.

    The AoC program (instructions 2..13) computes the sum of divisors of
    the number in register 2; when the IP reaches that region, compute the
    answer directly instead of simulating.
    """
    registers = [1, 0, 0, 0, 0, 0]
    while 0 <= registers[ip] < len(instructions):
        # Fixed garbled comparison: 'r | egisters[ip]' -> 'registers[ip]'.
        if 2 == registers[ip] or 13 == registers[ip]:
            # The instructions are an algorithm for summing divisors of register 2
            num = registers[2]
            r = 0
            for i in range(1, int(sqrt(num)) + 1):
                if num % i == 0:
                    # NOTE(review): counts sqrt(num) twice for perfect
                    # squares (as the original did) — AoC inputs are not
                    # perfect squares; confirm before reusing elsewhere.
                    r += i + num // i
            return r
        instruction = instructions[registers[ip]]
        op = ops[instruction[0]]
        op(registers, instruction[1:])
        if registers[ip] + 1 >= len(instructions):
            break
        registers[ip] += 1
    return registers[0]
def parse_input():
    """Read the program from stdin.

    Returns (ip, instructions): ip is the register bound to the
    instruction pointer (from the '#ip N' directive, None if absent) and
    instructions is a list of [opname, a, b, c] entries.
    """
    instructions = []
    ip = None
    for s in sys.stdin:
        args = s.split()
        if args[0] == '#ip':
            ip = int(args[1])
        else:
            instructions += [[args[0]] + list(map(int, args[1:]))]
    return ip, instructions
# Script entry: read the program from stdin and print both puzzle answers.
ip, instructions = parse_input()
print(step1(ip, instructions))
print(step2(ip, instructions))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.